summaryrefslogtreecommitdiffstats
path: root/third_party/libwebrtc/pc
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-19 00:47:55 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-19 00:47:55 +0000
commit26a029d407be480d791972afb5975cf62c9360a6 (patch)
treef435a8308119effd964b339f76abb83a57c29483 /third_party/libwebrtc/pc
parentInitial commit. (diff)
downloadfirefox-26a029d407be480d791972afb5975cf62c9360a6.tar.xz
firefox-26a029d407be480d791972afb5975cf62c9360a6.zip
Adding upstream version 124.0.1.upstream/124.0.1
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'third_party/libwebrtc/pc')
-rw-r--r--third_party/libwebrtc/pc/BUILD.gn2890
-rw-r--r--third_party/libwebrtc/pc/DEPS26
-rw-r--r--third_party/libwebrtc/pc/OWNERS11
-rw-r--r--third_party/libwebrtc/pc/audio_rtp_receiver.cc347
-rw-r--r--third_party/libwebrtc/pc/audio_rtp_receiver.h165
-rw-r--r--third_party/libwebrtc/pc/audio_rtp_receiver_unittest.cc127
-rw-r--r--third_party/libwebrtc/pc/audio_track.cc70
-rw-r--r--third_party/libwebrtc/pc/audio_track.h66
-rw-r--r--third_party/libwebrtc/pc/channel.cc1219
-rw-r--r--third_party/libwebrtc/pc/channel.h507
-rw-r--r--third_party/libwebrtc/pc/channel_interface.h105
-rw-r--r--third_party/libwebrtc/pc/channel_unittest.cc2529
-rw-r--r--third_party/libwebrtc/pc/connection_context.cc189
-rw-r--r--third_party/libwebrtc/pc/connection_context.h155
-rw-r--r--third_party/libwebrtc/pc/data_channel_controller.cc440
-rw-r--r--third_party/libwebrtc/pc/data_channel_controller.h165
-rw-r--r--third_party/libwebrtc/pc/data_channel_controller_unittest.cc214
-rw-r--r--third_party/libwebrtc/pc/data_channel_integrationtest.cc1167
-rw-r--r--third_party/libwebrtc/pc/data_channel_unittest.cc1152
-rw-r--r--third_party/libwebrtc/pc/data_channel_utils.cc54
-rw-r--r--third_party/libwebrtc/pc/data_channel_utils.h63
-rw-r--r--third_party/libwebrtc/pc/dtls_srtp_transport.cc330
-rw-r--r--third_party/libwebrtc/pc/dtls_srtp_transport.h105
-rw-r--r--third_party/libwebrtc/pc/dtls_srtp_transport_unittest.cc576
-rw-r--r--third_party/libwebrtc/pc/dtls_transport.cc148
-rw-r--r--third_party/libwebrtc/pc/dtls_transport.h76
-rw-r--r--third_party/libwebrtc/pc/dtls_transport_unittest.cc181
-rw-r--r--third_party/libwebrtc/pc/dtmf_sender.cc243
-rw-r--r--third_party/libwebrtc/pc/dtmf_sender.h118
-rw-r--r--third_party/libwebrtc/pc/dtmf_sender_unittest.cc371
-rw-r--r--third_party/libwebrtc/pc/external_hmac.cc143
-rw-r--r--third_party/libwebrtc/pc/external_hmac.h72
-rw-r--r--third_party/libwebrtc/pc/g3doc/dtls_transport.md53
-rw-r--r--third_party/libwebrtc/pc/g3doc/peer_connection.md59
-rw-r--r--third_party/libwebrtc/pc/g3doc/rtp.md99
-rw-r--r--third_party/libwebrtc/pc/g3doc/sctp_transport.md42
-rw-r--r--third_party/libwebrtc/pc/g3doc/srtp.md72
-rw-r--r--third_party/libwebrtc/pc/ice_server_parsing.cc360
-rw-r--r--third_party/libwebrtc/pc/ice_server_parsing.h42
-rw-r--r--third_party/libwebrtc/pc/ice_server_parsing_unittest.cc242
-rw-r--r--third_party/libwebrtc/pc/ice_transport.cc36
-rw-r--r--third_party/libwebrtc/pc/ice_transport.h52
-rw-r--r--third_party/libwebrtc/pc/ice_transport_unittest.cc64
-rw-r--r--third_party/libwebrtc/pc/jitter_buffer_delay.cc38
-rw-r--r--third_party/libwebrtc/pc/jitter_buffer_delay.h42
-rw-r--r--third_party/libwebrtc/pc/jitter_buffer_delay_unittest.cc56
-rw-r--r--third_party/libwebrtc/pc/jsep_ice_candidate.cc76
-rw-r--r--third_party/libwebrtc/pc/jsep_session_description.cc367
-rw-r--r--third_party/libwebrtc/pc/jsep_session_description_unittest.cc530
-rw-r--r--third_party/libwebrtc/pc/jsep_transport.cc713
-rw-r--r--third_party/libwebrtc/pc/jsep_transport.h330
-rw-r--r--third_party/libwebrtc/pc/jsep_transport_collection.cc390
-rw-r--r--third_party/libwebrtc/pc/jsep_transport_collection.h173
-rw-r--r--third_party/libwebrtc/pc/jsep_transport_controller.cc1451
-rw-r--r--third_party/libwebrtc/pc/jsep_transport_controller.h498
-rw-r--r--third_party/libwebrtc/pc/jsep_transport_controller_unittest.cc2746
-rw-r--r--third_party/libwebrtc/pc/jsep_transport_unittest.cc1386
-rw-r--r--third_party/libwebrtc/pc/legacy_stats_collector.cc1398
-rw-r--r--third_party/libwebrtc/pc/legacy_stats_collector.h218
-rw-r--r--third_party/libwebrtc/pc/legacy_stats_collector_interface.h43
-rw-r--r--third_party/libwebrtc/pc/legacy_stats_collector_unittest.cc1964
-rw-r--r--third_party/libwebrtc/pc/local_audio_source.cc31
-rw-r--r--third_party/libwebrtc/pc/local_audio_source.h50
-rw-r--r--third_party/libwebrtc/pc/local_audio_source_unittest.cc30
-rw-r--r--third_party/libwebrtc/pc/media_protocol_names.cc105
-rw-r--r--third_party/libwebrtc/pc/media_protocol_names.h47
-rw-r--r--third_party/libwebrtc/pc/media_session.cc3145
-rw-r--r--third_party/libwebrtc/pc/media_session.h400
-rw-r--r--third_party/libwebrtc/pc/media_session_unittest.cc5039
-rw-r--r--third_party/libwebrtc/pc/media_stream.cc96
-rw-r--r--third_party/libwebrtc/pc/media_stream.h59
-rw-r--r--third_party/libwebrtc/pc/media_stream_observer.cc98
-rw-r--r--third_party/libwebrtc/pc/media_stream_observer.h57
-rw-r--r--third_party/libwebrtc/pc/media_stream_proxy.h44
-rw-r--r--third_party/libwebrtc/pc/media_stream_track_proxy.h65
-rw-r--r--third_party/libwebrtc/pc/media_stream_unittest.cc153
-rw-r--r--third_party/libwebrtc/pc/peer_connection.cc3046
-rw-r--r--third_party/libwebrtc/pc/peer_connection.h718
-rw-r--r--third_party/libwebrtc/pc/peer_connection_adaptation_integrationtest.cc172
-rw-r--r--third_party/libwebrtc/pc/peer_connection_bundle_unittest.cc1074
-rw-r--r--third_party/libwebrtc/pc/peer_connection_crypto_unittest.cc809
-rw-r--r--third_party/libwebrtc/pc/peer_connection_data_channel_unittest.cc337
-rw-r--r--third_party/libwebrtc/pc/peer_connection_encodings_integrationtest.cc2008
-rw-r--r--third_party/libwebrtc/pc/peer_connection_end_to_end_unittest.cc767
-rw-r--r--third_party/libwebrtc/pc/peer_connection_factory.cc352
-rw-r--r--third_party/libwebrtc/pc/peer_connection_factory.h162
-rw-r--r--third_party/libwebrtc/pc/peer_connection_factory_proxy.h58
-rw-r--r--third_party/libwebrtc/pc/peer_connection_factory_unittest.cc736
-rw-r--r--third_party/libwebrtc/pc/peer_connection_field_trial_tests.cc277
-rw-r--r--third_party/libwebrtc/pc/peer_connection_header_extension_unittest.cc589
-rw-r--r--third_party/libwebrtc/pc/peer_connection_histogram_unittest.cc790
-rw-r--r--third_party/libwebrtc/pc/peer_connection_ice_unittest.cc1589
-rw-r--r--third_party/libwebrtc/pc/peer_connection_integrationtest.cc3853
-rw-r--r--third_party/libwebrtc/pc/peer_connection_interface_unittest.cc3867
-rw-r--r--third_party/libwebrtc/pc/peer_connection_internal.h192
-rw-r--r--third_party/libwebrtc/pc/peer_connection_jsep_unittest.cc2421
-rw-r--r--third_party/libwebrtc/pc/peer_connection_media_unittest.cc2137
-rw-r--r--third_party/libwebrtc/pc/peer_connection_message_handler.cc85
-rw-r--r--third_party/libwebrtc/pc/peer_connection_message_handler.h52
-rw-r--r--third_party/libwebrtc/pc/peer_connection_proxy.h170
-rw-r--r--third_party/libwebrtc/pc/peer_connection_rampup_tests.cc455
-rw-r--r--third_party/libwebrtc/pc/peer_connection_rtp_unittest.cc1983
-rw-r--r--third_party/libwebrtc/pc/peer_connection_signaling_unittest.cc1368
-rw-r--r--third_party/libwebrtc/pc/peer_connection_simulcast_unittest.cc629
-rw-r--r--third_party/libwebrtc/pc/peer_connection_svc_integrationtest.cc308
-rw-r--r--third_party/libwebrtc/pc/peer_connection_wrapper.cc350
-rw-r--r--third_party/libwebrtc/pc/peer_connection_wrapper.h203
-rw-r--r--third_party/libwebrtc/pc/proxy.cc25
-rw-r--r--third_party/libwebrtc/pc/proxy.h499
-rw-r--r--third_party/libwebrtc/pc/proxy_unittest.cc260
-rw-r--r--third_party/libwebrtc/pc/remote_audio_source.cc184
-rw-r--r--third_party/libwebrtc/pc/remote_audio_source.h91
-rw-r--r--third_party/libwebrtc/pc/rtc_stats_collector.cc2201
-rw-r--r--third_party/libwebrtc/pc/rtc_stats_collector.h332
-rw-r--r--third_party/libwebrtc/pc/rtc_stats_collector_unittest.cc3841
-rw-r--r--third_party/libwebrtc/pc/rtc_stats_integrationtest.cc1204
-rw-r--r--third_party/libwebrtc/pc/rtc_stats_traversal.cc133
-rw-r--r--third_party/libwebrtc/pc/rtc_stats_traversal.h44
-rw-r--r--third_party/libwebrtc/pc/rtc_stats_traversal_unittest.cc210
-rw-r--r--third_party/libwebrtc/pc/rtcp_mux_filter.cc125
-rw-r--r--third_party/libwebrtc/pc/rtcp_mux_filter.h78
-rw-r--r--third_party/libwebrtc/pc/rtcp_mux_filter_unittest.cc192
-rw-r--r--third_party/libwebrtc/pc/rtp_media_utils.cc98
-rw-r--r--third_party/libwebrtc/pc/rtp_media_utils.h64
-rw-r--r--third_party/libwebrtc/pc/rtp_media_utils_unittest.cc97
-rw-r--r--third_party/libwebrtc/pc/rtp_parameters_conversion.cc378
-rw-r--r--third_party/libwebrtc/pc/rtp_parameters_conversion.h97
-rw-r--r--third_party/libwebrtc/pc/rtp_parameters_conversion_unittest.cc602
-rw-r--r--third_party/libwebrtc/pc/rtp_receiver.cc42
-rw-r--r--third_party/libwebrtc/pc/rtp_receiver.h101
-rw-r--r--third_party/libwebrtc/pc/rtp_receiver_proxy.h54
-rw-r--r--third_party/libwebrtc/pc/rtp_sender.cc899
-rw-r--r--third_party/libwebrtc/pc/rtp_sender.h454
-rw-r--r--third_party/libwebrtc/pc/rtp_sender_proxy.h59
-rw-r--r--third_party/libwebrtc/pc/rtp_sender_receiver_unittest.cc1954
-rw-r--r--third_party/libwebrtc/pc/rtp_transceiver.cc791
-rw-r--r--third_party/libwebrtc/pc/rtp_transceiver.h381
-rw-r--r--third_party/libwebrtc/pc/rtp_transceiver_unittest.cc485
-rw-r--r--third_party/libwebrtc/pc/rtp_transmission_manager.cc730
-rw-r--r--third_party/libwebrtc/pc/rtp_transmission_manager.h278
-rw-r--r--third_party/libwebrtc/pc/rtp_transport.cc303
-rw-r--r--third_party/libwebrtc/pc/rtp_transport.h148
-rw-r--r--third_party/libwebrtc/pc/rtp_transport_internal.h174
-rw-r--r--third_party/libwebrtc/pc/rtp_transport_unittest.cc352
-rw-r--r--third_party/libwebrtc/pc/scenario_tests/BUILD.gn24
-rw-r--r--third_party/libwebrtc/pc/scenario_tests/goog_cc_test.cc109
-rw-r--r--third_party/libwebrtc/pc/sctp_data_channel.cc1002
-rw-r--r--third_party/libwebrtc/pc/sctp_data_channel.h305
-rw-r--r--third_party/libwebrtc/pc/sctp_transport.cc203
-rw-r--r--third_party/libwebrtc/pc/sctp_transport.h101
-rw-r--r--third_party/libwebrtc/pc/sctp_transport_unittest.cc216
-rw-r--r--third_party/libwebrtc/pc/sctp_utils.cc244
-rw-r--r--third_party/libwebrtc/pc/sctp_utils.h94
-rw-r--r--third_party/libwebrtc/pc/sctp_utils_unittest.cc242
-rw-r--r--third_party/libwebrtc/pc/sdp_offer_answer.cc5503
-rw-r--r--third_party/libwebrtc/pc/sdp_offer_answer.h690
-rw-r--r--third_party/libwebrtc/pc/sdp_offer_answer_unittest.cc1100
-rw-r--r--third_party/libwebrtc/pc/sdp_state_provider.h54
-rw-r--r--third_party/libwebrtc/pc/sdp_utils.cc73
-rw-r--r--third_party/libwebrtc/pc/sdp_utils.h62
-rw-r--r--third_party/libwebrtc/pc/session_description.cc311
-rw-r--r--third_party/libwebrtc/pc/session_description.h598
-rw-r--r--third_party/libwebrtc/pc/session_description_unittest.cc133
-rw-r--r--third_party/libwebrtc/pc/simulcast_description.cc57
-rw-r--r--third_party/libwebrtc/pc/simulcast_description.h115
-rw-r--r--third_party/libwebrtc/pc/simulcast_sdp_serializer.cc395
-rw-r--r--third_party/libwebrtc/pc/simulcast_sdp_serializer.h61
-rw-r--r--third_party/libwebrtc/pc/simulcast_sdp_serializer_unittest.cc485
-rw-r--r--third_party/libwebrtc/pc/slow_peer_connection_integration_test.cc506
-rw-r--r--third_party/libwebrtc/pc/srtp_filter.cc280
-rw-r--r--third_party/libwebrtc/pc/srtp_filter.h147
-rw-r--r--third_party/libwebrtc/pc/srtp_filter_unittest.cc472
-rw-r--r--third_party/libwebrtc/pc/srtp_session.cc520
-rw-r--r--third_party/libwebrtc/pc/srtp_session.h146
-rw-r--r--third_party/libwebrtc/pc/srtp_session_unittest.cc254
-rw-r--r--third_party/libwebrtc/pc/srtp_transport.cc522
-rw-r--r--third_party/libwebrtc/pc/srtp_transport.h176
-rw-r--r--third_party/libwebrtc/pc/srtp_transport_unittest.cc428
-rw-r--r--third_party/libwebrtc/pc/stream_collection.h101
-rw-r--r--third_party/libwebrtc/pc/test/DEPS5
-rw-r--r--third_party/libwebrtc/pc/test/android_test_initializer.cc51
-rw-r--r--third_party/libwebrtc/pc/test/android_test_initializer.h20
-rw-r--r--third_party/libwebrtc/pc/test/fake_audio_capture_module.cc519
-rw-r--r--third_party/libwebrtc/pc/test/fake_audio_capture_module.h236
-rw-r--r--third_party/libwebrtc/pc/test/fake_audio_capture_module_unittest.cc198
-rw-r--r--third_party/libwebrtc/pc/test/fake_data_channel_controller.h238
-rw-r--r--third_party/libwebrtc/pc/test/fake_peer_connection_base.h371
-rw-r--r--third_party/libwebrtc/pc/test/fake_peer_connection_for_stats.h568
-rw-r--r--third_party/libwebrtc/pc/test/fake_periodic_video_source.h103
-rw-r--r--third_party/libwebrtc/pc/test/fake_periodic_video_track_source.h46
-rw-r--r--third_party/libwebrtc/pc/test/fake_rtc_certificate_generator.h222
-rw-r--r--third_party/libwebrtc/pc/test/fake_video_track_renderer.h33
-rw-r--r--third_party/libwebrtc/pc/test/fake_video_track_source.h54
-rw-r--r--third_party/libwebrtc/pc/test/frame_generator_capturer_video_track_source.h92
-rw-r--r--third_party/libwebrtc/pc/test/integration_test_helpers.cc97
-rw-r--r--third_party/libwebrtc/pc/test/integration_test_helpers.h1945
-rw-r--r--third_party/libwebrtc/pc/test/mock_channel_interface.h88
-rw-r--r--third_party/libwebrtc/pc/test/mock_data_channel.h79
-rw-r--r--third_party/libwebrtc/pc/test/mock_peer_connection_internal.h325
-rw-r--r--third_party/libwebrtc/pc/test/mock_peer_connection_observers.h599
-rw-r--r--third_party/libwebrtc/pc/test/mock_rtp_receiver_internal.h82
-rw-r--r--third_party/libwebrtc/pc/test/mock_rtp_sender_internal.h112
-rw-r--r--third_party/libwebrtc/pc/test/mock_voice_media_receive_channel_interface.h130
-rw-r--r--third_party/libwebrtc/pc/test/peer_connection_test_wrapper.cc429
-rw-r--r--third_party/libwebrtc/pc/test/peer_connection_test_wrapper.h150
-rw-r--r--third_party/libwebrtc/pc/test/rtc_stats_obtainer.h55
-rw-r--r--third_party/libwebrtc/pc/test/rtp_transport_test_util.h98
-rw-r--r--third_party/libwebrtc/pc/test/simulcast_layer_util.cc55
-rw-r--r--third_party/libwebrtc/pc/test/simulcast_layer_util.h39
-rw-r--r--third_party/libwebrtc/pc/test/srtp_test_util.h45
-rw-r--r--third_party/libwebrtc/pc/test/svc_e2e_tests.cc507
-rw-r--r--third_party/libwebrtc/pc/test/test_sdp_strings.h184
-rw-r--r--third_party/libwebrtc/pc/track_media_info_map.cc273
-rw-r--r--third_party/libwebrtc/pc/track_media_info_map.h116
-rw-r--r--third_party/libwebrtc/pc/track_media_info_map_unittest.cc332
-rw-r--r--third_party/libwebrtc/pc/transceiver_list.cc88
-rw-r--r--third_party/libwebrtc/pc/transceiver_list.h162
-rw-r--r--third_party/libwebrtc/pc/transport_stats.cc21
-rw-r--r--third_party/libwebrtc/pc/transport_stats.h52
-rw-r--r--third_party/libwebrtc/pc/usage_pattern.cc49
-rw-r--r--third_party/libwebrtc/pc/usage_pattern.h77
-rw-r--r--third_party/libwebrtc/pc/used_ids.h192
-rw-r--r--third_party/libwebrtc/pc/used_ids_unittest.cc178
-rw-r--r--third_party/libwebrtc/pc/video_rtp_receiver.cc383
-rw-r--r--third_party/libwebrtc/pc/video_rtp_receiver.h184
-rw-r--r--third_party/libwebrtc/pc/video_rtp_receiver_unittest.cc209
-rw-r--r--third_party/libwebrtc/pc/video_rtp_track_source.cc89
-rw-r--r--third_party/libwebrtc/pc/video_rtp_track_source.h93
-rw-r--r--third_party/libwebrtc/pc/video_rtp_track_source_unittest.cc140
-rw-r--r--third_party/libwebrtc/pc/video_track.cc144
-rw-r--r--third_party/libwebrtc/pc/video_track.h88
-rw-r--r--third_party/libwebrtc/pc/video_track_source.cc40
-rw-r--r--third_party/libwebrtc/pc/video_track_source.h74
-rw-r--r--third_party/libwebrtc/pc/video_track_source_proxy.cc29
-rw-r--r--third_party/libwebrtc/pc/video_track_source_proxy.h60
-rw-r--r--third_party/libwebrtc/pc/video_track_unittest.cc103
-rw-r--r--third_party/libwebrtc/pc/webrtc_sdp.cc3835
-rw-r--r--third_party/libwebrtc/pc/webrtc_sdp.h117
-rw-r--r--third_party/libwebrtc/pc/webrtc_sdp_unittest.cc5103
-rw-r--r--third_party/libwebrtc/pc/webrtc_session_description_factory.cc465
-rw-r--r--third_party/libwebrtc/pc/webrtc_session_description_factory.h151
241 files changed, 122851 insertions, 0 deletions
diff --git a/third_party/libwebrtc/pc/BUILD.gn b/third_party/libwebrtc/pc/BUILD.gn
new file mode 100644
index 0000000000..7c22a26d12
--- /dev/null
+++ b/third_party/libwebrtc/pc/BUILD.gn
@@ -0,0 +1,2890 @@
+# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+# Visibility considerations:
+#
+# Most targets in this file should have visibility ":*", as they are only
+# used internally.
+# Some functions are cleared for wider webrtc usage; these have default
+# visibility (set to "//*", not the gn default of "*").
+# These are:
+# - rtc_pc
+# - session_description
+# - simulcast_description
+# - peerconnection
+# - sdp_utils
+# - media_stream_observer
+# - video_track_source
+# - libjingle_peerconnection
+#
+# Some targets are depended on by external users for historical reasons,
+# and are therefore marked with visibility "*". This is in the process
+# of being removed.
+#
+# Some targets are only publicly visible in Chrome builds.
+# These are marked up as such.
+
+import("../webrtc.gni")
+if (is_android) {
+ import("//build/config/android/config.gni")
+ import("//build/config/android/rules.gni")
+}
+
+group("pc") {
+ deps = [ ":rtc_pc" ]
+}
+
+rtc_library("proxy") {
+ visibility = [ ":*" ]
+ sources = [
+ "proxy.cc",
+ "proxy.h",
+ ]
+ deps = [
+ "../api:scoped_refptr",
+ "../api/task_queue",
+ "../rtc_base:event_tracer",
+ "../rtc_base:rtc_event",
+ "../rtc_base:stringutils",
+ "../rtc_base:threading",
+ "../rtc_base/system:rtc_export",
+ ]
+}
+
+rtc_source_set("channel") {
+ visibility = [
+ ":*",
+ "../test/peer_scenario",
+ ]
+ sources = [
+ "channel.cc",
+ "channel.h",
+ ]
+ deps = [
+ ":channel_interface",
+ ":rtp_media_utils",
+ ":rtp_transport_internal",
+ ":session_description",
+ "../api:libjingle_peerconnection_api",
+ "../api:rtp_parameters",
+ "../api:rtp_transceiver_direction",
+ "../api:scoped_refptr",
+ "../api:sequence_checker",
+ "../api/crypto:options",
+ "../api/task_queue:pending_task_safety_flag",
+ "../api/units:timestamp",
+ "../call:rtp_interfaces",
+ "../call:rtp_receiver",
+ "../media:codec",
+ "../media:media_channel",
+ "../media:media_channel_impl",
+ "../media:rid_description",
+ "../media:rtc_media_base",
+ "../media:rtp_utils",
+ "../media:stream_params",
+ "../modules/rtp_rtcp:rtp_rtcp_format",
+ "../p2p:rtc_p2p",
+ "../rtc_base:async_packet_socket",
+ "../rtc_base:checks",
+ "../rtc_base:copy_on_write_buffer",
+ "../rtc_base:event_tracer",
+ "../rtc_base:logging",
+ "../rtc_base:macromagic",
+ "../rtc_base:network_route",
+ "../rtc_base:socket",
+ "../rtc_base:stringutils",
+ "../rtc_base:threading",
+ "../rtc_base:unique_id_generator",
+ "../rtc_base/containers:flat_set",
+ "../rtc_base/network:sent_packet",
+ "../rtc_base/third_party/sigslot",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/algorithm:container",
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+
+rtc_source_set("channel_interface") {
+ visibility = [ ":*" ]
+ sources = [ "channel_interface.h" ]
+ deps = [
+ ":rtp_transport_internal",
+ "../api:libjingle_peerconnection_api",
+ "../api:rtp_parameters",
+ "../media:media_channel",
+ "../media:rtc_media_base",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
+}
+
+rtc_source_set("dtls_srtp_transport") {
+ visibility = [ ":*" ]
+ sources = [
+ "dtls_srtp_transport.cc",
+ "dtls_srtp_transport.h",
+ ]
+ deps = [
+ ":srtp_transport",
+ "../api:dtls_transport_interface",
+ "../api:libjingle_peerconnection_api",
+ "../api:rtc_error",
+ "../p2p:rtc_p2p",
+ "../rtc_base:buffer",
+ "../rtc_base:checks",
+ "../rtc_base:logging",
+ "../rtc_base:ssl",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+}
+
+rtc_source_set("dtls_transport") {
+ visibility = [ ":*" ]
+ sources = [
+ "dtls_transport.cc",
+ "dtls_transport.h",
+ ]
+ deps = [
+ ":ice_transport",
+ "../api:dtls_transport_interface",
+ "../api:ice_transport_interface",
+ "../api:libjingle_peerconnection_api",
+ "../api:make_ref_counted",
+ "../api:scoped_refptr",
+ "../api:sequence_checker",
+ "../p2p:rtc_p2p",
+ "../rtc_base:checks",
+ "../rtc_base:logging",
+ "../rtc_base:macromagic",
+ "../rtc_base:ssl",
+ "../rtc_base:threading",
+ "../rtc_base/synchronization:mutex",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+}
+
+rtc_source_set("external_hmac") {
+ visibility = [ ":*" ]
+ sources = [
+ "external_hmac.cc",
+ "external_hmac.h",
+ ]
+ deps = [
+ "../rtc_base:logging",
+ "../rtc_base:zero_memory",
+ ]
+ if (rtc_build_libsrtp) {
+ deps += [ "//third_party/libsrtp" ]
+ }
+}
+
+rtc_source_set("ice_transport") {
+ visibility = [ ":*" ]
+ sources = [
+ "ice_transport.cc",
+ "ice_transport.h",
+ ]
+ deps = [
+ "../api:ice_transport_interface",
+ "../api:libjingle_peerconnection_api",
+ "../api:sequence_checker",
+ "../rtc_base:checks",
+ "../rtc_base:macromagic",
+ "../rtc_base:threading",
+ ]
+}
+
+rtc_source_set("jsep_transport") {
+ visibility = [ ":*" ]
+ sources = [
+ "jsep_transport.cc",
+ "jsep_transport.h",
+ ]
+ deps = [
+ ":dtls_srtp_transport",
+ ":dtls_transport",
+ ":rtcp_mux_filter",
+ ":rtp_transport",
+ ":rtp_transport_internal",
+ ":sctp_transport",
+ ":session_description",
+ ":srtp_filter",
+ ":srtp_transport",
+ ":transport_stats",
+ "../api:array_view",
+ "../api:candidate",
+ "../api:ice_transport_interface",
+ "../api:libjingle_peerconnection_api",
+ "../api:rtc_error",
+ "../api:scoped_refptr",
+ "../api:sequence_checker",
+ "../api/transport:datagram_transport_interface",
+ "../media:rtc_data_sctp_transport_internal",
+ "../p2p:rtc_p2p",
+ "../rtc_base:checks",
+ "../rtc_base:copy_on_write_buffer",
+ "../rtc_base:event_tracer",
+ "../rtc_base:logging",
+ "../rtc_base:macromagic",
+ "../rtc_base:ssl",
+ "../rtc_base:stringutils",
+ "../rtc_base:threading",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+}
+
+rtc_source_set("jsep_transport_collection") {
+ visibility = [ ":*" ]
+ sources = [
+ "jsep_transport_collection.cc",
+ "jsep_transport_collection.h",
+ ]
+ deps = [
+ ":jsep_transport",
+ ":session_description",
+ "../api:libjingle_peerconnection_api",
+ "../api:sequence_checker",
+ "../p2p:rtc_p2p",
+ "../rtc_base:checks",
+ "../rtc_base:logging",
+ "../rtc_base:macromagic",
+ "../rtc_base/system:no_unique_address",
+ ]
+ absl_deps = []
+}
+
+rtc_source_set("jsep_transport_controller") {
+ visibility = [
+ ":*",
+ "../test/peer_scenario:*",
+ ]
+ sources = [
+ "jsep_transport_controller.cc",
+ "jsep_transport_controller.h",
+ ]
+ deps = [
+ ":channel",
+ ":dtls_srtp_transport",
+ ":dtls_transport",
+ ":jsep_transport",
+ ":jsep_transport_collection",
+ ":rtp_transport",
+ ":rtp_transport_internal",
+ ":sctp_transport",
+ ":session_description",
+ ":srtp_transport",
+ ":transport_stats",
+ "../api:async_dns_resolver",
+ "../api:candidate",
+ "../api:dtls_transport_interface",
+ "../api:ice_transport_factory",
+ "../api:ice_transport_interface",
+ "../api:libjingle_peerconnection_api",
+ "../api:rtc_error",
+ "../api:rtp_parameters",
+ "../api:scoped_refptr",
+ "../api:sequence_checker",
+ "../api/crypto:options",
+ "../api/rtc_event_log",
+ "../api/transport:datagram_transport_interface",
+ "../api/transport:enums",
+ "../api/transport:sctp_transport_factory_interface",
+ "../media:rtc_data_sctp_transport_internal",
+ "../p2p:rtc_p2p",
+ "../rtc_base:callback_list",
+ "../rtc_base:checks",
+ "../rtc_base:copy_on_write_buffer",
+ "../rtc_base:event_tracer",
+ "../rtc_base:logging",
+ "../rtc_base:macromagic",
+ "../rtc_base:ssl",
+ "../rtc_base:threading",
+ "../rtc_base/third_party/sigslot",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/algorithm:container",
+ "//third_party/abseil-cpp/absl/functional:any_invocable",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+
+rtc_source_set("media_session") {
+ visibility = [ "*" ] # Used by Chrome
+ sources = [
+ "media_session.cc",
+ "media_session.h",
+ ]
+ deps = [
+ ":jsep_transport",
+ ":media_protocol_names",
+ ":rtp_media_utils",
+ ":session_description",
+ ":simulcast_description",
+ ":used_ids",
+ "../api:field_trials_view",
+ "../api:libjingle_peerconnection_api",
+ "../api:rtp_parameters",
+ "../api:rtp_transceiver_direction",
+ "../api/crypto:options",
+ "../media:codec",
+ "../media:media_constants",
+ "../media:rid_description",
+ "../media:rtc_data_sctp_transport_internal",
+ "../media:rtc_media_base",
+ "../media:rtc_sdp_video_format_utils",
+ "../media:stream_params",
+ "../p2p:rtc_p2p",
+ "../rtc_base:checks",
+ "../rtc_base:logging",
+ "../rtc_base:ssl",
+ "../rtc_base:stringutils",
+ "../rtc_base:unique_id_generator",
+ "../rtc_base/memory:always_valid_pointer",
+ "../rtc_base/third_party/base64",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/algorithm:container",
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+
+rtc_source_set("media_stream_proxy") {
+ visibility = [ ":*" ]
+ sources = [ "media_stream_proxy.h" ]
+ deps = [
+ ":proxy",
+ "../api:media_stream_interface",
+ ]
+}
+
+rtc_source_set("media_stream_track_proxy") {
+ visibility = [ ":*" ]
+ sources = [ "media_stream_track_proxy.h" ]
+ deps = [
+ ":proxy",
+ "../api:media_stream_interface",
+ ]
+}
+
+rtc_source_set("peer_connection_factory_proxy") {
+ visibility = [ ":*" ]
+ sources = [ "peer_connection_factory_proxy.h" ]
+ deps = [
+ ":proxy",
+ "../api:libjingle_peerconnection_api",
+ ]
+}
+
+rtc_source_set("peer_connection_proxy") {
+ visibility = [ ":*" ]
+ sources = [ "peer_connection_proxy.h" ]
+ deps = [
+ ":proxy",
+ "../api:libjingle_peerconnection_api",
+ ]
+}
+
+rtc_source_set("rtcp_mux_filter") {
+ visibility = [ ":*" ]
+ sources = [
+ "rtcp_mux_filter.cc",
+ "rtcp_mux_filter.h",
+ ]
+ deps = [
+ ":session_description",
+ "../rtc_base:logging",
+ ]
+}
+
+rtc_source_set("rtp_media_utils") {
+ visibility = [ ":*" ]
+ sources = [
+ "rtp_media_utils.cc",
+ "rtp_media_utils.h",
+ ]
+ deps = [
+ "../api:rtp_transceiver_direction",
+ "../rtc_base:checks",
+ ]
+}
+
+rtc_source_set("rtp_receiver_proxy") {
+ visibility = [ ":*" ]
+ sources = [ "rtp_receiver_proxy.h" ]
+ deps = [
+ ":proxy",
+ "../api:libjingle_peerconnection_api",
+ ]
+}
+
+rtc_source_set("rtp_sender_proxy") {
+ visibility = [ ":*" ]
+ sources = [ "rtp_sender_proxy.h" ]
+ deps = [
+ ":proxy",
+ "../api:libjingle_peerconnection_api",
+ "../api:rtp_sender_interface",
+ ]
+}
+
+rtc_source_set("rtp_transport") {
+ visibility = [ ":*" ]
+ sources = [
+ "rtp_transport.cc",
+ "rtp_transport.h",
+ ]
+ deps = [
+ ":rtp_transport_internal",
+ ":session_description",
+ "../api:array_view",
+ "../api/task_queue:pending_task_safety_flag",
+ "../api/units:timestamp",
+ "../call:rtp_receiver",
+ "../call:video_stream_api",
+ "../media:rtc_media_base",
+ "../media:rtp_utils",
+ "../modules/rtp_rtcp:rtp_rtcp_format",
+ "../p2p:rtc_p2p",
+ "../rtc_base:async_packet_socket",
+ "../rtc_base:checks",
+ "../rtc_base:copy_on_write_buffer",
+ "../rtc_base:event_tracer",
+ "../rtc_base:logging",
+ "../rtc_base:network_route",
+ "../rtc_base:socket",
+ "../rtc_base/network:sent_packet",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+
+rtc_source_set("rtp_transport_internal") {
+ visibility = [
+ ":*",
+ "../test/peer_scenario",
+ ]
+ sources = [ "rtp_transport_internal.h" ]
+ deps = [
+ ":session_description",
+ "../call:rtp_receiver",
+ "../p2p:rtc_p2p",
+ "../rtc_base:callback_list",
+ "../rtc_base:network_route",
+ "../rtc_base:ssl",
+ ]
+}
+
+rtc_source_set("sctp_transport") {
+ visibility = [ ":*" ]
+ sources = [
+ "sctp_transport.cc",
+ "sctp_transport.h",
+ ]
+ deps = [
+ ":dtls_transport",
+ "../api:dtls_transport_interface",
+ "../api:libjingle_peerconnection_api",
+ "../api:scoped_refptr",
+ "../api:sequence_checker",
+ "../api/transport:datagram_transport_interface",
+ "../media:rtc_data_sctp_transport_internal",
+ "../p2p:rtc_p2p",
+ "../rtc_base:checks",
+ "../rtc_base:logging",
+ "../rtc_base:macromagic",
+ "../rtc_base:threading",
+ "../rtc_base/third_party/sigslot",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+}
+
+rtc_source_set("sctp_utils") {
+ visibility = [
+ ":*",
+ "../test/fuzzers:*",
+ ]
+ sources = [
+ "sctp_utils.cc",
+ "sctp_utils.h",
+ ]
+ deps = [
+ "../api:libjingle_peerconnection_api",
+ "../api:priority",
+ "../api/transport:datagram_transport_interface",
+ "../media:media_channel",
+ "../media:rtc_data_sctp_transport_internal",
+ "../media:rtc_media_base",
+ "../net/dcsctp/public:types",
+ "../rtc_base:byte_buffer",
+ "../rtc_base:copy_on_write_buffer",
+ "../rtc_base:logging",
+ "../rtc_base:ssl",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+}
+rtc_source_set("srtp_filter") {
+ visibility = [ ":*" ]
+ sources = [
+ "srtp_filter.cc",
+ "srtp_filter.h",
+ ]
+ deps = [
+ ":session_description",
+ "../api:array_view",
+ "../api:libjingle_peerconnection_api",
+ "../api:sequence_checker",
+ "../rtc_base:buffer",
+ "../rtc_base:logging",
+ "../rtc_base:ssl",
+ "../rtc_base:zero_memory",
+ "../rtc_base/third_party/base64",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+
+rtc_source_set("srtp_session") {
+ visibility = [ ":*" ]
+ sources = [
+ "srtp_session.cc",
+ "srtp_session.h",
+ ]
+ deps = [
+ ":external_hmac",
+ "../api:array_view",
+ "../api:field_trials_view",
+ "../api:scoped_refptr",
+ "../api:sequence_checker",
+ "../modules/rtp_rtcp:rtp_rtcp_format",
+ "../rtc_base:byte_order",
+ "../rtc_base:checks",
+ "../rtc_base:logging",
+ "../rtc_base:macromagic",
+ "../rtc_base:ssl",
+ "../rtc_base:stringutils",
+ "../rtc_base:timeutils",
+ "../rtc_base/synchronization:mutex",
+ "../system_wrappers:metrics",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/base:core_headers",
+ "//third_party/abseil-cpp/absl/strings:strings",
+ ]
+ if (rtc_build_libsrtp) {
+ deps += [ "//third_party/libsrtp" ]
+ }
+}
+rtc_source_set("srtp_transport") {
+ visibility = [ ":*" ]
+ sources = [
+ "srtp_transport.cc",
+ "srtp_transport.h",
+ ]
+ deps = [
+ ":rtp_transport",
+ ":srtp_session",
+ "../api:field_trials_view",
+ "../api:libjingle_peerconnection_api",
+ "../api:rtc_error",
+ "../media:rtc_media_base",
+ "../media:rtp_utils",
+ "../modules/rtp_rtcp:rtp_rtcp_format",
+ "../p2p:rtc_p2p",
+ "../rtc_base:async_packet_socket",
+ "../rtc_base:buffer",
+ "../rtc_base:checks",
+ "../rtc_base:copy_on_write_buffer",
+ "../rtc_base:event_tracer",
+ "../rtc_base:logging",
+ "../rtc_base:network_route",
+ "../rtc_base:safe_conversions",
+ "../rtc_base:ssl",
+ "../rtc_base:zero_memory",
+ "../rtc_base/third_party/base64",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+
+rtc_source_set("transport_stats") {
+ visibility = [ ":*" ]
+ sources = [
+ "transport_stats.cc",
+ "transport_stats.h",
+ ]
+ deps = [
+ "../api:dtls_transport_interface",
+ "../api:libjingle_peerconnection_api",
+ "../p2p:rtc_p2p",
+ "../rtc_base:ssl",
+ ]
+}
+
+rtc_source_set("used_ids") {
+ visibility = [ ":*" ]
+ sources = [ "used_ids.h" ]
+ deps = [
+ "../api:rtp_parameters",
+ "../media:codec",
+ "../media:rtc_media_base",
+ "../rtc_base:checks",
+ "../rtc_base:logging",
+ ]
+}
+
+rtc_source_set("video_track_source_proxy") {
+ visibility = [ "*" ] # Used by Chrome
+ sources = [
+ "video_track_source_proxy.cc",
+ "video_track_source_proxy.h",
+ ]
+ deps = [
+ ":proxy",
+ "../api:libjingle_peerconnection_api",
+ "../api:media_stream_interface",
+ "../api:scoped_refptr",
+ "../api:video_track_source_constraints",
+ "../api/video:recordable_encoded_frame",
+ "../api/video:video_frame",
+ "../rtc_base:threading",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+}
+
+rtc_source_set("session_description") {
+ # TODO(bugs.webrtc.org/13661): Reduce visibility if possible
+ visibility = [ "*" ] # Used by Chrome and others
+
+ sources = [
+ "session_description.cc",
+ "session_description.h",
+ ]
+ deps = [
+ ":media_protocol_names",
+ ":simulcast_description",
+ "../api:libjingle_peerconnection_api",
+ "../api:rtp_parameters",
+ "../api:rtp_transceiver_direction",
+ "../media:codec",
+ "../media:media_channel",
+ "../media:media_constants",
+ "../media:rid_description",
+ "../media:rtc_media_base",
+ "../media:stream_params",
+ "../p2p:rtc_p2p",
+ "../rtc_base:checks",
+ "../rtc_base:socket_address",
+ "../rtc_base:stringutils",
+ "../rtc_base/system:rtc_export",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/algorithm:container",
+ "//third_party/abseil-cpp/absl/memory:memory",
+ "//third_party/abseil-cpp/absl/strings",
+ ]
+}
+
+rtc_source_set("simulcast_description") {
+ sources = [
+ "simulcast_description.cc",
+ "simulcast_description.h",
+ ]
+ deps = [
+ "../rtc_base:checks",
+ "../rtc_base:socket_address",
+ "../rtc_base/system:rtc_export",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
+}
+
+rtc_source_set("rtc_pc") {
+ if (build_with_chromium) {
+ visibility = [ "*" ]
+ }
+ allow_poison = [ "audio_codecs" ] # TODO(bugs.webrtc.org/8396): Remove.
+ deps = [ "../media:rtc_audio_video" ]
+}
+
+rtc_library("media_protocol_names") {
+ visibility = [ ":*" ]
+ sources = [
+ "media_protocol_names.cc",
+ "media_protocol_names.h",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
+}
+
+rtc_source_set("peerconnection") {
+ # TODO(bugs.webrtc.org/13661): Reduce visibility if possible
+ visibility = [ "*" ] # Used by Chromium and others
+ cflags = []
+ sources = []
+
+ deps = [
+ ":audio_rtp_receiver",
+ ":audio_track",
+ ":connection_context",
+ ":data_channel_controller",
+ ":data_channel_utils",
+ ":dtmf_sender",
+ ":ice_server_parsing",
+ ":jitter_buffer_delay",
+ ":jsep_ice_candidate",
+ ":jsep_session_description",
+ ":legacy_stats_collector",
+ ":legacy_stats_collector_interface",
+ ":local_audio_source",
+ ":media_protocol_names",
+ ":media_stream",
+ ":media_stream_observer",
+ ":peer_connection",
+ ":peer_connection_factory",
+ ":peer_connection_internal",
+ ":peer_connection_message_handler",
+ ":proxy",
+ ":remote_audio_source",
+ ":rtc_stats_collector",
+ ":rtc_stats_traversal",
+ ":rtp_parameters_conversion",
+ ":rtp_receiver",
+ ":rtp_sender",
+ ":rtp_transceiver",
+ ":rtp_transmission_manager",
+ ":sctp_data_channel",
+ ":sdp_offer_answer",
+ ":sdp_state_provider",
+ ":sdp_utils",
+ ":session_description",
+ ":simulcast_description",
+ ":simulcast_sdp_serializer",
+ ":stream_collection",
+ ":track_media_info_map",
+ ":transceiver_list",
+ ":usage_pattern",
+ ":video_rtp_receiver",
+ ":video_track",
+ ":video_track_source",
+ ":webrtc_sdp",
+ ":webrtc_session_description_factory",
+ "../api:array_view",
+ "../api:async_dns_resolver",
+ "../api:audio_options_api",
+ "../api:call_api",
+ "../api:callfactory_api",
+ "../api:fec_controller_api",
+ "../api:field_trials_view",
+ "../api:frame_transformer_interface",
+ "../api:ice_transport_factory",
+ "../api:libjingle_logging_api",
+ "../api:libjingle_peerconnection_api",
+ "../api:media_stream_interface",
+ "../api:network_state_predictor_api",
+ "../api:packet_socket_factory",
+ "../api:priority",
+ "../api:rtc_error",
+ "../api:rtc_event_log_output_file",
+ "../api:rtc_stats_api",
+ "../api:rtp_parameters",
+ "../api:rtp_transceiver_direction",
+ "../api:scoped_refptr",
+ "../api:sequence_checker",
+ "../api/adaptation:resource_adaptation_api",
+ "../api/audio_codecs:audio_codecs_api",
+ "../api/crypto:frame_decryptor_interface",
+ "../api/crypto:options",
+ "../api/neteq:neteq_api",
+ "../api/rtc_event_log",
+ "../api/task_queue",
+ "../api/task_queue:pending_task_safety_flag",
+ "../api/transport:bitrate_settings",
+ "../api/transport:datagram_transport_interface",
+ "../api/transport:enums",
+ "../api/transport:field_trial_based_config",
+ "../api/transport:network_control",
+ "../api/transport:sctp_transport_factory_interface",
+ "../api/units:data_rate",
+ "../api/video:builtin_video_bitrate_allocator_factory",
+ "../api/video:video_bitrate_allocator_factory",
+ "../api/video:video_codec_constants",
+ "../api/video:video_frame",
+ "../api/video:video_rtp_headers",
+ "../api/video_codecs:video_codecs_api",
+ "../call:call_interfaces",
+ "../call:rtp_interfaces",
+ "../call:rtp_sender",
+ "../common_video",
+ "../logging:ice_log",
+ "../media:rtc_data_sctp_transport_internal",
+ "../media:rtc_media_base",
+ "../media:rtc_media_config",
+ "../modules/audio_processing:audio_processing_statistics",
+ "../modules/rtp_rtcp:rtp_rtcp_format",
+ "../p2p:rtc_p2p",
+ "../rtc_base:callback_list",
+ "../rtc_base:checks",
+ "../rtc_base:ip_address",
+ "../rtc_base:network_constants",
+ "../rtc_base:rtc_operations_chain",
+ "../rtc_base:safe_minmax",
+ "../rtc_base:socket_address",
+ "../rtc_base:threading",
+ "../rtc_base:weak_ptr",
+ "../rtc_base/experiments:field_trial_parser",
+ "../rtc_base/network:sent_packet",
+ "../rtc_base/synchronization:mutex",
+ "../rtc_base/system:file_wrapper",
+ "../rtc_base/system:no_unique_address",
+ "../rtc_base/system:rtc_export",
+ "../rtc_base/system:unused",
+ "../rtc_base/third_party/base64",
+ "../rtc_base/third_party/sigslot",
+ "../stats",
+ "../system_wrappers",
+ "../system_wrappers:field_trial",
+ "../system_wrappers:metrics",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/algorithm:container",
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+
+rtc_library("sctp_data_channel") {
+ visibility = [ ":*" ]
+ sources = [
+ "sctp_data_channel.cc",
+ "sctp_data_channel.h",
+ ]
+ deps = [
+ ":data_channel_utils",
+ ":proxy",
+ ":sctp_utils",
+ "../api:libjingle_peerconnection_api",
+ "../api:priority",
+ "../api:rtc_error",
+ "../api:scoped_refptr",
+ "../api:sequence_checker",
+ "../api/task_queue:pending_task_safety_flag",
+ "../api/transport:datagram_transport_interface",
+ "../media:media_channel",
+ "../media:rtc_data_sctp_transport_internal",
+ "../media:rtc_media_base",
+ "../rtc_base:checks",
+ "../rtc_base:copy_on_write_buffer",
+ "../rtc_base:logging",
+ "../rtc_base:macromagic",
+ "../rtc_base:ssl",
+ "../rtc_base:threading",
+ "../rtc_base:weak_ptr",
+ "../rtc_base/containers:flat_set",
+ "../rtc_base/system:no_unique_address",
+ "../rtc_base/system:unused",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+}
+
+rtc_library("data_channel_utils") {
+ # TODO(bugs.webrtc.org/13661): Reduce visibility if possible
+ visibility = [ "*" ] # Known to be used externally
+
+ sources = [
+ "data_channel_utils.cc",
+ "data_channel_utils.h",
+ ]
+ deps = [
+ "../api:libjingle_peerconnection_api",
+ "../media:rtc_media_base",
+ "../rtc_base:checks",
+ ]
+}
+
+rtc_library("connection_context") {
+ visibility = [ ":*" ]
+ sources = [
+ "connection_context.cc",
+ "connection_context.h",
+ ]
+ deps = [
+ "../api:callfactory_api",
+ "../api:field_trials_view",
+ "../api:libjingle_peerconnection_api",
+ "../api:media_stream_interface",
+ "../api:refcountedbase",
+ "../api:scoped_refptr",
+ "../api:sequence_checker",
+ "../api/neteq:neteq_api",
+ "../api/transport:field_trial_based_config",
+ "../api/transport:sctp_transport_factory_interface",
+ "../media:rtc_data_sctp_transport_factory",
+ "../media:rtc_media_base",
+ "../p2p:rtc_p2p",
+ "../rtc_base:checks",
+ "../rtc_base:macromagic",
+ "../rtc_base:network",
+ "../rtc_base:rtc_certificate_generator",
+ "../rtc_base:socket_factory",
+ "../rtc_base:socket_server",
+ "../rtc_base:ssl",
+ "../rtc_base:threading",
+ "../rtc_base:timeutils",
+ "../rtc_base/memory:always_valid_pointer",
+ ]
+}
+
+rtc_source_set("data_channel_controller") {
+ visibility = [ ":*" ]
+ sources = [
+ "data_channel_controller.cc",
+ "data_channel_controller.h",
+ ]
+ deps = [
+ ":data_channel_utils",
+ ":peer_connection_internal",
+ ":sctp_data_channel",
+ ":sctp_utils",
+ "../api:libjingle_peerconnection_api",
+ "../api:rtc_error",
+ "../api:scoped_refptr",
+ "../api:sequence_checker",
+ "../api/task_queue:pending_task_safety_flag",
+ "../api/transport:datagram_transport_interface",
+ "../media:media_channel",
+ "../media:rtc_media_base",
+ "../rtc_base:checks",
+ "../rtc_base:copy_on_write_buffer",
+ "../rtc_base:logging",
+ "../rtc_base:macromagic",
+ "../rtc_base:ssl",
+ "../rtc_base:threading",
+ "../rtc_base:weak_ptr",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/algorithm:container",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+
+rtc_source_set("peer_connection_internal") {
+ visibility = [ ":*" ]
+ sources = [ "peer_connection_internal.h" ]
+ deps = [
+ ":jsep_transport_controller",
+ ":peer_connection_message_handler",
+ ":rtp_transceiver",
+ ":rtp_transmission_manager",
+ ":sctp_data_channel",
+ "../api:libjingle_peerconnection_api",
+ "../call:call_interfaces",
+ "../modules/audio_device",
+ ]
+
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+}
+
+rtc_source_set("rtc_stats_collector") {
+ visibility = [
+ ":*",
+ "../api:*",
+ ]
+ sources = [
+ "rtc_stats_collector.cc",
+ "rtc_stats_collector.h",
+ ]
+ deps = [
+ ":channel",
+ ":channel_interface",
+ ":data_channel_utils",
+ ":peer_connection_internal",
+ ":rtc_stats_traversal",
+ ":rtp_receiver",
+ ":rtp_receiver_proxy",
+ ":rtp_sender",
+ ":rtp_sender_proxy",
+ ":rtp_transceiver",
+ ":sctp_data_channel",
+ ":track_media_info_map",
+ ":transport_stats",
+ ":webrtc_sdp",
+ "../api:array_view",
+ "../api:candidate",
+ "../api:dtls_transport_interface",
+ "../api:libjingle_peerconnection_api",
+ "../api:media_stream_interface",
+ "../api:rtc_stats_api",
+ "../api:rtp_parameters",
+ "../api:scoped_refptr",
+ "../api:sequence_checker",
+ "../api/task_queue:task_queue",
+ "../api/units:time_delta",
+ "../api/video:video_rtp_headers",
+ "../api/video_codecs:scalability_mode",
+ "../call:call_interfaces",
+ "../common_video:common_video",
+ "../media:media_channel",
+ "../media:media_channel_impl",
+ "../media:rtc_media_base",
+ "../modules/audio_device",
+ "../modules/audio_processing:audio_processing_statistics",
+ "../modules/rtp_rtcp:rtp_rtcp_format",
+ "../p2p:rtc_p2p",
+ "../rtc_base:checks",
+ "../rtc_base:event_tracer",
+ "../rtc_base:ip_address",
+ "../rtc_base:logging",
+ "../rtc_base:network_constants",
+ "../rtc_base:refcount",
+ "../rtc_base:rtc_event",
+ "../rtc_base:socket_address",
+ "../rtc_base:ssl",
+ "../rtc_base:stringutils",
+ "../rtc_base:threading",
+ "../rtc_base:timeutils",
+ "../rtc_base/containers:flat_set",
+ "../rtc_base/synchronization:mutex",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/functional:bind_front",
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+
+rtc_source_set("rtc_stats_traversal") {
+ visibility = [ ":*" ]
+ sources = [
+ "rtc_stats_traversal.cc",
+ "rtc_stats_traversal.h",
+ ]
+ deps = [
+ "../api:rtc_stats_api",
+ "../api:scoped_refptr",
+ "../rtc_base:checks",
+ ]
+}
+
+rtc_source_set("sdp_offer_answer") {
+ visibility = [ ":*" ]
+ sources = [
+ "sdp_offer_answer.cc", # TODO: Make separate target when not circular
+ "sdp_offer_answer.h", # dependent on peerconnection.h
+ ]
+ deps = [
+ ":channel",
+ ":channel_interface",
+ ":connection_context",
+ ":data_channel_controller",
+ ":dtls_transport",
+ ":jsep_transport_controller",
+ ":legacy_stats_collector",
+ ":media_session",
+ ":media_stream",
+ ":media_stream_observer",
+ ":media_stream_proxy",
+ ":peer_connection_internal",
+ ":peer_connection_message_handler",
+ ":rtp_media_utils",
+ ":rtp_receiver",
+ ":rtp_receiver_proxy",
+ ":rtp_sender",
+ ":rtp_sender_proxy",
+ ":rtp_transceiver",
+ ":rtp_transmission_manager",
+ ":sdp_state_provider",
+ ":session_description",
+ ":simulcast_description",
+ ":stream_collection",
+ ":transceiver_list",
+ ":usage_pattern",
+ ":used_ids",
+ ":webrtc_session_description_factory",
+ "../api:array_view",
+ "../api:audio_options_api",
+ "../api:candidate",
+ "../api:dtls_transport_interface",
+ "../api:field_trials_view",
+ "../api:libjingle_peerconnection_api",
+ "../api:media_stream_interface",
+ "../api:rtc_error",
+ "../api:rtp_parameters",
+ "../api:rtp_sender_interface",
+ "../api:rtp_transceiver_direction",
+ "../api:scoped_refptr",
+ "../api:sequence_checker",
+ "../api/crypto:options",
+ "../api/video:builtin_video_bitrate_allocator_factory",
+ "../api/video:video_bitrate_allocator_factory",
+ "../media:codec",
+ "../media:media_channel",
+ "../media:rid_description",
+ "../media:rtc_media_base",
+ "../media:stream_params",
+ "../p2p:rtc_p2p",
+ "../rtc_base:checks",
+ "../rtc_base:event_tracer",
+ "../rtc_base:logging",
+ "../rtc_base:macromagic",
+ "../rtc_base:rtc_operations_chain",
+ "../rtc_base:ssl",
+ "../rtc_base:stringutils",
+ "../rtc_base:threading",
+ "../rtc_base:unique_id_generator",
+ "../rtc_base:weak_ptr",
+ "../system_wrappers:metrics",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/algorithm:container",
+ "//third_party/abseil-cpp/absl/memory:memory",
+ "//third_party/abseil-cpp/absl/strings:strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+rtc_source_set("jsep_ice_candidate") {
+ visibility = [ ":*" ]
+}
+rtc_source_set("jsep_session_description") {
+ visibility = [ ":*" ]
+}
+rtc_source_set("local_audio_source") {
+ visibility = [ ":*" ]
+ sources = [
+ "local_audio_source.cc",
+ "local_audio_source.h",
+ ]
+ deps = [
+ "../api:audio_options_api",
+ "../api:media_stream_interface",
+ "../api:scoped_refptr",
+ ]
+}
+rtc_source_set("peer_connection") {
+ visibility = [ ":*" ]
+ sources = [
+ "peer_connection.cc",
+ "peer_connection.h",
+ ]
+ deps = [
+ ":channel",
+ ":channel_interface",
+ ":connection_context",
+ ":data_channel_controller",
+ ":data_channel_utils",
+ ":dtls_transport",
+ ":ice_server_parsing",
+ ":jsep_transport_controller",
+ ":legacy_stats_collector",
+ ":peer_connection_internal",
+ ":peer_connection_message_handler",
+ ":rtc_stats_collector",
+ ":rtp_receiver",
+ ":rtp_receiver_proxy",
+ ":rtp_sender",
+ ":rtp_sender_proxy",
+ ":rtp_transceiver",
+ ":rtp_transmission_manager",
+ ":rtp_transport_internal",
+ ":sctp_data_channel",
+ ":sctp_transport",
+ ":sdp_offer_answer",
+ ":session_description",
+ ":simulcast_description",
+ ":transceiver_list",
+ ":transport_stats",
+ ":usage_pattern",
+ ":webrtc_session_description_factory",
+ "../api:async_dns_resolver",
+ "../api:candidate",
+ "../api:dtls_transport_interface",
+ "../api:field_trials_view",
+ "../api:ice_transport_interface",
+ "../api:libjingle_logging_api",
+ "../api:libjingle_peerconnection_api",
+ "../api:media_stream_interface",
+ "../api:rtc_error",
+ "../api:rtc_stats_api",
+ "../api:rtp_parameters",
+ "../api:rtp_sender_interface",
+ "../api:rtp_transceiver_direction",
+ "../api:scoped_refptr",
+ "../api:sequence_checker",
+ "../api:turn_customizer",
+ "../api/adaptation:resource_adaptation_api",
+ "../api/crypto:options",
+ "../api/rtc_event_log",
+ "../api/task_queue:pending_task_safety_flag",
+ "../api/transport:bitrate_settings",
+ "../api/transport:datagram_transport_interface",
+ "../api/transport:enums",
+ "../api/video:video_codec_constants",
+ "../call:call_interfaces",
+ "../media:media_channel",
+ "../media:rid_description",
+ "../media:rtc_media_base",
+ "../media:rtc_media_config",
+ "../media:stream_params",
+ "../modules/rtp_rtcp:rtp_rtcp_format",
+ "../p2p:rtc_p2p",
+ "../rtc_base:checks",
+ "../rtc_base:copy_on_write_buffer",
+ "../rtc_base:event_tracer",
+ "../rtc_base:ip_address",
+ "../rtc_base:logging",
+ "../rtc_base:macromagic",
+ "../rtc_base:net_helper",
+ "../rtc_base:network",
+ "../rtc_base:network_constants",
+ "../rtc_base:socket_address",
+ "../rtc_base:ssl",
+ "../rtc_base:stringutils",
+ "../rtc_base:threading",
+ "../rtc_base:unique_id_generator",
+ "../rtc_base:weak_ptr",
+ "../system_wrappers:metrics",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/algorithm:container",
+ "//third_party/abseil-cpp/absl/strings:strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+
+rtc_source_set("simulcast_sdp_serializer") {
+ visibility = [ ":*" ]
+ sources = [
+ "simulcast_sdp_serializer.cc",
+ "simulcast_sdp_serializer.h",
+ ]
+ deps = [
+ ":session_description",
+ ":simulcast_description",
+ "../api:rtc_error",
+ "../media:rid_description",
+ "../media:rtc_media_base",
+ "../modules/rtp_rtcp:rtp_rtcp_format",
+ "../rtc_base:checks",
+ "../rtc_base:stringutils",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/algorithm:container",
+ "//third_party/abseil-cpp/absl/strings:strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+rtc_source_set("sdp_utils") {
+ sources = [
+ "sdp_utils.cc",
+ "sdp_utils.h",
+ ]
+ deps = [
+ ":session_description",
+ "../api:libjingle_peerconnection_api",
+ "../p2p:rtc_p2p",
+ "../rtc_base:checks",
+ "../rtc_base/system:rtc_export",
+ ]
+}
+rtc_source_set("legacy_stats_collector") {
+ visibility = [ ":*" ]
+ sources = [
+ "legacy_stats_collector.cc",
+ "legacy_stats_collector.h",
+ ]
+ deps = [
+ ":channel",
+ ":channel_interface",
+ ":data_channel_utils",
+ ":legacy_stats_collector_interface",
+ ":peer_connection_internal",
+ ":rtp_receiver",
+ ":rtp_receiver_proxy",
+ ":rtp_sender_proxy",
+ ":rtp_transceiver",
+ ":transport_stats",
+ "../api:candidate",
+ "../api:field_trials_view",
+ "../api:libjingle_peerconnection_api",
+ "../api:media_stream_interface",
+ "../api:rtp_parameters",
+ "../api:rtp_sender_interface",
+ "../api:scoped_refptr",
+ "../api:sequence_checker",
+ "../api/audio_codecs:audio_codecs_api",
+ "../api/video:video_rtp_headers",
+ "../call:call_interfaces",
+ "../media:media_channel",
+ "../media:rtc_media_base",
+ "../modules/audio_processing:audio_processing_statistics",
+ "../p2p:rtc_p2p",
+ "../rtc_base:checks",
+ "../rtc_base:event_tracer",
+ "../rtc_base:ip_address",
+ "../rtc_base:logging",
+ "../rtc_base:macromagic",
+ "../rtc_base:network_constants",
+ "../rtc_base:socket_address",
+ "../rtc_base:ssl",
+ "../rtc_base:stringutils",
+ "../rtc_base:threading",
+ "../rtc_base:timeutils",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+rtc_source_set("stream_collection") {
+ visibility = [ ":*" ]
+ sources = [ "stream_collection.h" ]
+ deps = [ "../api:libjingle_peerconnection_api" ]
+}
+rtc_source_set("track_media_info_map") {
+ visibility = [ ":*" ]
+ sources = [
+ "track_media_info_map.cc",
+ "track_media_info_map.h",
+ ]
+ deps = [
+ ":rtp_receiver",
+ ":rtp_sender",
+ "../api:array_view",
+ "../api:media_stream_interface",
+ "../api:rtp_parameters",
+ "../api:scoped_refptr",
+ "../media:media_channel",
+ "../media:rtc_media_base",
+ "../media:stream_params",
+ "../rtc_base:checks",
+ "../rtc_base:refcount",
+ "../rtc_base:threading",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+}
+rtc_source_set("webrtc_sdp") {
+ # TODO(bugs.webrtc.org/13661): Reduce visibility if possible
+ visibility = [ "*" ] # Used by Chrome and more
+
+ sources = [
+ "jsep_ice_candidate.cc",
+ "jsep_session_description.cc",
+ "webrtc_sdp.cc",
+ "webrtc_sdp.h",
+ ]
+ deps = [
+ ":media_protocol_names",
+ ":media_session",
+ ":session_description",
+ ":simulcast_description",
+ ":simulcast_sdp_serializer",
+ "../api:candidate",
+ "../api:libjingle_peerconnection_api",
+ "../api:rtc_error",
+ "../api:rtp_parameters",
+ "../api:rtp_transceiver_direction",
+ "../media:codec",
+ "../media:media_constants",
+ "../media:rid_description",
+ "../media:rtc_data_sctp_transport_internal",
+ "../media:rtc_media_base",
+ "../media:rtp_utils",
+ "../media:stream_params",
+ "../p2p:rtc_p2p",
+ "../rtc_base:checks",
+ "../rtc_base:ip_address",
+ "../rtc_base:logging",
+ "../rtc_base:macromagic",
+ "../rtc_base:net_helper",
+ "../rtc_base:network_constants",
+ "../rtc_base:socket_address",
+ "../rtc_base:ssl",
+ "../rtc_base:stringutils",
+ "../rtc_base/system:rtc_export",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/algorithm:container",
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+rtc_source_set("webrtc_session_description_factory") {
+ visibility = [ ":*" ]
+ sources = [
+ "webrtc_session_description_factory.cc",
+ "webrtc_session_description_factory.h",
+ ]
+ deps = [
+ ":connection_context",
+ ":media_session",
+ ":sdp_state_provider",
+ ":session_description",
+ "../api:libjingle_peerconnection_api",
+ "../api:rtc_error",
+ "../api:scoped_refptr",
+ "../api:sequence_checker",
+ "../api/task_queue",
+ "../p2p:rtc_p2p",
+ "../rtc_base:checks",
+ "../rtc_base:logging",
+ "../rtc_base:rtc_certificate_generator",
+ "../rtc_base:ssl",
+ "../rtc_base:stringutils",
+ "../rtc_base:unique_id_generator",
+ "../rtc_base:weak_ptr",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/algorithm:container",
+ "//third_party/abseil-cpp/absl/functional:any_invocable",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+
+rtc_library("ice_server_parsing") {
+ # TODO(bugs.webrtc.org/13661): Reduce visibility if possible
+ visibility = [ "*" ] # Known to be used externally
+
+ sources = [
+ "ice_server_parsing.cc",
+ "ice_server_parsing.h",
+ ]
+ deps = [
+ "../api:libjingle_peerconnection_api",
+ "../api:rtc_error",
+ "../p2p:rtc_p2p",
+ "../rtc_base:checks",
+ "../rtc_base:ip_address",
+ "../rtc_base:logging",
+ "../rtc_base:macromagic",
+ "../rtc_base:socket_address",
+ "../rtc_base:stringutils",
+ "../rtc_base/system:rtc_export",
+ ]
+}
+
+rtc_library("media_stream_observer") {
+ sources = [
+ "media_stream_observer.cc",
+ "media_stream_observer.h",
+ ]
+ deps = [
+ "../api:media_stream_interface",
+ "../api:scoped_refptr",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container" ]
+}
+rtc_source_set("peer_connection_factory") {
+ # TODO(bugs.webrtc.org/13661): Reduce visibility if possible
+ visibility = [ "*" ] # Known to be used externally
+ sources = [
+ "peer_connection_factory.cc",
+ "peer_connection_factory.h",
+ ]
+ deps = [
+ ":local_audio_source",
+ ":media_stream_proxy",
+ ":media_stream_track_proxy",
+ ":peer_connection",
+ ":peer_connection_factory_proxy",
+ ":peer_connection_proxy",
+ "../api:audio_options_api",
+ "../api:callfactory_api",
+ "../api:fec_controller_api",
+ "../api:field_trials_view",
+ "../api:ice_transport_interface",
+ "../api:libjingle_peerconnection_api",
+ "../api:media_stream_interface",
+ "../api:network_state_predictor_api",
+ "../api:packet_socket_factory",
+ "../api:rtc_error",
+ "../api:rtp_parameters",
+ "../api:scoped_refptr",
+ "../api:sequence_checker",
+ "../api/metronome",
+ "../api/neteq:neteq_api",
+ "../api/rtc_event_log:rtc_event_log",
+ "../api/task_queue:task_queue",
+ "../api/transport:bitrate_settings",
+ "../api/transport:network_control",
+ "../api/transport:sctp_transport_factory_interface",
+ "../api/units:data_rate",
+ "../call:call_interfaces",
+ "../call:rtp_interfaces",
+ "../call:rtp_sender",
+ "../media:rtc_media_base",
+ "../p2p:rtc_p2p",
+ "../pc:audio_track",
+ "../pc:connection_context",
+ "../pc:media_stream",
+ "../pc:rtp_parameters_conversion",
+ "../pc:session_description",
+ "../pc:video_track",
+ "../rtc_base:checks",
+ "../rtc_base:logging",
+ "../rtc_base:macromagic",
+ "../rtc_base:rtc_certificate_generator",
+ "../rtc_base:safe_conversions",
+ "../rtc_base:threading",
+ "../rtc_base/experiments:field_trial_parser",
+ "../rtc_base/system:file_wrapper",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings:strings" ]
+}
+
+rtc_library("peer_connection_message_handler") {
+ visibility = [ ":*" ]
+ sources = [
+ "peer_connection_message_handler.cc",
+ "peer_connection_message_handler.h",
+ ]
+ deps = [
+ ":legacy_stats_collector_interface",
+ "../api:libjingle_peerconnection_api",
+ "../api:media_stream_interface",
+ "../api:rtc_error",
+ "../api:scoped_refptr",
+ "../api:sequence_checker",
+ "../api/task_queue",
+ "../api/task_queue:pending_task_safety_flag",
+ "../rtc_base:checks",
+ ]
+}
+
+rtc_library("usage_pattern") {
+ visibility = [ ":*" ]
+ sources = [
+ "usage_pattern.cc",
+ "usage_pattern.h",
+ ]
+ deps = [
+ "../api:libjingle_peerconnection_api",
+ "../rtc_base:logging",
+ "../system_wrappers:metrics",
+ ]
+}
+
+rtc_library("rtp_transceiver") {
+ visibility = [ ":*" ]
+ sources = [
+ "rtp_transceiver.cc",
+ "rtp_transceiver.h",
+ ]
+ deps = [
+ ":channel",
+ ":channel_interface",
+ ":connection_context",
+ ":proxy",
+ ":rtp_media_utils",
+ ":rtp_parameters_conversion",
+ ":rtp_receiver",
+ ":rtp_receiver_proxy",
+ ":rtp_sender",
+ ":rtp_sender_proxy",
+ ":rtp_transport_internal",
+ ":session_description",
+ "../api:array_view",
+ "../api:audio_options_api",
+ "../api:field_trials_view",
+ "../api:libjingle_peerconnection_api",
+ "../api:rtc_error",
+ "../api:rtp_parameters",
+ "../api:rtp_sender_interface",
+ "../api:rtp_transceiver_direction",
+ "../api:scoped_refptr",
+ "../api:sequence_checker",
+ "../api/audio_codecs:audio_codecs_api",
+ "../api/crypto:options",
+ "../api/task_queue",
+ "../api/task_queue:pending_task_safety_flag",
+ "../api/video:video_bitrate_allocator_factory",
+ "../media:codec",
+ "../media:media_channel",
+ "../media:media_channel_impl",
+ "../media:media_constants",
+ "../media:rtc_media_base",
+ "../media:rtc_media_config",
+ "../rtc_base:checks",
+ "../rtc_base:logging",
+ "../rtc_base:macromagic",
+ "../rtc_base:threading",
+ "../rtc_base/third_party/sigslot",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/algorithm:container",
+ "//third_party/abseil-cpp/absl/memory",
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+
+rtc_library("rtp_transmission_manager") {
+ visibility = [ ":*" ]
+ sources = [
+ "rtp_transmission_manager.cc",
+ "rtp_transmission_manager.h",
+ ]
+ deps = [
+ ":audio_rtp_receiver",
+ ":channel",
+ ":channel_interface",
+ ":legacy_stats_collector_interface",
+ ":rtp_receiver",
+ ":rtp_receiver_proxy",
+ ":rtp_sender",
+ ":rtp_sender_proxy",
+ ":rtp_transceiver",
+ ":transceiver_list",
+ ":usage_pattern",
+ ":video_rtp_receiver",
+ "../api:libjingle_peerconnection_api",
+ "../api:media_stream_interface",
+ "../api:rtc_error",
+ "../api:rtp_parameters",
+ "../api:rtp_sender_interface",
+ "../api:rtp_transceiver_direction",
+ "../api:scoped_refptr",
+ "../api:sequence_checker",
+ "../media:media_channel",
+ "../media:rtc_media_base",
+ "../rtc_base:checks",
+ "../rtc_base:logging",
+ "../rtc_base:macromagic",
+ "../rtc_base:ssl",
+ "../rtc_base:threading",
+ "../rtc_base:weak_ptr",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/algorithm:container",
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+
+rtc_library("transceiver_list") {
+ visibility = [ ":*" ]
+ sources = [
+ "transceiver_list.cc",
+ "transceiver_list.h",
+ ]
+ deps = [
+ ":rtp_transceiver",
+ "../api:libjingle_peerconnection_api",
+ "../api:rtc_error",
+ "../api:rtp_parameters",
+ "../api:rtp_sender_interface",
+ "../api:scoped_refptr",
+ "../api:sequence_checker",
+ "../rtc_base:checks",
+ "../rtc_base:macromagic",
+ "../rtc_base/system:no_unique_address",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+}
+
+rtc_library("rtp_receiver") {
+ visibility = [ ":*" ]
+ sources = [
+ "rtp_receiver.cc",
+ "rtp_receiver.h",
+ ]
+ deps = [
+ ":media_stream",
+ ":media_stream_proxy",
+ ":video_track_source",
+ "../api:dtls_transport_interface",
+ "../api:libjingle_peerconnection_api",
+ "../api:media_stream_interface",
+ "../api:rtp_parameters",
+ "../api:scoped_refptr",
+ "../api/crypto:frame_decryptor_interface",
+ "../api/video:video_frame",
+ "../media:media_channel",
+ "../media:rtc_media_base",
+ "../rtc_base:checks",
+ "../rtc_base:logging",
+ "../rtc_base:threading",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/algorithm:container",
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+
+rtc_library("audio_rtp_receiver") {
+ visibility = [ ":*" ]
+ sources = [
+ "audio_rtp_receiver.cc",
+ "audio_rtp_receiver.h",
+ ]
+ deps = [
+ ":audio_track",
+ ":jitter_buffer_delay",
+ ":media_stream",
+ ":media_stream_track_proxy",
+ ":remote_audio_source",
+ ":rtp_receiver",
+ "../api:dtls_transport_interface",
+ "../api:frame_transformer_interface",
+ "../api:libjingle_peerconnection_api",
+ "../api:media_stream_interface",
+ "../api:rtp_parameters",
+ "../api:scoped_refptr",
+ "../api:sequence_checker",
+ "../api/crypto:frame_decryptor_interface",
+ "../api/task_queue:pending_task_safety_flag",
+ "../api/transport/rtp:rtp_source",
+ "../media:media_channel",
+ "../media:rtc_media_base",
+ "../rtc_base:checks",
+ "../rtc_base:macromagic",
+ "../rtc_base:threading",
+ "../rtc_base/system:no_unique_address",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/algorithm:container",
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+
+rtc_library("video_rtp_receiver") {
+ visibility = [ ":*" ]
+ sources = [
+ "video_rtp_receiver.cc",
+ "video_rtp_receiver.h",
+ ]
+ deps = [
+ ":jitter_buffer_delay",
+ ":media_stream",
+ ":media_stream_track_proxy",
+ ":rtp_receiver",
+ ":video_rtp_track_source",
+ ":video_track",
+ "../api:dtls_transport_interface",
+ "../api:frame_transformer_interface",
+ "../api:libjingle_peerconnection_api",
+ "../api:media_stream_interface",
+ "../api:rtp_parameters",
+ "../api:scoped_refptr",
+ "../api:sequence_checker",
+ "../api/crypto:frame_decryptor_interface",
+ "../api/transport/rtp:rtp_source",
+ "../api/video:recordable_encoded_frame",
+ "../api/video:video_frame",
+ "../media:media_channel",
+ "../media:rtc_media_base",
+ "../rtc_base:checks",
+ "../rtc_base:logging",
+ "../rtc_base:macromagic",
+ "../rtc_base:threading",
+ "../rtc_base/system:no_unique_address",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/algorithm:container",
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+
+rtc_library("video_rtp_track_source") {
+ visibility = [ ":*" ]
+ sources = [
+ "video_rtp_track_source.cc",
+ "video_rtp_track_source.h",
+ ]
+ deps = [
+ ":video_track_source",
+ "../api:sequence_checker",
+ "../api/video:recordable_encoded_frame",
+ "../api/video:video_frame",
+ "../media:rtc_media_base",
+ "../rtc_base:checks",
+ "../rtc_base:macromagic",
+ "../rtc_base/synchronization:mutex",
+ "../rtc_base/system:no_unique_address",
+ ]
+}
+
+rtc_library("audio_track") {
+ visibility = [ ":*" ]
+ sources = [
+ "audio_track.cc",
+ "audio_track.h",
+ ]
+ deps = [
+ "../api:media_stream_interface",
+ "../api:scoped_refptr",
+ "../api:sequence_checker",
+ "../rtc_base:checks",
+ "../rtc_base/system:no_unique_address",
+ ]
+}
+
+rtc_library("video_track") {
+ visibility = [ ":*" ]
+ sources = [
+ "video_track.cc",
+ "video_track.h",
+ ]
+ deps = [
+ ":video_track_source_proxy",
+ "../api:media_stream_interface",
+ "../api:scoped_refptr",
+ "../api:sequence_checker",
+ "../api/video:video_frame",
+ "../media:rtc_media_base",
+ "../rtc_base:checks",
+ "../rtc_base:macromagic",
+ "../rtc_base:threading",
+ "../rtc_base/system:no_unique_address",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+}
+
+rtc_source_set("sdp_state_provider") {
+ visibility = [ ":*" ]
+ sources = [ "sdp_state_provider.h" ]
+ deps = [ "../api:libjingle_peerconnection_api" ]
+}
+
+rtc_library("jitter_buffer_delay") {
+ visibility = [ ":*" ]
+ sources = [
+ "jitter_buffer_delay.cc",
+ "jitter_buffer_delay.h",
+ ]
+ deps = [
+ "../api:sequence_checker",
+ "../rtc_base:checks",
+ "../rtc_base:macromagic",
+ "../rtc_base:safe_conversions",
+ "../rtc_base:safe_minmax",
+ "../rtc_base/system:no_unique_address",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+}
+
+rtc_library("remote_audio_source") {
+ visibility = [ ":*" ]
+ sources = [
+ "remote_audio_source.cc",
+ "remote_audio_source.h",
+ ]
+ deps = [
+ ":channel",
+ "../api:call_api",
+ "../api:media_stream_interface",
+ "../api:scoped_refptr",
+ "../api:sequence_checker",
+ "../api/task_queue",
+ "../media:media_channel",
+ "../media:rtc_media_base",
+ "../rtc_base:checks",
+ "../rtc_base:event_tracer",
+ "../rtc_base:logging",
+ "../rtc_base:safe_conversions",
+ "../rtc_base:stringutils",
+ "../rtc_base/synchronization:mutex",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/algorithm:container",
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+
+rtc_library("rtp_sender") {
+ visibility = [ ":*" ]
+ sources = [
+ "rtp_sender.cc",
+ "rtp_sender.h",
+ ]
+ deps = [
+ ":dtmf_sender",
+ ":legacy_stats_collector_interface",
+ "../api:audio_options_api",
+ "../api:dtls_transport_interface",
+ "../api:dtmf_sender_interface",
+ "../api:frame_transformer_interface",
+ "../api:libjingle_peerconnection_api",
+ "../api:media_stream_interface",
+ "../api:priority",
+ "../api:rtc_error",
+ "../api:rtp_parameters",
+ "../api:rtp_sender_interface",
+ "../api:scoped_refptr",
+ "../api:sequence_checker",
+ "../api/crypto:frame_encryptor_interface",
+ "../media:media_channel",
+ "../media:rtc_media_base",
+ "../rtc_base:checks",
+ "../rtc_base:event_tracer",
+ "../rtc_base:logging",
+ "../rtc_base:macromagic",
+ "../rtc_base:ssl",
+ "../rtc_base:threading",
+ "../rtc_base/synchronization:mutex",
+ "../rtc_base/third_party/sigslot",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/algorithm:container",
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+
+rtc_library("rtp_parameters_conversion") {
+ visibility = [ ":*" ]
+ sources = [
+ "rtp_parameters_conversion.cc",
+ "rtp_parameters_conversion.h",
+ ]
+ deps = [
+ ":session_description",
+ "../api:array_view",
+ "../api:libjingle_peerconnection_api",
+ "../api:rtc_error",
+ "../api:rtp_parameters",
+ "../media:codec",
+ "../media:media_constants",
+ "../media:rtc_media_base",
+ "../media:rtp_utils",
+ "../media:stream_params",
+ "../rtc_base:checks",
+ "../rtc_base:logging",
+ "../rtc_base:stringutils",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/algorithm:container",
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+
+rtc_library("dtmf_sender") {
+ visibility = [ ":*" ]
+ sources = [
+ "dtmf_sender.cc",
+ "dtmf_sender.h",
+ ]
+ deps = [
+ ":proxy",
+ "../api:dtmf_sender_interface",
+ "../api:libjingle_peerconnection_api",
+ "../api:scoped_refptr",
+ "../api:sequence_checker",
+ "../api/task_queue",
+ "../api/task_queue:pending_task_safety_flag",
+ "../api/units:time_delta",
+ "../rtc_base:checks",
+ "../rtc_base:logging",
+ "../rtc_base:macromagic",
+ "../rtc_base:refcount",
+ "../rtc_base/third_party/sigslot",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/algorithm:container",
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+
+rtc_library("media_stream") {
+ visibility = [ ":*" ]
+ sources = [
+ "media_stream.cc",
+ "media_stream.h",
+ ]
+ deps = [
+ "../api:libjingle_peerconnection_api",
+ "../api:media_stream_interface",
+ "../api:scoped_refptr",
+ "../rtc_base:checks",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/algorithm:container",
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+
+rtc_library("video_track_source") {
+ sources = [
+ "video_track_source.cc",
+ "video_track_source.h",
+ ]
+ deps = [
+ "../api:media_stream_interface",
+ "../api:sequence_checker",
+ "../api/video:recordable_encoded_frame",
+ "../api/video:video_frame",
+ "../media:media_channel",
+ "../media:rtc_media_base",
+ "../rtc_base:checks",
+ "../rtc_base:macromagic",
+ "../rtc_base/system:no_unique_address",
+ "../rtc_base/system:rtc_export",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+}
+
+rtc_source_set("legacy_stats_collector_interface") {
+ visibility = [ ":*" ]
+ sources = [ "legacy_stats_collector_interface.h" ]
+ deps = [
+ "../api:libjingle_peerconnection_api",
+ "../api:media_stream_interface",
+ ]
+}
+
+rtc_source_set("libjingle_peerconnection") {
+ # TODO(bugs.webrtc.org/13661): Reduce visibility if possible
+ visibility = [ "*" ] # Used by Chrome and others
+
+ deps = [
+ ":peerconnection",
+ "../api:libjingle_peerconnection_api",
+ ]
+}
+
+if (rtc_include_tests && !build_with_chromium) {
+ rtc_test("rtc_pc_unittests") {
+ testonly = true
+
+ sources = [
+ "audio_rtp_receiver_unittest.cc",
+ "channel_unittest.cc",
+ "dtls_srtp_transport_unittest.cc",
+ "dtls_transport_unittest.cc",
+ "ice_transport_unittest.cc",
+ "jsep_transport_controller_unittest.cc",
+ "jsep_transport_unittest.cc",
+ "media_session_unittest.cc",
+ "rtcp_mux_filter_unittest.cc",
+ "rtp_transport_unittest.cc",
+ "sctp_transport_unittest.cc",
+ "session_description_unittest.cc",
+ "srtp_filter_unittest.cc",
+ "srtp_session_unittest.cc",
+ "srtp_transport_unittest.cc",
+ "test/rtp_transport_test_util.h",
+ "test/srtp_test_util.h",
+ "used_ids_unittest.cc",
+ "video_rtp_receiver_unittest.cc",
+ ]
+
+ include_dirs = [ "//third_party/libsrtp/srtp" ]
+
+ if (is_win) {
+ libs = [ "strmiids.lib" ]
+ }
+
+ deps = [
+ ":audio_rtp_receiver",
+ ":channel",
+ ":dtls_srtp_transport",
+ ":dtls_transport",
+ ":ice_transport",
+ ":jsep_transport",
+ ":jsep_transport_controller",
+ ":libjingle_peerconnection",
+ ":media_protocol_names",
+ ":media_session",
+ ":pc_test_utils",
+ ":peerconnection",
+ ":rtc_pc",
+ ":rtcp_mux_filter",
+ ":rtp_media_utils",
+ ":rtp_parameters_conversion",
+ ":rtp_transport",
+ ":rtp_transport_internal",
+ ":sctp_transport",
+ ":session_description",
+ ":srtp_filter",
+ ":srtp_session",
+ ":srtp_transport",
+ ":used_ids",
+ ":video_rtp_receiver",
+ "../api:array_view",
+ "../api:audio_options_api",
+ "../api:candidate",
+ "../api:dtls_transport_interface",
+ "../api:ice_transport_factory",
+ "../api:libjingle_peerconnection_api",
+ "../api:make_ref_counted",
+ "../api:rtc_error",
+ "../api:rtp_headers",
+ "../api:rtp_parameters",
+ "../api:scoped_refptr",
+ "../api:sequence_checker",
+ "../api/task_queue:pending_task_safety_flag",
+ "../api/task_queue:task_queue",
+ "../api/transport:datagram_transport_interface",
+ "../api/transport:enums",
+ "../api/video:builtin_video_bitrate_allocator_factory",
+ "../api/video:recordable_encoded_frame",
+ "../api/video/test:mock_recordable_encoded_frame",
+ "../call:rtp_interfaces",
+ "../call:rtp_receiver",
+ "../media:codec",
+ "../media:media_channel",
+ "../media:media_constants",
+ "../media:rid_description",
+ "../media:rtc_data_sctp_transport_internal",
+ "../media:rtc_media_base",
+ "../media:rtc_media_tests_utils",
+ "../modules/rtp_rtcp:rtp_rtcp_format",
+ "../p2p:fake_ice_transport",
+ "../p2p:fake_port_allocator",
+ "../p2p:p2p_test_utils",
+ "../p2p:rtc_p2p",
+ "../rtc_base:async_packet_socket",
+ "../rtc_base:buffer",
+ "../rtc_base:byte_order",
+ "../rtc_base:checks",
+ "../rtc_base:copy_on_write_buffer",
+ "../rtc_base:gunit_helpers",
+ "../rtc_base:logging",
+ "../rtc_base:macromagic",
+ "../rtc_base:net_helper",
+ "../rtc_base:rtc_base_tests_utils",
+ "../rtc_base:socket_address",
+ "../rtc_base:ssl",
+ "../rtc_base:stringutils",
+ "../rtc_base:task_queue_for_test",
+ "../rtc_base:threading",
+ "../rtc_base:unique_id_generator",
+ "../rtc_base/containers:flat_set",
+ "../rtc_base/third_party/sigslot",
+ "../system_wrappers:metrics",
+ "../test:explicit_key_value_config",
+ "../test:run_loop",
+ "../test:scoped_key_value_config",
+ "../test:test_main",
+ "../test:test_support",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/algorithm:container",
+ "//third_party/abseil-cpp/absl/functional:any_invocable",
+ "//third_party/abseil-cpp/absl/memory",
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+
+ if (rtc_build_libsrtp) {
+ deps += [ "//third_party/libsrtp" ]
+ }
+
+ if (is_android) {
+ use_default_launcher = false
+ deps += [
+ "//build/android/gtest_apk:native_test_instrumentation_test_runner_java",
+ "//testing/android/native_test:native_test_java",
+ "//testing/android/native_test:native_test_support",
+ ]
+ }
+ }
+
+ rtc_library("peerconnection_perf_tests") {
+ testonly = true
+ sources = [ "peer_connection_rampup_tests.cc" ]
+ deps = [
+ ":pc_test_utils",
+ ":peer_connection",
+ ":peerconnection",
+ ":peerconnection_wrapper",
+ "../api:audio_options_api",
+ "../api:create_peerconnection_factory",
+ "../api:libjingle_peerconnection_api",
+ "../api:media_stream_interface",
+ "../api:rtc_error",
+ "../api:rtc_stats_api",
+ "../api:scoped_refptr",
+ "../api/audio:audio_mixer_api",
+ "../api/audio_codecs:audio_codecs_api",
+ "../api/audio_codecs:builtin_audio_decoder_factory",
+ "../api/audio_codecs:builtin_audio_encoder_factory",
+ "../api/test/metrics:global_metrics_logger_and_exporter",
+ "../api/test/metrics:metric",
+ "../api/video_codecs:video_codecs_api",
+ "../api/video_codecs:video_decoder_factory_template",
+ "../api/video_codecs:video_decoder_factory_template_dav1d_adapter",
+ "../api/video_codecs:video_decoder_factory_template_libvpx_vp8_adapter",
+ "../api/video_codecs:video_decoder_factory_template_libvpx_vp9_adapter",
+ "../api/video_codecs:video_decoder_factory_template_open_h264_adapter",
+ "../api/video_codecs:video_encoder_factory_template",
+ "../api/video_codecs:video_encoder_factory_template_libaom_av1_adapter",
+ "../api/video_codecs:video_encoder_factory_template_libvpx_vp8_adapter",
+ "../api/video_codecs:video_encoder_factory_template_libvpx_vp9_adapter",
+ "../api/video_codecs:video_encoder_factory_template_open_h264_adapter",
+ "../media:rtc_media_tests_utils",
+ "../modules/audio_device:audio_device_api",
+ "../modules/audio_processing:api",
+ "../p2p:p2p_test_utils",
+ "../p2p:rtc_p2p",
+ "../rtc_base:checks",
+ "../rtc_base:gunit_helpers",
+ "../rtc_base:rtc_base_tests_utils",
+ "../rtc_base:socket_address",
+ "../rtc_base:socket_factory",
+ "../rtc_base:ssl",
+ "../rtc_base:task_queue_for_test",
+ "../rtc_base:threading",
+ "../system_wrappers",
+ "../test:test_support",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+ }
+
+ rtc_library("peerconnection_wrapper") {
+ testonly = true
+ sources = [
+ "peer_connection_wrapper.cc",
+ "peer_connection_wrapper.h",
+ ]
+ deps = [
+ ":pc_test_utils",
+ ":peerconnection",
+ ":sdp_utils",
+ "../api:function_view",
+ "../api:libjingle_peerconnection_api",
+ "../api:media_stream_interface",
+ "../api:rtc_error",
+ "../api:rtc_stats_api",
+ "../api:rtp_parameters",
+ "../api:rtp_sender_interface",
+ "../api:scoped_refptr",
+ "../rtc_base:checks",
+ "../rtc_base:gunit_helpers",
+ "../rtc_base:logging",
+ "../test:test_support",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+ }
+
+ rtc_test("slow_peer_connection_unittests") {
+ testonly = true
+ sources = [ "slow_peer_connection_integration_test.cc" ]
+ deps = [
+ ":integration_test_helpers",
+ ":pc_test_utils",
+ "../api:dtmf_sender_interface",
+ "../api:libjingle_peerconnection_api",
+ "../api:scoped_refptr",
+ "../api/units:time_delta",
+ "../p2p:p2p_server_utils",
+ "../p2p:p2p_test_utils",
+ "../p2p:rtc_p2p",
+ "../rtc_base:gunit_helpers",
+ "../rtc_base:logging",
+ "../rtc_base:rtc_base_tests_utils",
+ "../rtc_base:socket_address",
+ "../rtc_base:ssl",
+ "../test:test_main",
+ "../test:test_support",
+ "../test/time_controller:time_controller",
+ "//third_party/abseil-cpp/absl/algorithm:container",
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+ }
+
+ rtc_test("peerconnection_unittests") {
+ testonly = true
+ sources = [
+ "data_channel_integrationtest.cc",
+ "data_channel_unittest.cc",
+ "dtmf_sender_unittest.cc",
+ "ice_server_parsing_unittest.cc",
+ "jitter_buffer_delay_unittest.cc",
+ "jsep_session_description_unittest.cc",
+ "legacy_stats_collector_unittest.cc",
+ "local_audio_source_unittest.cc",
+ "media_stream_unittest.cc",
+ "peer_connection_adaptation_integrationtest.cc",
+ "peer_connection_bundle_unittest.cc",
+ "peer_connection_crypto_unittest.cc",
+ "peer_connection_data_channel_unittest.cc",
+ "peer_connection_encodings_integrationtest.cc",
+ "peer_connection_end_to_end_unittest.cc",
+ "peer_connection_factory_unittest.cc",
+ "peer_connection_field_trial_tests.cc",
+ "peer_connection_header_extension_unittest.cc",
+ "peer_connection_histogram_unittest.cc",
+ "peer_connection_ice_unittest.cc",
+ "peer_connection_integrationtest.cc",
+ "peer_connection_interface_unittest.cc",
+ "peer_connection_jsep_unittest.cc",
+ "peer_connection_media_unittest.cc",
+ "peer_connection_rtp_unittest.cc",
+ "peer_connection_signaling_unittest.cc",
+ "peer_connection_simulcast_unittest.cc",
+ "peer_connection_svc_integrationtest.cc",
+ "peer_connection_wrapper.cc",
+ "peer_connection_wrapper.h",
+ "proxy_unittest.cc",
+ "rtc_stats_collector_unittest.cc",
+ "rtc_stats_integrationtest.cc",
+ "rtc_stats_traversal_unittest.cc",
+ "rtp_media_utils_unittest.cc",
+ "rtp_parameters_conversion_unittest.cc",
+ "rtp_sender_receiver_unittest.cc",
+ "rtp_transceiver_unittest.cc",
+ "sctp_utils_unittest.cc",
+ "sdp_offer_answer_unittest.cc",
+ "simulcast_sdp_serializer_unittest.cc",
+ "test/fake_audio_capture_module_unittest.cc",
+ "test/test_sdp_strings.h",
+ "track_media_info_map_unittest.cc",
+ "video_rtp_track_source_unittest.cc",
+ "video_track_unittest.cc",
+ "webrtc_sdp_unittest.cc",
+ ]
+
+ deps = [
+ ":audio_rtp_receiver",
+ ":audio_track",
+ ":channel",
+ ":channel_interface",
+ ":data_channel_controller_unittest",
+ ":dtls_srtp_transport",
+ ":dtls_transport",
+ ":dtmf_sender",
+ ":ice_server_parsing",
+ ":integration_test_helpers",
+ ":jitter_buffer_delay",
+ ":legacy_stats_collector",
+ ":local_audio_source",
+ ":media_protocol_names",
+ ":media_session",
+ ":media_stream",
+ ":peer_connection",
+ ":peer_connection_factory",
+ ":peer_connection_proxy",
+ ":proxy",
+ ":rtc_stats_collector",
+ ":rtc_stats_traversal",
+ ":rtp_media_utils",
+ ":rtp_parameters_conversion",
+ ":rtp_receiver",
+ ":rtp_sender",
+ ":rtp_sender_proxy",
+ ":rtp_transceiver",
+ ":rtp_transport_internal",
+ ":sctp_data_channel",
+ ":sctp_transport",
+ ":sctp_utils",
+ ":sdp_utils",
+ ":session_description",
+ ":simulcast_description",
+ ":simulcast_sdp_serializer",
+ ":stream_collection",
+ ":track_media_info_map",
+ ":transport_stats",
+ ":usage_pattern",
+ ":video_rtp_receiver",
+ ":video_rtp_track_source",
+ ":video_track",
+ ":video_track_source",
+ ":webrtc_sdp",
+ "../api:array_view",
+ "../api:audio_options_api",
+ "../api:candidate",
+ "../api:create_peerconnection_factory",
+ "../api:dtls_transport_interface",
+ "../api:dtmf_sender_interface",
+ "../api:fake_frame_decryptor",
+ "../api:fake_frame_encryptor",
+ "../api:field_trials_view",
+ "../api:function_view",
+ "../api:ice_transport_interface",
+ "../api:libjingle_logging_api",
+ "../api:libjingle_peerconnection_api",
+ "../api:make_ref_counted",
+ "../api:media_stream_interface",
+ "../api:mock_async_dns_resolver",
+ "../api:mock_encoder_selector",
+ "../api:mock_packet_socket_factory",
+ "../api:mock_video_track",
+ "../api:packet_socket_factory",
+ "../api:priority",
+ "../api:rtc_error",
+ "../api:rtp_sender_interface",
+ "../api:rtp_transceiver_direction",
+ "../api:scoped_refptr",
+ "../api/adaptation:resource_adaptation_api",
+ "../api/audio:audio_mixer_api",
+ "../api/crypto:frame_decryptor_interface",
+ "../api/crypto:frame_encryptor_interface",
+ "../api/crypto:options",
+ "../api/rtc_event_log",
+ "../api/rtc_event_log:rtc_event_log_factory",
+ "../api/task_queue",
+ "../api/task_queue:default_task_queue_factory",
+ "../api/transport:datagram_transport_interface",
+ "../api/transport:field_trial_based_config",
+ "../api/transport:sctp_transport_factory_interface",
+ "../api/transport/rtp:rtp_source",
+ "../api/units:data_rate",
+ "../api/units:time_delta",
+ "../api/units:timestamp",
+ "../api/video:builtin_video_bitrate_allocator_factory",
+ "../api/video:encoded_image",
+ "../api/video:recordable_encoded_frame",
+ "../api/video:video_bitrate_allocator_factory",
+ "../api/video:video_codec_constants",
+ "../api/video:video_frame",
+ "../api/video:video_rtp_headers",
+ "../api/video_codecs:scalability_mode",
+ "../call/adaptation:resource_adaptation_test_utilities",
+ "../common_video",
+ "../logging:fake_rtc_event_log",
+ "../media:codec",
+ "../media:media_channel",
+ "../media:media_constants",
+ "../media:rid_description",
+ "../media:rtc_data_sctp_transport_internal",
+ "../media:rtc_media_config",
+ "../media:rtc_media_engine_defaults",
+ "../media:stream_params",
+ "../modules/audio_device:audio_device_api",
+ "../modules/audio_processing:audio_processing_statistics",
+ "../modules/rtp_rtcp:rtp_rtcp_format",
+ "../p2p:fake_port_allocator",
+ "../p2p:p2p_server_utils",
+ "../rtc_base:byte_buffer",
+ "../rtc_base:checks",
+ "../rtc_base:copy_on_write_buffer",
+ "../rtc_base:event_tracer",
+ "../rtc_base:gunit_helpers",
+ "../rtc_base:ip_address",
+ "../rtc_base:logging",
+ "../rtc_base:macromagic",
+ "../rtc_base:mdns_responder_interface",
+ "../rtc_base:net_helper",
+ "../rtc_base:network",
+ "../rtc_base:network_constants",
+ "../rtc_base:null_socket_server",
+ "../rtc_base:refcount",
+ "../rtc_base:rtc_base_tests_utils",
+ "../rtc_base:rtc_certificate_generator",
+ "../rtc_base:rtc_json",
+ "../rtc_base:socket_address",
+ "../rtc_base:ssl",
+ "../rtc_base:stringutils",
+ "../rtc_base:task_queue_for_test",
+ "../rtc_base:threading",
+ "../rtc_base:timeutils",
+ "../rtc_base:unique_id_generator",
+ "../rtc_base/synchronization:mutex",
+ "../rtc_base/third_party/base64",
+ "../rtc_base/third_party/sigslot",
+ "../system_wrappers:metrics",
+ "../test:field_trial",
+ "../test:rtc_expect_death",
+ "../test:run_loop",
+ "../test:scoped_key_value_config",
+ "../test/pc/sctp:fake_sctp_transport",
+ "//third_party/abseil-cpp/absl/algorithm:container",
+ "//third_party/abseil-cpp/absl/memory",
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+
+ if (is_android) {
+ use_default_launcher = false
+ deps += [
+ ":android_black_magic",
+
+ # We need to depend on this one directly, or classloads will fail for
+ # the voice engine BuildInfo, for instance.
+ "//sdk/android:libjingle_peerconnection_java",
+ "//sdk/android:native_test_jni_onload",
+ ]
+ shard_timeout = 900
+ }
+
+ deps += [
+ ":libjingle_peerconnection",
+ ":pc_test_utils",
+ ":rtc_pc",
+ "../api:callfactory_api",
+ "../api:rtc_event_log_output_file",
+ "../api:rtc_stats_api",
+ "../api:rtp_parameters",
+ "../api/audio_codecs:audio_codecs_api",
+ "../api/audio_codecs:builtin_audio_decoder_factory",
+ "../api/audio_codecs:builtin_audio_encoder_factory",
+ "../api/audio_codecs:opus_audio_decoder_factory",
+ "../api/audio_codecs:opus_audio_encoder_factory",
+ "../api/audio_codecs/L16:audio_decoder_L16",
+ "../api/audio_codecs/L16:audio_encoder_L16",
+ "../api/video_codecs:builtin_video_decoder_factory",
+ "../api/video_codecs:builtin_video_encoder_factory",
+ "../api/video_codecs:video_codecs_api",
+ "../api/video_codecs:video_decoder_factory_template",
+ "../api/video_codecs:video_decoder_factory_template_dav1d_adapter",
+ "../api/video_codecs:video_decoder_factory_template_libvpx_vp8_adapter",
+ "../api/video_codecs:video_decoder_factory_template_libvpx_vp9_adapter",
+ "../api/video_codecs:video_decoder_factory_template_open_h264_adapter",
+ "../api/video_codecs:video_encoder_factory_template",
+ "../api/video_codecs:video_encoder_factory_template_libaom_av1_adapter",
+ "../api/video_codecs:video_encoder_factory_template_libvpx_vp8_adapter",
+ "../api/video_codecs:video_encoder_factory_template_libvpx_vp9_adapter",
+ "../api/video_codecs:video_encoder_factory_template_open_h264_adapter",
+ "../call:call_interfaces",
+ "../media:rtc_audio_video",
+ "../media:rtc_media_base",
+ "../media:rtc_media_tests_utils",
+ "../modules/audio_processing",
+ "../modules/audio_processing:api",
+ "../p2p:p2p_test_utils",
+ "../p2p:rtc_p2p",
+ "../rtc_base:rtc_task_queue",
+ "../rtc_base:safe_conversions",
+ "../test:audio_codec_mocks",
+ "../test:test_main",
+ "../test:test_support",
+ ]
+ }
+
+ rtc_library("data_channel_controller_unittest") {
+ testonly = true
+ sources = [ "data_channel_controller_unittest.cc" ]
+ deps = [
+ ":data_channel_controller",
+ ":pc_test_utils",
+ ":peer_connection_internal",
+ ":sctp_data_channel",
+ "../rtc_base:null_socket_server",
+ "../test:run_loop",
+ "../test:test_support",
+ ]
+ }
+
+ if (is_android) {
+ rtc_library("android_black_magic") {
+ # The android code uses hacky includes to ssl code. Having this in a
+ # separate target enables us to keep the peerconnection unit tests clean.
+ testonly = true
+ sources = [
+ "test/android_test_initializer.cc",
+ "test/android_test_initializer.h",
+ ]
+ deps = [
+ "../rtc_base:ssl",
+ "../sdk/android:internal_jni",
+ "../sdk/android:libjingle_peerconnection_jni",
+ "//modules/utility:utility",
+ "//rtc_base:checks",
+ "//testing/android/native_test:native_test_support",
+ ]
+ }
+ }
+
+ rtc_library("integration_test_helpers") {
+ testonly = true
+ sources = [
+ "test/integration_test_helpers.cc",
+ "test/integration_test_helpers.h",
+ ]
+ deps = [
+ ":audio_rtp_receiver",
+ ":audio_track",
+ ":dtmf_sender",
+ ":jitter_buffer_delay",
+ ":local_audio_source",
+ ":media_session",
+ ":media_stream",
+ ":pc_test_utils",
+ ":peer_connection",
+ ":peer_connection_factory",
+ ":peer_connection_proxy",
+ ":peerconnection",
+ ":remote_audio_source",
+ ":rtp_media_utils",
+ ":rtp_parameters_conversion",
+ ":rtp_receiver",
+ ":rtp_sender",
+ ":rtp_transceiver",
+ ":session_description",
+ ":usage_pattern",
+ ":video_rtp_receiver",
+ ":video_rtp_track_source",
+ ":video_track",
+ ":video_track_source",
+ "../api:array_view",
+ "../api:audio_options_api",
+ "../api:callfactory_api",
+ "../api:candidate",
+ "../api:create_peerconnection_factory",
+ "../api:fake_frame_decryptor",
+ "../api:fake_frame_encryptor",
+ "../api:field_trials_view",
+ "../api:function_view",
+ "../api:ice_transport_interface",
+ "../api:libjingle_logging_api",
+ "../api:libjingle_peerconnection_api",
+ "../api:media_stream_interface",
+ "../api:mock_async_dns_resolver",
+ "../api:mock_rtp",
+ "../api:packet_socket_factory",
+ "../api:rtc_error",
+ "../api:rtc_stats_api",
+ "../api:rtp_parameters",
+ "../api:rtp_sender_interface",
+ "../api:rtp_transceiver_direction",
+ "../api:scoped_refptr",
+ "../api/audio:audio_mixer_api",
+ "../api/crypto:frame_decryptor_interface",
+ "../api/crypto:frame_encryptor_interface",
+ "../api/crypto:options",
+ "../api/rtc_event_log",
+ "../api/rtc_event_log:rtc_event_log_factory",
+ "../api/task_queue",
+ "../api/task_queue:default_task_queue_factory",
+ "../api/task_queue:pending_task_safety_flag",
+ "../api/transport:field_trial_based_config",
+ "../api/transport/rtp:rtp_source",
+ "../api/units:time_delta",
+ "../api/video:builtin_video_bitrate_allocator_factory",
+ "../api/video:video_rtp_headers",
+ "../api/video_codecs:video_codecs_api",
+ "../call:call_interfaces",
+ "../call/adaptation:resource_adaptation_test_utilities",
+ "../logging:fake_rtc_event_log",
+ "../media:rtc_audio_video",
+ "../media:rtc_media_base",
+ "../media:rtc_media_config",
+ "../media:rtc_media_engine_defaults",
+ "../media:rtc_media_tests_utils",
+ "../media:stream_params",
+ "../modules/audio_device:audio_device_api",
+ "../modules/audio_processing:api",
+ "../modules/audio_processing:audio_processing_statistics",
+ "../modules/audio_processing:audioproc_test_utils",
+ "../modules/rtp_rtcp:rtp_rtcp_format",
+ "../p2p:fake_ice_transport",
+ "../p2p:fake_port_allocator",
+ "../p2p:p2p_server_utils",
+ "../p2p:p2p_test_utils",
+ "../p2p:rtc_p2p",
+ "../rtc_base:checks",
+ "../rtc_base:gunit_helpers",
+ "../rtc_base:ip_address",
+ "../rtc_base:logging",
+ "../rtc_base:macromagic",
+ "../rtc_base:mdns_responder_interface",
+ "../rtc_base:null_socket_server",
+ "../rtc_base:rtc_base_tests_utils",
+ "../rtc_base:rtc_certificate_generator",
+ "../rtc_base:rtc_event",
+ "../rtc_base:rtc_json",
+ "../rtc_base:safe_conversions",
+ "../rtc_base:socket_address",
+ "../rtc_base:ssl",
+ "../rtc_base:task_queue_for_test",
+ "../rtc_base:threading",
+ "../rtc_base:timeutils",
+ "../rtc_base/synchronization:mutex",
+ "../rtc_base/task_utils:repeating_task",
+ "../rtc_base/third_party/base64",
+ "../rtc_base/third_party/sigslot",
+ "../system_wrappers:metrics",
+ "../test:explicit_key_value_config",
+ "../test:fileutils",
+ "../test:rtp_test_utils",
+ "../test:scoped_key_value_config",
+ "../test:test_support",
+ "../test/pc/sctp:fake_sctp_transport",
+ "../test/time_controller",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/algorithm:container",
+ "//third_party/abseil-cpp/absl/memory",
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+ }
+
+ rtc_library("pc_test_utils") {
+ testonly = true
+ sources = [
+ "test/fake_audio_capture_module.cc",
+ "test/fake_audio_capture_module.h",
+ "test/fake_data_channel_controller.h",
+ "test/fake_peer_connection_base.h",
+ "test/fake_peer_connection_for_stats.h",
+ "test/fake_periodic_video_source.h",
+ "test/fake_periodic_video_track_source.h",
+ "test/fake_rtc_certificate_generator.h",
+ "test/fake_video_track_renderer.h",
+ "test/fake_video_track_source.h",
+ "test/frame_generator_capturer_video_track_source.h",
+ "test/mock_channel_interface.h",
+ "test/mock_data_channel.h",
+ "test/mock_peer_connection_internal.h",
+ "test/mock_peer_connection_observers.h",
+ "test/mock_rtp_receiver_internal.h",
+ "test/mock_rtp_sender_internal.h",
+ "test/mock_voice_media_receive_channel_interface.h",
+ "test/peer_connection_test_wrapper.cc",
+ "test/peer_connection_test_wrapper.h",
+ "test/rtc_stats_obtainer.h",
+ "test/simulcast_layer_util.cc",
+ "test/simulcast_layer_util.h",
+ "test/test_sdp_strings.h",
+ ]
+
+ deps = [
+ ":channel",
+ ":channel_interface",
+ ":jitter_buffer_delay",
+ ":libjingle_peerconnection",
+ ":peer_connection_internal",
+ ":peerconnection",
+ ":rtp_receiver",
+ ":rtp_sender",
+ ":sctp_data_channel",
+ ":session_description",
+ ":simulcast_description",
+ ":stream_collection",
+ ":video_track_source",
+ "../api:audio_options_api",
+ "../api:call_api",
+ "../api:create_frame_generator",
+ "../api:create_peerconnection_factory",
+ "../api:field_trials_view",
+ "../api:libjingle_peerconnection_api",
+ "../api:make_ref_counted",
+ "../api:media_stream_interface",
+ "../api:rtc_error",
+ "../api:rtc_stats_api",
+ "../api:rtp_parameters",
+ "../api:scoped_refptr",
+ "../api:sequence_checker",
+ "../api/audio:audio_mixer_api",
+ "../api/audio_codecs:audio_codecs_api",
+ "../api/task_queue",
+ "../api/task_queue:default_task_queue_factory",
+ "../api/units:time_delta",
+ "../api/video:builtin_video_bitrate_allocator_factory",
+ "../api/video:resolution",
+ "../api/video:video_frame",
+ "../api/video:video_rtp_headers",
+ "../api/video_codecs:video_codecs_api",
+ "../api/video_codecs:video_decoder_factory_template",
+ "../api/video_codecs:video_decoder_factory_template_dav1d_adapter",
+ "../api/video_codecs:video_decoder_factory_template_libvpx_vp8_adapter",
+ "../api/video_codecs:video_decoder_factory_template_libvpx_vp9_adapter",
+ "../api/video_codecs:video_decoder_factory_template_open_h264_adapter",
+ "../api/video_codecs:video_encoder_factory_template",
+ "../api/video_codecs:video_encoder_factory_template_libaom_av1_adapter",
+ "../api/video_codecs:video_encoder_factory_template_libvpx_vp8_adapter",
+ "../api/video_codecs:video_encoder_factory_template_libvpx_vp9_adapter",
+ "../api/video_codecs:video_encoder_factory_template_open_h264_adapter",
+ "../call:call_interfaces",
+ "../media:media_channel",
+ "../media:media_channel_impl",
+ "../media:rtc_media",
+ "../media:rtc_media_base",
+ "../media:rtc_media_tests_utils",
+ "../media:rtc_simulcast_encoder_adapter",
+ "../modules/audio_device",
+ "../modules/audio_processing",
+ "../modules/audio_processing:api",
+ "../modules/rtp_rtcp:rtp_rtcp_format",
+ "../p2p:fake_port_allocator",
+ "../p2p:p2p_test_utils",
+ "../p2p:rtc_p2p",
+ "../rtc_base:checks",
+ "../rtc_base:gunit_helpers",
+ "../rtc_base:logging",
+ "../rtc_base:macromagic",
+ "../rtc_base:rtc_certificate_generator",
+ "../rtc_base:rtc_task_queue",
+ "../rtc_base:ssl",
+ "../rtc_base:stringutils",
+ "../rtc_base:task_queue_for_test",
+ "../rtc_base:threading",
+ "../rtc_base:timeutils",
+ "../rtc_base:weak_ptr",
+ "../rtc_base/synchronization:mutex",
+ "../rtc_base/task_utils:repeating_task",
+ "../rtc_base/third_party/sigslot",
+ "../test:frame_generator_capturer",
+ "../test:scoped_key_value_config",
+ "../test:test_support",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/algorithm:container",
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+ }
+
+ svc_tests_resources = [
+ "../resources/difficult_photo_1850_1110.yuv",
+ "../resources/photo_1850_1110.yuv",
+ "../resources/presentation_1850_1110.yuv",
+ "../resources/web_screenshot_1850_1110.yuv",
+ ]
+
+ if (is_ios) {
+ bundle_data("svc_tests_bundle_data") {
+ testonly = true
+ sources = svc_tests_resources
+ outputs = [ "{{bundle_resources_dir}}/{{source_file_part}}" ]
+ }
+ }
+
+ rtc_test("svc_tests") {
+ sources = [ "test/svc_e2e_tests.cc" ]
+ data = svc_tests_resources
+ deps = [
+ "../api:create_network_emulation_manager",
+ "../api:create_peer_connection_quality_test_frame_generator",
+ "../api:create_peerconnection_quality_test_fixture",
+ "../api:frame_generator_api",
+ "../api:media_stream_interface",
+ "../api:network_emulation_manager_api",
+ "../api:peer_connection_quality_test_fixture_api",
+ "../api:rtc_stats_api",
+ "../api:simulated_network_api",
+ "../api:time_controller",
+ "../api/test/metrics:global_metrics_logger_and_exporter",
+ "../api/test/pclf:media_configuration",
+ "../api/test/pclf:media_quality_test_params",
+ "../api/test/pclf:peer_configurer",
+ "../api/video_codecs:video_codecs_api",
+ "../call:simulated_network",
+ "../modules/video_coding:webrtc_vp9",
+ "../modules/video_coding/svc:scalability_mode_util",
+ "../rtc_base/containers:flat_map",
+ "../system_wrappers:field_trial",
+ "../test:field_trial",
+ "../test:fileutils",
+ "../test:test_main",
+ "../test:test_support",
+ "../test/pc/e2e:network_quality_metrics_reporter",
+ "../test/pc/e2e/analyzer/video:default_video_quality_analyzer",
+ ]
+
+ if (is_ios) {
+ deps += [ ":svc_tests_bundle_data" ]
+ }
+ }
+}
diff --git a/third_party/libwebrtc/pc/DEPS b/third_party/libwebrtc/pc/DEPS
new file mode 100644
index 0000000000..80a702d716
--- /dev/null
+++ b/third_party/libwebrtc/pc/DEPS
@@ -0,0 +1,25 @@
+include_rules = [
+ "+third_party/libsrtp",
+ "+call",
+ "+common_video",
+ "+logging/rtc_event_log",
+ "+media",
+ "+modules/audio_device",
+ "+modules/audio_processing",
+ "+modules/congestion_controller",
+ "+modules/rtp_rtcp",
+ "+modules/video_coding",
+ "+modules/video_render",
+ "+net/dcsctp",
+ "+p2p",
+ "+system_wrappers",
+]
+
+specific_include_rules = {
+ "androidtestinitializer\.cc": [
+ "+base/android", # Allowed only for Android tests.
+ ],
+ "srtpfilter_unittest\.cc": [
+ "+crypto",
+ ],
+}
diff --git a/third_party/libwebrtc/pc/OWNERS b/third_party/libwebrtc/pc/OWNERS
new file mode 100644
index 0000000000..8ceb1f6c63
--- /dev/null
+++ b/third_party/libwebrtc/pc/OWNERS
@@ -0,0 +1,11 @@
+hbos@webrtc.org
+hta@webrtc.org
+perkj@webrtc.org
+tommi@webrtc.org
+deadbeef@webrtc.org
+orphis@webrtc.org
+
+# Adding features via SDP munging requires approval from SDP owners
+per-file webrtc_sdp.cc = set noparent
+per-file webrtc_sdp.cc = hta@webrtc.org
+per-file webrtc_sdp.cc = hbos@webrtc.org
diff --git a/third_party/libwebrtc/pc/audio_rtp_receiver.cc b/third_party/libwebrtc/pc/audio_rtp_receiver.cc
new file mode 100644
index 0000000000..a8659de5f9
--- /dev/null
+++ b/third_party/libwebrtc/pc/audio_rtp_receiver.cc
@@ -0,0 +1,347 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/audio_rtp_receiver.h"
+
+#include <stddef.h>
+
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "api/sequence_checker.h"
+#include "pc/audio_track.h"
+#include "pc/media_stream_track_proxy.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+
+AudioRtpReceiver::AudioRtpReceiver(
+ rtc::Thread* worker_thread,
+ std::string receiver_id,
+ std::vector<std::string> stream_ids,
+ bool is_unified_plan,
+ cricket::VoiceMediaReceiveChannelInterface* voice_channel /*= nullptr*/)
+ : AudioRtpReceiver(worker_thread,
+ receiver_id,
+ CreateStreamsFromIds(std::move(stream_ids)),
+ is_unified_plan,
+ voice_channel) {}
+
+AudioRtpReceiver::AudioRtpReceiver(
+ rtc::Thread* worker_thread,
+ const std::string& receiver_id,
+ const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& streams,
+ bool is_unified_plan,
+ cricket::VoiceMediaReceiveChannelInterface* voice_channel /*= nullptr*/)
+ : worker_thread_(worker_thread),
+ id_(receiver_id),
+ source_(rtc::make_ref_counted<RemoteAudioSource>(
+ worker_thread,
+ is_unified_plan
+ ? RemoteAudioSource::OnAudioChannelGoneAction::kSurvive
+ : RemoteAudioSource::OnAudioChannelGoneAction::kEnd)),
+ track_(AudioTrackProxyWithInternal<AudioTrack>::Create(
+ rtc::Thread::Current(),
+ AudioTrack::Create(receiver_id, source_))),
+ media_channel_(voice_channel),
+ cached_track_enabled_(track_->internal()->enabled()),
+ attachment_id_(GenerateUniqueId()),
+ worker_thread_safety_(PendingTaskSafetyFlag::CreateDetachedInactive()) {
+ RTC_DCHECK(worker_thread_);
+ RTC_DCHECK(track_->GetSource()->remote());
+ track_->RegisterObserver(this);
+ track_->GetSource()->RegisterAudioObserver(this);
+ SetStreams(streams);
+}
+
+AudioRtpReceiver::~AudioRtpReceiver() {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
+ RTC_DCHECK(!media_channel_);
+
+ track_->GetSource()->UnregisterAudioObserver(this);
+ track_->UnregisterObserver(this);
+}
+
+void AudioRtpReceiver::OnChanged() {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
+ const bool enabled = track_->internal()->enabled();
+ if (cached_track_enabled_ == enabled)
+ return;
+ cached_track_enabled_ = enabled;
+ worker_thread_->PostTask(SafeTask(worker_thread_safety_, [this, enabled]() {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ Reconfigure(enabled);
+ }));
+}
+
+void AudioRtpReceiver::SetOutputVolume_w(double volume) {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ RTC_DCHECK_GE(volume, 0.0);
+ RTC_DCHECK_LE(volume, 10.0);
+
+ if (!media_channel_)
+ return;
+
+ signaled_ssrc_ ? media_channel_->SetOutputVolume(*signaled_ssrc_, volume)
+ : media_channel_->SetDefaultOutputVolume(volume);
+}
+
+void AudioRtpReceiver::OnSetVolume(double volume) {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
+ RTC_DCHECK_GE(volume, 0);
+ RTC_DCHECK_LE(volume, 10);
+
+ bool track_enabled = track_->internal()->enabled();
+ worker_thread_->BlockingCall([&]() {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ // Update the cached_volume_ even when stopped, to allow clients to set
+    // the volume before starting/restarting, e.g. see crbug.com/1272566.
+ cached_volume_ = volume;
+ // When the track is disabled, the volume of the source, which is the
+ // corresponding WebRtc Voice Engine channel will be 0. So we do not
+ // allow setting the volume to the source when the track is disabled.
+ if (track_enabled)
+ SetOutputVolume_w(volume);
+ });
+}
+
+rtc::scoped_refptr<DtlsTransportInterface> AudioRtpReceiver::dtls_transport()
+ const {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
+ return dtls_transport_;
+}
+
+std::vector<std::string> AudioRtpReceiver::stream_ids() const {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
+ std::vector<std::string> stream_ids(streams_.size());
+ for (size_t i = 0; i < streams_.size(); ++i)
+ stream_ids[i] = streams_[i]->id();
+ return stream_ids;
+}
+
+std::vector<rtc::scoped_refptr<MediaStreamInterface>>
+AudioRtpReceiver::streams() const {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
+ return streams_;
+}
+
+RtpParameters AudioRtpReceiver::GetParameters() const {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ if (!media_channel_)
+ return RtpParameters();
+ auto current_ssrc = ssrc();
+ return current_ssrc.has_value()
+ ? media_channel_->GetRtpReceiverParameters(current_ssrc.value())
+ : media_channel_->GetDefaultRtpReceiveParameters();
+}
+
+void AudioRtpReceiver::SetFrameDecryptor(
+ rtc::scoped_refptr<FrameDecryptorInterface> frame_decryptor) {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ frame_decryptor_ = std::move(frame_decryptor);
+ // Special Case: Set the frame decryptor to any value on any existing channel.
+ if (media_channel_ && signaled_ssrc_) {
+ media_channel_->SetFrameDecryptor(*signaled_ssrc_, frame_decryptor_);
+ }
+}
+
+rtc::scoped_refptr<FrameDecryptorInterface>
+AudioRtpReceiver::GetFrameDecryptor() const {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ return frame_decryptor_;
+}
+
+void AudioRtpReceiver::Stop() {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
+ source_->SetState(MediaSourceInterface::kEnded);
+ track_->internal()->set_ended();
+}
+
+void AudioRtpReceiver::RestartMediaChannel(absl::optional<uint32_t> ssrc) {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
+ bool enabled = track_->internal()->enabled();
+ MediaSourceInterface::SourceState state = source_->state();
+ worker_thread_->BlockingCall([&]() {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ RestartMediaChannel_w(std::move(ssrc), enabled, state);
+ });
+ source_->SetState(MediaSourceInterface::kLive);
+}
+
+void AudioRtpReceiver::RestartMediaChannel_w(
+ absl::optional<uint32_t> ssrc,
+ bool track_enabled,
+ MediaSourceInterface::SourceState state) {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ if (!media_channel_)
+ return; // Can't restart.
+
+ // Make sure the safety flag is marked as `alive` for cases where the media
+ // channel was provided via the ctor and not an explicit call to
+ // SetMediaChannel.
+ worker_thread_safety_->SetAlive();
+
+ if (state != MediaSourceInterface::kInitializing) {
+ if (signaled_ssrc_ == ssrc)
+ return;
+ source_->Stop(media_channel_, signaled_ssrc_);
+ }
+
+ signaled_ssrc_ = std::move(ssrc);
+ source_->Start(media_channel_, signaled_ssrc_);
+ if (signaled_ssrc_) {
+ media_channel_->SetBaseMinimumPlayoutDelayMs(*signaled_ssrc_,
+ delay_.GetMs());
+ }
+
+ Reconfigure(track_enabled);
+}
+
+void AudioRtpReceiver::SetupMediaChannel(uint32_t ssrc) {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
+ RestartMediaChannel(ssrc);
+}
+
+void AudioRtpReceiver::SetupUnsignaledMediaChannel() {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
+ RestartMediaChannel(absl::nullopt);
+}
+
+absl::optional<uint32_t> AudioRtpReceiver::ssrc() const {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ if (!signaled_ssrc_.has_value() && media_channel_) {
+ return media_channel_->GetUnsignaledSsrc();
+ }
+ return signaled_ssrc_;
+}
+
+void AudioRtpReceiver::set_stream_ids(std::vector<std::string> stream_ids) {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
+ SetStreams(CreateStreamsFromIds(std::move(stream_ids)));
+}
+
+void AudioRtpReceiver::set_transport(
+ rtc::scoped_refptr<DtlsTransportInterface> dtls_transport) {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
+ dtls_transport_ = std::move(dtls_transport);
+}
+
+void AudioRtpReceiver::SetStreams(
+ const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& streams) {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
+ // Remove remote track from any streams that are going away.
+ for (const auto& existing_stream : streams_) {
+ bool removed = true;
+ for (const auto& stream : streams) {
+ if (existing_stream->id() == stream->id()) {
+ RTC_DCHECK_EQ(existing_stream.get(), stream.get());
+ removed = false;
+ break;
+ }
+ }
+ if (removed) {
+ existing_stream->RemoveTrack(audio_track());
+ }
+ }
+ // Add remote track to any streams that are new.
+ for (const auto& stream : streams) {
+ bool added = true;
+ for (const auto& existing_stream : streams_) {
+ if (stream->id() == existing_stream->id()) {
+ RTC_DCHECK_EQ(stream.get(), existing_stream.get());
+ added = false;
+ break;
+ }
+ }
+ if (added) {
+ stream->AddTrack(audio_track());
+ }
+ }
+ streams_ = streams;
+}
+
+std::vector<RtpSource> AudioRtpReceiver::GetSources() const {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ auto current_ssrc = ssrc();
+ if (!media_channel_ || !current_ssrc.has_value()) {
+ return {};
+ }
+ return media_channel_->GetSources(current_ssrc.value());
+}
+
+void AudioRtpReceiver::SetDepacketizerToDecoderFrameTransformer(
+ rtc::scoped_refptr<webrtc::FrameTransformerInterface> frame_transformer) {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ if (media_channel_) {
+ media_channel_->SetDepacketizerToDecoderFrameTransformer(
+ signaled_ssrc_.value_or(0), frame_transformer);
+ }
+ frame_transformer_ = std::move(frame_transformer);
+}
+
+void AudioRtpReceiver::Reconfigure(bool track_enabled) {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ RTC_DCHECK(media_channel_);
+
+ SetOutputVolume_w(track_enabled ? cached_volume_ : 0);
+
+ if (signaled_ssrc_ && frame_decryptor_) {
+ // Reattach the frame decryptor if we were reconfigured.
+ media_channel_->SetFrameDecryptor(*signaled_ssrc_, frame_decryptor_);
+ }
+
+ if (frame_transformer_) {
+ media_channel_->SetDepacketizerToDecoderFrameTransformer(
+ signaled_ssrc_.value_or(0), frame_transformer_);
+ }
+}
+
+void AudioRtpReceiver::SetObserver(RtpReceiverObserverInterface* observer) {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
+ observer_ = observer;
+ // Deliver any notifications the observer may have missed by being set late.
+ if (received_first_packet_ && observer_) {
+ observer_->OnFirstPacketReceived(media_type());
+ }
+}
+
+void AudioRtpReceiver::SetJitterBufferMinimumDelay(
+ absl::optional<double> delay_seconds) {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ delay_.Set(delay_seconds);
+ if (media_channel_ && signaled_ssrc_)
+ media_channel_->SetBaseMinimumPlayoutDelayMs(*signaled_ssrc_,
+ delay_.GetMs());
+}
+
+void AudioRtpReceiver::SetMediaChannel(
+ cricket::MediaReceiveChannelInterface* media_channel) {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ RTC_DCHECK(media_channel == nullptr ||
+ media_channel->media_type() == media_type());
+ if (!media_channel && media_channel_)
+ SetOutputVolume_w(0.0);
+
+ media_channel ? worker_thread_safety_->SetAlive()
+ : worker_thread_safety_->SetNotAlive();
+ media_channel_ =
+ static_cast<cricket::VoiceMediaReceiveChannelInterface*>(media_channel);
+}
+
+void AudioRtpReceiver::NotifyFirstPacketReceived() {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
+ if (observer_) {
+ observer_->OnFirstPacketReceived(media_type());
+ }
+ received_first_packet_ = true;
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/audio_rtp_receiver.h b/third_party/libwebrtc/pc/audio_rtp_receiver.h
new file mode 100644
index 0000000000..86c42d532a
--- /dev/null
+++ b/third_party/libwebrtc/pc/audio_rtp_receiver.h
@@ -0,0 +1,165 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_AUDIO_RTP_RECEIVER_H_
+#define PC_AUDIO_RTP_RECEIVER_H_
+
+#include <stdint.h>
+
+#include <string>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/crypto/frame_decryptor_interface.h"
+#include "api/dtls_transport_interface.h"
+#include "api/frame_transformer_interface.h"
+#include "api/media_stream_interface.h"
+#include "api/media_types.h"
+#include "api/rtp_parameters.h"
+#include "api/rtp_receiver_interface.h"
+#include "api/scoped_refptr.h"
+#include "api/sequence_checker.h"
+#include "api/task_queue/pending_task_safety_flag.h"
+#include "api/transport/rtp/rtp_source.h"
+#include "media/base/media_channel.h"
+#include "pc/audio_track.h"
+#include "pc/jitter_buffer_delay.h"
+#include "pc/media_stream_track_proxy.h"
+#include "pc/remote_audio_source.h"
+#include "pc/rtp_receiver.h"
+#include "rtc_base/system/no_unique_address.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/thread_annotations.h"
+
+namespace webrtc {
+
+class AudioRtpReceiver : public ObserverInterface,
+ public AudioSourceInterface::AudioObserver,
+ public RtpReceiverInternal {
+ public:
+ // The constructor supports optionally passing the voice channel to the
+ // instance at construction time without having to call `SetMediaChannel()`
+ // on the worker thread straight after construction.
+ // However, when using that, the assumption is that right after construction,
+ // a call to either `SetupUnsignaledMediaChannel` or `SetupMediaChannel`
+ // will be made, which will internally start the source on the worker thread.
+ AudioRtpReceiver(
+ rtc::Thread* worker_thread,
+ std::string receiver_id,
+ std::vector<std::string> stream_ids,
+ bool is_unified_plan,
+ cricket::VoiceMediaReceiveChannelInterface* voice_channel = nullptr);
+ // TODO(https://crbug.com/webrtc/9480): Remove this when streams() is removed.
+ AudioRtpReceiver(
+ rtc::Thread* worker_thread,
+ const std::string& receiver_id,
+ const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& streams,
+ bool is_unified_plan,
+ cricket::VoiceMediaReceiveChannelInterface* media_channel = nullptr);
+ virtual ~AudioRtpReceiver();
+
+ // ObserverInterface implementation
+ void OnChanged() override;
+
+ // AudioSourceInterface::AudioObserver implementation
+ void OnSetVolume(double volume) override;
+
+ rtc::scoped_refptr<AudioTrackInterface> audio_track() const { return track_; }
+
+ // RtpReceiverInterface implementation
+ rtc::scoped_refptr<MediaStreamTrackInterface> track() const override {
+ return track_;
+ }
+ rtc::scoped_refptr<DtlsTransportInterface> dtls_transport() const override;
+ std::vector<std::string> stream_ids() const override;
+ std::vector<rtc::scoped_refptr<MediaStreamInterface>> streams()
+ const override;
+
+ cricket::MediaType media_type() const override {
+ return cricket::MEDIA_TYPE_AUDIO;
+ }
+
+ std::string id() const override { return id_; }
+
+ RtpParameters GetParameters() const override;
+
+ void SetFrameDecryptor(
+ rtc::scoped_refptr<FrameDecryptorInterface> frame_decryptor) override;
+
+ rtc::scoped_refptr<FrameDecryptorInterface> GetFrameDecryptor()
+ const override;
+
+ // RtpReceiverInternal implementation.
+ void Stop() override;
+ void SetupMediaChannel(uint32_t ssrc) override;
+ void SetupUnsignaledMediaChannel() override;
+ absl::optional<uint32_t> ssrc() const override;
+ void NotifyFirstPacketReceived() override;
+ void set_stream_ids(std::vector<std::string> stream_ids) override;
+ void set_transport(
+ rtc::scoped_refptr<DtlsTransportInterface> dtls_transport) override;
+ void SetStreams(const std::vector<rtc::scoped_refptr<MediaStreamInterface>>&
+ streams) override;
+ void SetObserver(RtpReceiverObserverInterface* observer) override;
+
+ void SetJitterBufferMinimumDelay(
+ absl::optional<double> delay_seconds) override;
+
+ void SetMediaChannel(
+ cricket::MediaReceiveChannelInterface* media_channel) override;
+
+ std::vector<RtpSource> GetSources() const override;
+ int AttachmentId() const override { return attachment_id_; }
+ void SetDepacketizerToDecoderFrameTransformer(
+ rtc::scoped_refptr<webrtc::FrameTransformerInterface> frame_transformer)
+ override;
+
+ private:
+ void RestartMediaChannel(absl::optional<uint32_t> ssrc)
+ RTC_RUN_ON(&signaling_thread_checker_);
+ void RestartMediaChannel_w(absl::optional<uint32_t> ssrc,
+ bool track_enabled,
+ MediaSourceInterface::SourceState state)
+ RTC_RUN_ON(worker_thread_);
+ void Reconfigure(bool track_enabled) RTC_RUN_ON(worker_thread_);
+ void SetOutputVolume_w(double volume) RTC_RUN_ON(worker_thread_);
+
+ RTC_NO_UNIQUE_ADDRESS SequenceChecker signaling_thread_checker_;
+ rtc::Thread* const worker_thread_;
+ const std::string id_;
+ const rtc::scoped_refptr<RemoteAudioSource> source_;
+ const rtc::scoped_refptr<AudioTrackProxyWithInternal<AudioTrack>> track_;
+ cricket::VoiceMediaReceiveChannelInterface* media_channel_
+ RTC_GUARDED_BY(worker_thread_) = nullptr;
+ absl::optional<uint32_t> signaled_ssrc_ RTC_GUARDED_BY(worker_thread_);
+ std::vector<rtc::scoped_refptr<MediaStreamInterface>> streams_
+ RTC_GUARDED_BY(&signaling_thread_checker_);
+ bool cached_track_enabled_ RTC_GUARDED_BY(&signaling_thread_checker_);
+ double cached_volume_ RTC_GUARDED_BY(worker_thread_) = 1.0;
+ RtpReceiverObserverInterface* observer_
+ RTC_GUARDED_BY(&signaling_thread_checker_) = nullptr;
+ bool received_first_packet_ RTC_GUARDED_BY(&signaling_thread_checker_) =
+ false;
+ const int attachment_id_;
+ rtc::scoped_refptr<FrameDecryptorInterface> frame_decryptor_
+ RTC_GUARDED_BY(worker_thread_);
+ rtc::scoped_refptr<DtlsTransportInterface> dtls_transport_
+ RTC_GUARDED_BY(&signaling_thread_checker_);
+ // Stores and updates the playout delay. Handles caching cases if
+ // `SetJitterBufferMinimumDelay` is called before start.
+ JitterBufferDelay delay_ RTC_GUARDED_BY(worker_thread_);
+ rtc::scoped_refptr<FrameTransformerInterface> frame_transformer_
+ RTC_GUARDED_BY(worker_thread_);
+ const rtc::scoped_refptr<PendingTaskSafetyFlag> worker_thread_safety_;
+};
+
+} // namespace webrtc
+
+#endif // PC_AUDIO_RTP_RECEIVER_H_
diff --git a/third_party/libwebrtc/pc/audio_rtp_receiver_unittest.cc b/third_party/libwebrtc/pc/audio_rtp_receiver_unittest.cc
new file mode 100644
index 0000000000..9eb20c982f
--- /dev/null
+++ b/third_party/libwebrtc/pc/audio_rtp_receiver_unittest.cc
@@ -0,0 +1,127 @@
+/*
+ * Copyright 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/audio_rtp_receiver.h"
+
+#include <atomic>
+
+#include "pc/test/mock_voice_media_receive_channel_interface.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/thread.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+#include "test/run_loop.h"
+
+using ::testing::_;
+using ::testing::InvokeWithoutArgs;
+using ::testing::Mock;
+
+static const int kTimeOut = 100;
+static const double kDefaultVolume = 1;
+static const double kVolume = 3.7;
+static const double kVolumeMuted = 0.0;
+static const uint32_t kSsrc = 3;
+
+namespace webrtc {
+class AudioRtpReceiverTest : public ::testing::Test {
+ protected:
+ AudioRtpReceiverTest()
+ : worker_(rtc::Thread::Current()),
+ receiver_(
+ rtc::make_ref_counted<AudioRtpReceiver>(worker_,
+ std::string(),
+ std::vector<std::string>(),
+ false)) {
+ EXPECT_CALL(receive_channel_, SetRawAudioSink(kSsrc, _));
+ EXPECT_CALL(receive_channel_, SetBaseMinimumPlayoutDelayMs(kSsrc, _));
+ }
+
+ ~AudioRtpReceiverTest() {
+ EXPECT_CALL(receive_channel_, SetOutputVolume(kSsrc, kVolumeMuted));
+ receiver_->SetMediaChannel(nullptr);
+ }
+
+ rtc::AutoThread main_thread_;
+ rtc::Thread* worker_;
+ rtc::scoped_refptr<AudioRtpReceiver> receiver_;
+ cricket::MockVoiceMediaReceiveChannelInterface receive_channel_;
+};
+
+TEST_F(AudioRtpReceiverTest, SetOutputVolumeIsCalled) {
+ std::atomic_int set_volume_calls(0);
+
+ EXPECT_CALL(receive_channel_, SetOutputVolume(kSsrc, kDefaultVolume))
+ .WillOnce(InvokeWithoutArgs([&] {
+ set_volume_calls++;
+ return true;
+ }));
+
+ receiver_->track();
+ receiver_->track()->set_enabled(true);
+ receiver_->SetMediaChannel(&receive_channel_);
+ EXPECT_CALL(receive_channel_, SetDefaultRawAudioSink(_)).Times(0);
+ receiver_->SetupMediaChannel(kSsrc);
+
+ EXPECT_CALL(receive_channel_, SetOutputVolume(kSsrc, kVolume))
+ .WillOnce(InvokeWithoutArgs([&] {
+ set_volume_calls++;
+ return true;
+ }));
+
+ receiver_->OnSetVolume(kVolume);
+ EXPECT_TRUE_WAIT(set_volume_calls == 2, kTimeOut);
+}
+
+TEST_F(AudioRtpReceiverTest, VolumesSetBeforeStartingAreRespected) {
+ // Set the volume before setting the media channel. It should still be used
+ // as the initial volume.
+ receiver_->OnSetVolume(kVolume);
+
+ receiver_->track()->set_enabled(true);
+ receiver_->SetMediaChannel(&receive_channel_);
+
+  // The previously set initial volume should be propagated to the provided
+ // media_channel_ as soon as SetupMediaChannel is called.
+ EXPECT_CALL(receive_channel_, SetOutputVolume(kSsrc, kVolume));
+
+ receiver_->SetupMediaChannel(kSsrc);
+}
+
+// Tests that OnChanged notifications are processed correctly on the worker
+// thread when a media channel pointer is passed to the receiver via the
+// constructor.
+TEST(AudioRtpReceiver, OnChangedNotificationsAfterConstruction) {
+ webrtc::test::RunLoop loop;
+ auto* thread = rtc::Thread::Current(); // Points to loop's thread.
+ cricket::MockVoiceMediaReceiveChannelInterface receive_channel;
+ auto receiver = rtc::make_ref_counted<AudioRtpReceiver>(
+ thread, std::string(), std::vector<std::string>(), true,
+ &receive_channel);
+
+ EXPECT_CALL(receive_channel, SetDefaultRawAudioSink(_)).Times(1);
+ EXPECT_CALL(receive_channel, SetDefaultOutputVolume(kDefaultVolume)).Times(1);
+ receiver->SetupUnsignaledMediaChannel();
+ loop.Flush();
+
+ // Mark the track as disabled.
+ receiver->track()->set_enabled(false);
+
+ // When the track was marked as disabled, an async notification was queued
+ // for the worker thread. This notification should trigger the volume
+ // of the media channel to be set to kVolumeMuted.
+ // Flush the worker thread, but set the expectation first for the call.
+ EXPECT_CALL(receive_channel, SetDefaultOutputVolume(kVolumeMuted)).Times(1);
+ loop.Flush();
+
+ EXPECT_CALL(receive_channel, SetDefaultOutputVolume(kVolumeMuted)).Times(1);
+ receiver->SetMediaChannel(nullptr);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/audio_track.cc b/third_party/libwebrtc/pc/audio_track.cc
new file mode 100644
index 0000000000..c012442d13
--- /dev/null
+++ b/third_party/libwebrtc/pc/audio_track.cc
@@ -0,0 +1,70 @@
+/*
+ * Copyright 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/audio_track.h"
+
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+
+// static
+rtc::scoped_refptr<AudioTrack> AudioTrack::Create(
+ absl::string_view id,
+ const rtc::scoped_refptr<AudioSourceInterface>& source) {
+ return rtc::make_ref_counted<AudioTrack>(id, source);
+}
+
+AudioTrack::AudioTrack(absl::string_view label,
+ const rtc::scoped_refptr<AudioSourceInterface>& source)
+ : MediaStreamTrack<AudioTrackInterface>(label), audio_source_(source) {
+ if (audio_source_) {
+ audio_source_->RegisterObserver(this);
+ OnChanged();
+ }
+}
+
+AudioTrack::~AudioTrack() {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
+ set_state(MediaStreamTrackInterface::kEnded);
+ if (audio_source_)
+ audio_source_->UnregisterObserver(this);
+}
+
+std::string AudioTrack::kind() const {
+ return kAudioKind;
+}
+
+AudioSourceInterface* AudioTrack::GetSource() const {
+ // Callable from any thread.
+ return audio_source_.get();
+}
+
+void AudioTrack::AddSink(AudioTrackSinkInterface* sink) {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
+ if (audio_source_)
+ audio_source_->AddSink(sink);
+}
+
+void AudioTrack::RemoveSink(AudioTrackSinkInterface* sink) {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
+ if (audio_source_)
+ audio_source_->RemoveSink(sink);
+}
+
+void AudioTrack::OnChanged() {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
+ if (audio_source_->state() == MediaSourceInterface::kEnded) {
+ set_state(kEnded);
+ } else {
+ set_state(kLive);
+ }
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/audio_track.h b/third_party/libwebrtc/pc/audio_track.h
new file mode 100644
index 0000000000..ae326b304b
--- /dev/null
+++ b/third_party/libwebrtc/pc/audio_track.h
@@ -0,0 +1,66 @@
+/*
+ * Copyright 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_AUDIO_TRACK_H_
+#define PC_AUDIO_TRACK_H_
+
+#include <string>
+
+#include "api/media_stream_interface.h"
+#include "api/media_stream_track.h"
+#include "api/scoped_refptr.h"
+#include "api/sequence_checker.h"
+#include "rtc_base/system/no_unique_address.h"
+
+namespace webrtc {
+
+// TODO(tommi): Instead of inheriting from `MediaStreamTrack<>`, implement the
+// properties directly in this class. `MediaStreamTrack` doesn't guard against
+// conflicting access, so we'd need to override those methods anyway in this
+// class in order to make sure things are correctly checked.
+class AudioTrack : public MediaStreamTrack<AudioTrackInterface>,
+ public ObserverInterface {
+ protected:
+ // Protected ctor to force use of factory method.
+ AudioTrack(absl::string_view label,
+ const rtc::scoped_refptr<AudioSourceInterface>& source);
+
+ AudioTrack() = delete;
+ AudioTrack(const AudioTrack&) = delete;
+ AudioTrack& operator=(const AudioTrack&) = delete;
+
+ ~AudioTrack() override;
+
+ public:
+ static rtc::scoped_refptr<AudioTrack> Create(
+ absl::string_view id,
+ const rtc::scoped_refptr<AudioSourceInterface>& source);
+
+ // MediaStreamTrack implementation.
+ std::string kind() const override;
+
+ // AudioTrackInterface implementation.
+ AudioSourceInterface* GetSource() const override;
+
+ void AddSink(AudioTrackSinkInterface* sink) override;
+ void RemoveSink(AudioTrackSinkInterface* sink) override;
+
+ private:
+ // ObserverInterface implementation.
+ void OnChanged() override;
+
+ private:
+ const rtc::scoped_refptr<AudioSourceInterface> audio_source_;
+ RTC_NO_UNIQUE_ADDRESS webrtc::SequenceChecker signaling_thread_checker_;
+};
+
+} // namespace webrtc
+
+#endif // PC_AUDIO_TRACK_H_
diff --git a/third_party/libwebrtc/pc/channel.cc b/third_party/libwebrtc/pc/channel.cc
new file mode 100644
index 0000000000..0024ba0e35
--- /dev/null
+++ b/third_party/libwebrtc/pc/channel.cc
@@ -0,0 +1,1219 @@
+/*
+ * Copyright 2004 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/channel.h"
+
+#include <algorithm>
+#include <cstdint>
+#include <string>
+#include <type_traits>
+#include <utility>
+
+#include "absl/algorithm/container.h"
+#include "absl/strings/string_view.h"
+#include "api/rtp_parameters.h"
+#include "api/sequence_checker.h"
+#include "api/task_queue/pending_task_safety_flag.h"
+#include "api/units/timestamp.h"
+#include "media/base/codec.h"
+#include "media/base/rid_description.h"
+#include "media/base/rtp_utils.h"
+#include "modules/rtp_rtcp/source/rtp_packet_received.h"
+#include "p2p/base/dtls_transport_internal.h"
+#include "pc/rtp_media_utils.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/copy_on_write_buffer.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/network_route.h"
+#include "rtc_base/strings/string_format.h"
+#include "rtc_base/trace_event.h"
+
+namespace cricket {
+namespace {
+
+using ::rtc::StringFormat;
+using ::rtc::UniqueRandomIdGenerator;
+using ::webrtc::PendingTaskSafetyFlag;
+using ::webrtc::SdpType;
+
+// Finds a stream based on target's Primary SSRC or RIDs.
+// This struct is used in BaseChannel::UpdateLocalStreams_w.
+// Finds a stream based on target's Primary SSRC or RIDs.
+// This struct is used in BaseChannel::UpdateLocalStreams_w.
+// Matching rules, in order:
+//  1. If both streams have SSRCs, match on the target's primary SSRC.
+//  2. If neither has RIDs, they don't match.
+//  3. Otherwise match iff both have the same RIDs in the same order.
+struct StreamFinder {
+  explicit StreamFinder(const StreamParams* target) : target_(target) {
+    RTC_DCHECK(target);
+  }
+
+  bool operator()(const StreamParams& sp) const {
+    if (target_->has_ssrcs() && sp.has_ssrcs()) {
+      return sp.has_ssrc(target_->first_ssrc());
+    }
+
+    if (!target_->has_rids() && !sp.has_rids()) {
+      return false;
+    }
+
+    const std::vector<RidDescription>& target_rids = target_->rids();
+    const std::vector<RidDescription>& source_rids = sp.rids();
+    if (source_rids.size() != target_rids.size()) {
+      return false;
+    }
+
+    // Check that all RIDs match.
+    return std::equal(source_rids.begin(), source_rids.end(),
+                      target_rids.begin(),
+                      [](const RidDescription& lhs, const RidDescription& rhs) {
+                        return lhs.rid == rhs.rid;
+                      });
+  }
+
+  // Not owned; must outlive the finder.
+  const StreamParams* target_;
+};
+
+} // namespace
+
+// Copies the negotiated codecs, header extensions and RTCP options from an
+// audio/video media description into `params`. Only audio and video content
+// types are supported (enforced by the DCHECK).
+void MediaChannelParametersFromMediaDescription(
+    const RtpMediaContentDescription* desc,
+    const RtpHeaderExtensions& extensions,
+    bool is_stream_active,
+    MediaChannelParameters* params) {
+  RTC_DCHECK(desc->type() == MEDIA_TYPE_AUDIO ||
+             desc->type() == MEDIA_TYPE_VIDEO);
+  params->is_stream_active = is_stream_active;
+  params->codecs = desc->codecs();
+  // TODO(bugs.webrtc.org/11513): See if we really need
+  // rtp_header_extensions_set() and remove it if we don't.
+  if (desc->rtp_header_extensions_set()) {
+    params->extensions = extensions;
+  }
+  params->rtcp.reduced_size = desc->rtcp_reduced_size();
+  params->rtcp.remote_estimate = desc->remote_estimate();
+}
+
+// Builds sender parameters from a media description. `desc` is the *remote*
+// description at the call sites in this file (see SetRemoteContent_w), so
+// "the remote can receive" (HasRecv) means "we may send" — hence the send
+// stream is active when the remote direction includes recv.
+void RtpSendParametersFromMediaDescription(
+    const RtpMediaContentDescription* desc,
+    webrtc::RtpExtension::Filter extensions_filter,
+    SenderParameters* send_params) {
+  // Drop duplicate extensions according to the encrypted/plain preference.
+  RtpHeaderExtensions extensions =
+      webrtc::RtpExtension::DeduplicateHeaderExtensions(
+          desc->rtp_header_extensions(), extensions_filter);
+  const bool is_stream_active =
+      webrtc::RtpTransceiverDirectionHasRecv(desc->direction());
+  MediaChannelParametersFromMediaDescription(desc, extensions, is_stream_active,
+                                             send_params);
+  send_params->max_bandwidth_bps = desc->bandwidth();
+  send_params->extmap_allow_mixed = desc->extmap_allow_mixed();
+}
+
+// Constructs the channel on the worker thread. Takes ownership of the send
+// and receive media channel implementations; `ssrc_generator` is borrowed and
+// must outlive the channel (used to generate SSRCs for RID-only streams).
+BaseChannel::BaseChannel(
+    webrtc::TaskQueueBase* worker_thread,
+    rtc::Thread* network_thread,
+    webrtc::TaskQueueBase* signaling_thread,
+    std::unique_ptr<MediaSendChannelInterface> send_media_channel_impl,
+    std::unique_ptr<MediaReceiveChannelInterface> receive_media_channel_impl,
+    absl::string_view mid,
+    bool srtp_required,
+    webrtc::CryptoOptions crypto_options,
+    UniqueRandomIdGenerator* ssrc_generator)
+    : media_send_channel_(std::move(send_media_channel_impl)),
+      media_receive_channel_(std::move(receive_media_channel_impl)),
+      worker_thread_(worker_thread),
+      network_thread_(network_thread),
+      signaling_thread_(signaling_thread),
+      // Flag used to cancel posted tasks after destruction begins.
+      alive_(PendingTaskSafetyFlag::Create()),
+      srtp_required_(srtp_required),
+      // Whether encrypted header extensions are kept or discarded during
+      // deduplication depends on the crypto options.
+      extensions_filter_(
+          crypto_options.srtp.enable_encrypted_rtp_header_extensions
+              ? webrtc::RtpExtension::kPreferEncryptedExtension
+              : webrtc::RtpExtension::kDiscardEncryptedExtension),
+      // Demuxing starts out keyed on the MID only.
+      demuxer_criteria_(mid),
+      ssrc_generator_(ssrc_generator) {
+  RTC_DCHECK_RUN_ON(worker_thread_);
+  RTC_DCHECK(media_send_channel_);
+  RTC_DCHECK(media_receive_channel_);
+  RTC_DCHECK(ssrc_generator_);
+  RTC_DLOG(LS_INFO) << "Created channel: " << ToString();
+}
+
+// Destroys the channel on the worker thread. Cancels any tasks posted via
+// SafeTask(alive_, ...) before members are torn down.
+BaseChannel::~BaseChannel() {
+  TRACE_EVENT0("webrtc", "BaseChannel::~BaseChannel");
+  RTC_DCHECK_RUN_ON(worker_thread_);
+
+  // Eats any outstanding messages or packets.
+  alive_->SetNotAlive();
+  // The media channel is destroyed at the end of the destructor, since it
+  // is a std::unique_ptr. The transport channel (rtp_transport) must outlive
+  // the media channel.
+}
+
+// Returns a short "{mid, media_type}" description used in log messages.
+std::string BaseChannel::ToString() const {
+  return StringFormat(
+      "{mid: %s, media_type: %s}", mid().c_str(),
+      MediaTypeToString(media_send_channel_->media_type()).c_str());
+}
+
+// Registers this channel as demuxer sink and subscribes to the transport's
+// signals (ready-to-send, network route, writable state, sent packets).
+// Returns false if the demuxer sink registration fails. Network thread only.
+bool BaseChannel::ConnectToRtpTransport_n() {
+  RTC_DCHECK(rtp_transport_);
+  RTC_DCHECK(media_send_channel());
+
+  // We don't need to call OnDemuxerCriteriaUpdatePending/Complete because
+  // there's no previous criteria to worry about.
+  if (!rtp_transport_->RegisterRtpDemuxerSink(demuxer_criteria_, this)) {
+    return false;
+  }
+  rtp_transport_->SubscribeReadyToSend(
+      this, [this](bool ready) { OnTransportReadyToSend(ready); });
+  rtp_transport_->SubscribeNetworkRouteChanged(
+      this, [this](absl::optional<rtc::NetworkRoute> route) {
+        OnNetworkRouteChanged(route);
+      });
+  rtp_transport_->SubscribeWritableState(
+      this, [this](bool state) { OnWritableState(state); });
+  rtp_transport_->SubscribeSentPacket(
+      this,
+      [this](const rtc::SentPacket& packet) { SignalSentPacket_n(packet); });
+  return true;
+}
+
+// Undoes ConnectToRtpTransport_n: unregisters the demuxer sink, drops all
+// subscriptions, clears `rtp_transport_` and detaches the media channels'
+// network interface. Network thread only.
+void BaseChannel::DisconnectFromRtpTransport_n() {
+  RTC_DCHECK(rtp_transport_);
+  RTC_DCHECK(media_send_channel());
+  rtp_transport_->UnregisterRtpDemuxerSink(this);
+  rtp_transport_->UnsubscribeReadyToSend(this);
+  rtp_transport_->UnsubscribeNetworkRouteChanged(this);
+  rtp_transport_->UnsubscribeWritableState(this);
+  rtp_transport_->UnsubscribeSentPacket(this);
+  rtp_transport_ = nullptr;
+  media_send_channel()->SetInterface(nullptr);
+  media_receive_channel()->SetInterface(nullptr);
+}
+
+// Swaps the underlying RTP transport (may be null to detach). Disconnects
+// from the old transport, connects to the new one, then replays cached state
+// (ready-to-send, writability, socket options) onto it. Network thread only.
+// Returns false if the demuxer sink registration on the new transport fails.
+bool BaseChannel::SetRtpTransport(webrtc::RtpTransportInternal* rtp_transport) {
+  TRACE_EVENT0("webrtc", "BaseChannel::SetRtpTransport");
+  RTC_DCHECK_RUN_ON(network_thread());
+  if (rtp_transport == rtp_transport_) {
+    return true;
+  }
+
+  if (rtp_transport_) {
+    DisconnectFromRtpTransport_n();
+    // Clear the cached header extensions on the worker.
+    worker_thread_->PostTask(SafeTask(alive_, [this] {
+      RTC_DCHECK_RUN_ON(worker_thread());
+      rtp_header_extensions_.clear();
+    }));
+  }
+
+  rtp_transport_ = rtp_transport;
+  if (rtp_transport_) {
+    if (!ConnectToRtpTransport_n()) {
+      return false;
+    }
+
+    RTC_DCHECK(!media_send_channel()->HasNetworkInterface());
+    media_send_channel()->SetInterface(this);
+    media_receive_channel()->SetInterface(this);
+
+    media_send_channel()->OnReadyToSend(rtp_transport_->IsReadyToSend());
+    UpdateWritableState_n();
+
+    // Set the cached socket options.
+    for (const auto& pair : socket_options_) {
+      rtp_transport_->SetRtpOption(pair.first, pair.second);
+    }
+    // RTCP options only apply when RTCP is not muxed onto the RTP transport.
+    if (!rtp_transport_->rtcp_mux_enabled()) {
+      for (const auto& pair : rtcp_socket_options_) {
+        rtp_transport_->SetRtcpOption(pair.first, pair.second);
+      }
+    }
+  }
+
+  return true;
+}
+
+// Enables/disables media from the signaling thread. The signaling-side flag
+// (`enabled_s_`) changes synchronously; the worker-side flag (`enabled_`) is
+// updated via a posted task, so the two are briefly out of sync by design.
+void BaseChannel::Enable(bool enable) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+
+  if (enable == enabled_s_)
+    return;
+
+  enabled_s_ = enable;
+
+  worker_thread_->PostTask(SafeTask(alive_, [this, enable] {
+    RTC_DCHECK_RUN_ON(worker_thread());
+    // Sanity check to make sure that enabled_ and enabled_s_
+    // stay in sync.
+    RTC_DCHECK_NE(enabled_, enable);
+    if (enable) {
+      EnableMedia_w();
+    } else {
+      DisableMedia_w();
+    }
+  }));
+}
+
+// Worker-thread entry point for applying a local description; delegates to
+// the media-specific SetLocalContent_w. On failure, `error_desc` is filled.
+bool BaseChannel::SetLocalContent(const MediaContentDescription* content,
+                                  SdpType type,
+                                  std::string& error_desc) {
+  RTC_DCHECK_RUN_ON(worker_thread());
+  TRACE_EVENT0("webrtc", "BaseChannel::SetLocalContent");
+  return SetLocalContent_w(content, type, error_desc);
+}
+
+// Worker-thread entry point for applying a remote description; delegates to
+// the media-specific SetRemoteContent_w. On failure, `error_desc` is filled.
+bool BaseChannel::SetRemoteContent(const MediaContentDescription* content,
+                                   SdpType type,
+                                   std::string& error_desc) {
+  RTC_DCHECK_RUN_ON(worker_thread());
+  TRACE_EVENT0("webrtc", "BaseChannel::SetRemoteContent");
+  return SetRemoteContent_w(content, type, error_desc);
+}
+
+// Toggles demuxing of incoming packets by payload type (as opposed to by
+// SSRC/MID). Worker thread; may block on the network thread internally.
+bool BaseChannel::SetPayloadTypeDemuxingEnabled(bool enabled) {
+  // TODO(bugs.webrtc.org/11993): The demuxer state needs to be managed on the
+  // network thread. At the moment there's a workaround for inconsistent state
+  // between the worker and network thread because of this (see
+  // OnDemuxerCriteriaUpdatePending elsewhere in this file) and
+  // SetPayloadTypeDemuxingEnabled_w has a BlockingCall over to the network
+  // thread to apply state updates.
+  RTC_DCHECK_RUN_ON(worker_thread());
+  TRACE_EVENT0("webrtc", "BaseChannel::SetPayloadTypeDemuxingEnabled");
+  return SetPayloadTypeDemuxingEnabled_w(enabled);
+}
+
+// True when media may be sent: the channel is enabled, the local direction
+// includes send, the remote direction includes recv (i.e. the remote is
+// willing to receive), and the transport has been writable at least once.
+bool BaseChannel::IsReadyToSendMedia_w() const {
+  // Send outgoing data if we are enabled, have local and remote content,
+  // and we have had some form of connectivity.
+  return enabled_ &&
+         webrtc::RtpTransceiverDirectionHasRecv(remote_content_direction_) &&
+         webrtc::RtpTransceiverDirectionHasSend(local_content_direction_) &&
+         was_ever_writable_;
+}
+
+// Sends an RTP packet (convenience wrapper around the rtcp/rtp overload).
+bool BaseChannel::SendPacket(rtc::CopyOnWriteBuffer* packet,
+                             const rtc::PacketOptions& options) {
+  return SendPacket(false, packet, options);
+}
+
+// Sends an RTCP packet (convenience wrapper around the rtcp/rtp overload).
+bool BaseChannel::SendRtcp(rtc::CopyOnWriteBuffer* packet,
+                           const rtc::PacketOptions& options) {
+  return SendPacket(true, packet, options);
+}
+
+// Applies a socket option to the RTP or RTCP leg of the transport and caches
+// it so it can be replayed onto a future transport in SetRtpTransport.
+// Network thread only. Returns the transport's result, or -1 for an unknown
+// socket type.
+int BaseChannel::SetOption(SocketType type,
+                           rtc::Socket::Option opt,
+                           int value) {
+  RTC_DCHECK_RUN_ON(network_thread());
+  RTC_DCHECK(network_initialized());
+  RTC_DCHECK(rtp_transport_);
+  switch (type) {
+    case ST_RTP:
+      socket_options_.push_back(
+          std::pair<rtc::Socket::Option, int>(opt, value));
+      return rtp_transport_->SetRtpOption(opt, value);
+    case ST_RTCP:
+      rtcp_socket_options_.push_back(
+          std::pair<rtc::Socket::Option, int>(opt, value));
+      return rtp_transport_->SetRtcpOption(opt, value);
+  }
+  return -1;
+}
+
+// Transport writable-state callback (network thread); forwards to the
+// channel writable/not-writable state handlers.
+void BaseChannel::OnWritableState(bool writable) {
+  RTC_DCHECK_RUN_ON(network_thread());
+  RTC_DCHECK(network_initialized());
+  if (writable) {
+    ChannelWritable_n();
+  } else {
+    ChannelNotWritable_n();
+  }
+}
+
+// Transport network-route callback (network thread). A missing route is
+// forwarded as a default-constructed rtc::NetworkRoute.
+void BaseChannel::OnNetworkRouteChanged(
+    absl::optional<rtc::NetworkRoute> network_route) {
+  RTC_DCHECK_RUN_ON(network_thread());
+  RTC_DCHECK(network_initialized());
+
+  RTC_LOG(LS_INFO) << "Network route changed for " << ToString();
+
+  rtc::NetworkRoute new_route;
+  if (network_route) {
+    new_route = *(network_route);
+  }
+  // Note: When the RTCP-muxing is not enabled, RTCP transport and RTP transport
+  // use the same transport name and MediaChannel::OnNetworkRouteChanged cannot
+  // work correctly. Intentionally leave it broken to simplify the code and
+  // encourage the users to stop using non-muxing RTCP.
+  media_send_channel()->OnNetworkRouteChanged(transport_name(), new_route);
+}
+
+// Installs (or clears, with a null callback) the one-shot callback fired on
+// the first received packet (see OnRtpPacket). The DCHECK enforces that an
+// existing callback is only ever replaced by null. Network thread only.
+void BaseChannel::SetFirstPacketReceivedCallback(
+    std::function<void()> callback) {
+  RTC_DCHECK_RUN_ON(network_thread());
+  RTC_DCHECK(!on_first_packet_received_ || !callback);
+
+  // TODO(bugs.webrtc.org/11992): Rename SetFirstPacketReceivedCallback to
+  // something that indicates network thread initialization/uninitialization and
+  // call Init_n() / Deinit_n() respectively.
+  // if (!callback)
+  //   Deinit_n();
+
+  on_first_packet_received_ = std::move(callback);
+}
+
+// Transport ready-to-send callback (network thread); forwarded straight to
+// the send media channel.
+void BaseChannel::OnTransportReadyToSend(bool ready) {
+  RTC_DCHECK_RUN_ON(network_thread());
+  RTC_DCHECK(network_initialized());
+  media_send_channel()->OnReadyToSend(ready);
+}
+
+// Sends an RTP or RTCP packet over the transport (network thread). Drops the
+// packet (returns false) when there is no writable transport, the size is
+// invalid, or SRTP is required but not yet active for an RTP packet.
+bool BaseChannel::SendPacket(bool rtcp,
+                             rtc::CopyOnWriteBuffer* packet,
+                             const rtc::PacketOptions& options) {
+  RTC_DCHECK_RUN_ON(network_thread());
+  RTC_DCHECK(network_initialized());
+  TRACE_EVENT0("webrtc", "BaseChannel::SendPacket");
+
+  // Until all the code is migrated to use RtpPacketType instead of bool.
+  RtpPacketType packet_type = rtcp ? RtpPacketType::kRtcp : RtpPacketType::kRtp;
+
+  // Ensure we have a place to send this packet before doing anything. We might
+  // get RTCP packets that we don't intend to send. If we've negotiated RTCP
+  // mux, send RTCP over the RTP transport.
+  if (!rtp_transport_ || !rtp_transport_->IsWritable(rtcp)) {
+    return false;
+  }
+
+  // Protect ourselves against crazy data.
+  if (!IsValidRtpPacketSize(packet_type, packet->size())) {
+    RTC_LOG(LS_ERROR) << "Dropping outgoing " << ToString() << " "
+                      << RtpPacketTypeToString(packet_type)
+                      << " packet: wrong size=" << packet->size();
+    return false;
+  }
+
+  if (!srtp_active()) {
+    if (srtp_required_) {
+      // The audio/video engines may attempt to send RTCP packets as soon as the
+      // streams are created, so don't treat this as an error for RTCP.
+      // See: https://bugs.chromium.org/p/webrtc/issues/detail?id=6809
+      // However, there shouldn't be any RTP packets sent before SRTP is set
+      // up (and SetSend(true) is called).
+      RTC_DCHECK(rtcp) << "Can't send outgoing RTP packet for " << ToString()
+                       << " when SRTP is inactive and crypto is required";
+      return false;
+    }
+
+    RTC_DLOG(LS_WARNING) << "Sending an " << (rtcp ? "RTCP" : "RTP")
+                         << " packet without encryption for " << ToString()
+                         << ".";
+  }
+
+  // PF_SRTP_BYPASS: the packet is handed to the transport unprotected; the
+  // transport applies SRTP protection itself if configured.
+  return rtcp ? rtp_transport_->SendRtcpPacket(packet, options, PF_SRTP_BYPASS)
+              : rtp_transport_->SendRtpPacket(packet, options, PF_SRTP_BYPASS);
+}
+
+// RtpDemuxer sink callback: delivers a demuxed incoming RTP packet to the
+// receive media channel (network thread). Fires the one-shot first-packet
+// callback, and drops packets that arrive before required SRTP is active.
+void BaseChannel::OnRtpPacket(const webrtc::RtpPacketReceived& parsed_packet) {
+  RTC_DCHECK_RUN_ON(network_thread());
+  RTC_DCHECK(network_initialized());
+
+  if (on_first_packet_received_) {
+    on_first_packet_received_();
+    on_first_packet_received_ = nullptr;
+  }
+
+  if (!srtp_active() && srtp_required_) {
+    // Our session description indicates that SRTP is required, but we got a
+    // packet before our SRTP filter is active. This means either that
+    // a) we got SRTP packets before we received the SDES keys, in which case
+    //    we can't decrypt it anyway, or
+    // b) we got SRTP packets before DTLS completed on both the RTP and RTCP
+    //    transports, so we haven't yet extracted keys, even if DTLS did
+    //    complete on the transport that the packets are being sent on. It's
+    //    really good practice to wait for both RTP and RTCP to be good to go
+    //    before sending media, to prevent weird failure modes, so it's fine
+    //    for us to just eat packets here. This is all sidestepped if RTCP mux
+    //    is used anyway.
+    RTC_LOG(LS_WARNING) << "Can't process incoming RTP packet when "
+                           "SRTP is inactive and crypto is required "
+                        << ToString();
+    return;
+  }
+  media_receive_channel()->OnPacketReceived(parsed_packet);
+}
+
+// Applies demuxer criteria and/or header-extension updates to the transport
+// in a single blocking hop to the network thread (worker thread caller).
+// `extensions` is dropped if identical to the cached set; returns false and
+// fills `error_desc` if re-registering the demuxer sink fails.
+bool BaseChannel::MaybeUpdateDemuxerAndRtpExtensions_w(
+    bool update_demuxer,
+    absl::optional<RtpHeaderExtensions> extensions,
+    std::string& error_desc) {
+  if (extensions) {
+    if (rtp_header_extensions_ == extensions) {
+      extensions.reset();  // No need to update header extensions.
+    } else {
+      rtp_header_extensions_ = *extensions;
+    }
+  }
+
+  if (!update_demuxer && !extensions)
+    return true;  // No update needed.
+
+  // TODO(bugs.webrtc.org/13536): See if we can do this asynchronously.
+
+  // Pending/Complete bracket the network-thread update so the receive channel
+  // can account for packets demuxed under the old criteria.
+  if (update_demuxer)
+    media_receive_channel()->OnDemuxerCriteriaUpdatePending();
+
+  bool success = network_thread()->BlockingCall([&]() mutable {
+    RTC_DCHECK_RUN_ON(network_thread());
+    // NOTE: This doesn't take the BUNDLE case in account meaning the RTP header
+    // extension maps are not merged when BUNDLE is enabled. This is fine
+    // because the ID for MID should be consistent among all the RTP transports.
+    if (extensions)
+      rtp_transport_->UpdateRtpHeaderExtensionMap(*extensions);
+
+    if (!update_demuxer)
+      return true;
+
+    if (!rtp_transport_->RegisterRtpDemuxerSink(demuxer_criteria_, this)) {
+      error_desc =
+          StringFormat("Failed to apply demuxer criteria for '%s': '%s'.",
+                       mid().c_str(), demuxer_criteria_.ToString().c_str());
+      return false;
+    }
+    return true;
+  });
+
+  if (update_demuxer)
+    media_receive_channel()->OnDemuxerCriteriaUpdateComplete();
+
+  return success;
+}
+
+// Re-registers this channel as the demuxer sink with the current criteria.
+// Worker thread; blocks on the network thread. Returns false if the transport
+// is already gone or the registration fails.
+bool BaseChannel::RegisterRtpDemuxerSink_w() {
+  media_receive_channel()->OnDemuxerCriteriaUpdatePending();
+  // Copy demuxer criteria, since they're a worker-thread variable
+  // and we want to pass them to the network thread
+  bool ret = network_thread_->BlockingCall(
+      [this, demuxer_criteria = demuxer_criteria_] {
+        RTC_DCHECK_RUN_ON(network_thread());
+        if (!rtp_transport_) {
+          // Transport was disconnected before attempting to update the
+          // criteria. This can happen while setting the remote description.
+          // See chromium:1295469 for an example.
+          return false;
+        }
+        // Note that RegisterRtpDemuxerSink first unregisters the sink if
+        // already registered. So this will change the state of the class
+        // whether the call succeeds or not.
+        return rtp_transport_->RegisterRtpDemuxerSink(demuxer_criteria, this);
+      });
+
+  media_receive_channel()->OnDemuxerCriteriaUpdateComplete();
+
+  return ret;
+}
+
+// Marks the channel enabled on the worker thread and refreshes send/recv
+// state. No-op if already enabled.
+void BaseChannel::EnableMedia_w() {
+  if (enabled_)
+    return;
+
+  RTC_LOG(LS_INFO) << "Channel enabled: " << ToString();
+  enabled_ = true;
+  UpdateMediaSendRecvState_w();
+}
+
+// Marks the channel disabled on the worker thread and refreshes send/recv
+// state. No-op if already disabled.
+void BaseChannel::DisableMedia_w() {
+  if (!enabled_)
+    return;
+
+  RTC_LOG(LS_INFO) << "Channel disabled: " << ToString();
+  enabled_ = false;
+  UpdateMediaSendRecvState_w();
+}
+
+// Recomputes channel writability from the transport: the channel is writable
+// only when both the RTP and RTCP legs are writable. Network thread only.
+void BaseChannel::UpdateWritableState_n() {
+  TRACE_EVENT0("webrtc", "BaseChannel::UpdateWritableState_n");
+  if (rtp_transport_->IsWritable(/*rtcp=*/true) &&
+      rtp_transport_->IsWritable(/*rtcp=*/false)) {
+    ChannelWritable_n();
+  } else {
+    ChannelNotWritable_n();
+  }
+}
+
+// Transitions the channel to writable (network thread). The first transition
+// also posts a one-time task to the worker to set `was_ever_writable_` and
+// refresh the media send/recv state.
+void BaseChannel::ChannelWritable_n() {
+  TRACE_EVENT0("webrtc", "BaseChannel::ChannelWritable_n");
+  if (writable_) {
+    return;
+  }
+  writable_ = true;
+  RTC_LOG(LS_INFO) << "Channel writable (" << ToString() << ")"
+                   << (was_ever_writable_n_ ? "" : " for the first time");
+  // We only have to do this PostTask once, when first transitioning to
+  // writable.
+  if (!was_ever_writable_n_) {
+    worker_thread_->PostTask(SafeTask(alive_, [this] {
+      RTC_DCHECK_RUN_ON(worker_thread());
+      was_ever_writable_ = true;
+      UpdateMediaSendRecvState_w();
+    }));
+  }
+  was_ever_writable_n_ = true;
+}
+
+// Transitions the channel to not-writable (network thread). Note that
+// `was_ever_writable_n_` is intentionally left set.
+void BaseChannel::ChannelNotWritable_n() {
+  TRACE_EVENT0("webrtc", "BaseChannel::ChannelNotWritable_n");
+  if (!writable_) {
+    return;
+  }
+  writable_ = false;
+  RTC_LOG(LS_INFO) << "Channel not writable (" << ToString() << ")";
+}
+
+// Worker-thread implementation of payload-type demuxing toggling. Disabling
+// clears payload types from the demux criteria (and resets unsignaled recv
+// streams); enabling re-adds the remembered `payload_types_`. Blocks on the
+// network thread only if the criteria actually changed.
+bool BaseChannel::SetPayloadTypeDemuxingEnabled_w(bool enabled) {
+  RTC_LOG_THREAD_BLOCK_COUNT();
+
+  if (enabled == payload_type_demuxing_enabled_) {
+    return true;
+  }
+
+  payload_type_demuxing_enabled_ = enabled;
+
+  bool config_changed = false;
+
+  if (!enabled) {
+    // TODO(crbug.com/11477): This will remove *all* unsignaled streams (those
+    // without an explicitly signaled SSRC), which may include streams that
+    // were matched to this channel by MID or RID. Ideally we'd remove only the
+    // streams that were matched based on payload type alone, but currently
+    // there is no straightforward way to identify those streams.
+    media_receive_channel()->ResetUnsignaledRecvStream();
+    if (!demuxer_criteria_.payload_types().empty()) {
+      config_changed = true;
+      demuxer_criteria_.payload_types().clear();
+    }
+  } else if (!payload_types_.empty()) {
+    for (const auto& type : payload_types_) {
+      if (demuxer_criteria_.payload_types().insert(type).second) {
+        config_changed = true;
+      }
+    }
+  } else {
+    RTC_DCHECK(demuxer_criteria_.payload_types().empty());
+  }
+
+  RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(0);
+
+  if (!config_changed)
+    return true;
+
+  // Note: This synchronously hops to the network thread.
+  return RegisterRtpDemuxerSink_w();
+}
+
+// Reconciles the local send streams with `streams` from the new local
+// description: removes vanished streams, keeps existing ones unchanged, and
+// adds new ones (generating SSRCs for RID-only simulcast streams). Collects
+// all failures into `error_desc` and returns false if any occurred.
+bool BaseChannel::UpdateLocalStreams_w(const std::vector<StreamParams>& streams,
+                                       SdpType type,
+                                       std::string& error_desc) {
+  // In the case of RIDs (where SSRCs are not negotiated), this method will
+  // generate an SSRC for each layer in StreamParams. That representation will
+  // be stored internally in `local_streams_`.
+  // In subsequent offers, the same stream can appear in `streams` again
+  // (without the SSRCs), so it should be looked up using RIDs (if available)
+  // and then by primary SSRC.
+  // In both scenarios, it is safe to assume that the media channel will be
+  // created with a StreamParams object with SSRCs. However, it is not safe to
+  // assume that `local_streams_` will always have SSRCs as there are scenarios
+  // in which neither SSRCs or RIDs are negotiated.
+
+  // Check for streams that have been removed.
+  bool ret = true;
+  for (const StreamParams& old_stream : local_streams_) {
+    if (!old_stream.has_ssrcs() ||
+        GetStream(streams, StreamFinder(&old_stream))) {
+      continue;
+    }
+    if (!media_send_channel()->RemoveSendStream(old_stream.first_ssrc())) {
+      error_desc = StringFormat(
+          "Failed to remove send stream with ssrc %u from m-section with "
+          "mid='%s'.",
+          old_stream.first_ssrc(), mid().c_str());
+      ret = false;
+    }
+  }
+  // Check for new streams.
+  std::vector<StreamParams> all_streams;
+  for (const StreamParams& stream : streams) {
+    StreamParams* existing = GetStream(local_streams_, StreamFinder(&stream));
+    if (existing) {
+      // Parameters cannot change for an existing stream.
+      all_streams.push_back(*existing);
+      continue;
+    }
+
+    all_streams.push_back(stream);
+    StreamParams& new_stream = all_streams.back();
+
+    // Streams with neither SSRCs nor RIDs are kept but not given to the media
+    // channel.
+    if (!new_stream.has_ssrcs() && !new_stream.has_rids()) {
+      continue;
+    }
+
+    RTC_DCHECK(new_stream.has_ssrcs() || new_stream.has_rids());
+    if (new_stream.has_ssrcs() && new_stream.has_rids()) {
+      error_desc = StringFormat(
+          "Failed to add send stream: %u into m-section with mid='%s'. Stream "
+          "has both SSRCs and RIDs.",
+          new_stream.first_ssrc(), mid().c_str());
+      ret = false;
+      continue;
+    }
+
+    // At this point we use the legacy simulcast group in StreamParams to
+    // indicate that we want multiple layers to the media channel.
+    if (!new_stream.has_ssrcs()) {
+      // TODO(bugs.webrtc.org/10250): Indicate if flex is desired here.
+      new_stream.GenerateSsrcs(new_stream.rids().size(), /* rtx = */ true,
+                               /* flex_fec = */ false, ssrc_generator_);
+    }
+
+    if (media_send_channel()->AddSendStream(new_stream)) {
+      RTC_LOG(LS_INFO) << "Add send stream ssrc: " << new_stream.ssrcs[0]
+                       << " into " << ToString();
+    } else {
+      error_desc = StringFormat(
+          "Failed to add send stream ssrc: %u into m-section with mid='%s'",
+          new_stream.first_ssrc(), mid().c_str());
+      ret = false;
+    }
+  }
+  local_streams_ = all_streams;
+  return ret;
+}
+
+// Reconciles the remote receive streams with the new remote description:
+// removes vanished streams (including cached unsignaled-stream params), adds
+// new ones, rebuilds the SSRC demux criteria, and re-registers the demuxer
+// sink if anything changed. Returns false (with `error_desc` set) on the
+// first failure.
+bool BaseChannel::UpdateRemoteStreams_w(const MediaContentDescription* content,
+                                        SdpType type,
+                                        std::string& error_desc) {
+  RTC_LOG_THREAD_BLOCK_COUNT();
+  bool needs_re_registration = false;
+  if (!webrtc::RtpTransceiverDirectionHasSend(content->direction())) {
+    RTC_DLOG(LS_VERBOSE) << "UpdateRemoteStreams_w: remote side will not send "
+                            "- disable payload type demuxing for "
+                         << ToString();
+    if (ClearHandledPayloadTypes()) {
+      needs_re_registration = payload_type_demuxing_enabled_;
+    }
+  }
+
+  const std::vector<StreamParams>& streams = content->streams();
+  const bool new_has_unsignaled_ssrcs = HasStreamWithNoSsrcs(streams);
+  const bool old_has_unsignaled_ssrcs = HasStreamWithNoSsrcs(remote_streams_);
+
+  // Check for streams that have been removed.
+  for (const StreamParams& old_stream : remote_streams_) {
+    // If we no longer have an unsignaled stream, we would like to remove
+    // the unsignaled stream params that are cached.
+    if (!old_stream.has_ssrcs() && !new_has_unsignaled_ssrcs) {
+      media_receive_channel()->ResetUnsignaledRecvStream();
+      RTC_LOG(LS_INFO) << "Reset unsignaled remote stream for " << ToString()
+                       << ".";
+    } else if (old_stream.has_ssrcs() &&
+               !GetStreamBySsrc(streams, old_stream.first_ssrc())) {
+      if (media_receive_channel()->RemoveRecvStream(old_stream.first_ssrc())) {
+        RTC_LOG(LS_INFO) << "Remove remote ssrc: " << old_stream.first_ssrc()
+                         << " from " << ToString() << ".";
+      } else {
+        error_desc = StringFormat(
+            "Failed to remove remote stream with ssrc %u from m-section with "
+            "mid='%s'.",
+            old_stream.first_ssrc(), mid().c_str());
+        return false;
+      }
+    }
+  }
+
+  // Check for new streams.
+  webrtc::flat_set<uint32_t> ssrcs;
+  for (const StreamParams& new_stream : streams) {
+    // We allow a StreamParams with an empty list of SSRCs, in which case the
+    // MediaChannel will cache the parameters and use them for any unsignaled
+    // stream received later.
+    if ((!new_stream.has_ssrcs() && !old_has_unsignaled_ssrcs) ||
+        !GetStreamBySsrc(remote_streams_, new_stream.first_ssrc())) {
+      if (media_receive_channel()->AddRecvStream(new_stream)) {
+        RTC_LOG(LS_INFO) << "Add remote ssrc: "
+                         << (new_stream.has_ssrcs()
+                                 ? std::to_string(new_stream.first_ssrc())
+                                 : "unsignaled")
+                         << " to " << ToString();
+      } else {
+        error_desc =
+            StringFormat("Failed to add remote stream ssrc: %s to %s",
+                         new_stream.has_ssrcs()
+                             ? std::to_string(new_stream.first_ssrc()).c_str()
+                             : "unsignaled",
+                         ToString().c_str());
+        return false;
+      }
+    }
+    // Update the receiving SSRCs.
+    ssrcs.insert(new_stream.ssrcs.begin(), new_stream.ssrcs.end());
+  }
+
+  // Only re-register if the SSRC set actually changed.
+  if (demuxer_criteria_.ssrcs() != ssrcs) {
+    demuxer_criteria_.ssrcs() = std::move(ssrcs);
+    needs_re_registration = true;
+  }
+
+  RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(0);
+
+  // Re-register the sink to update after changing the demuxer criteria.
+  if (needs_re_registration && !RegisterRtpDemuxerSink_w()) {
+    error_desc = StringFormat("Failed to set up audio demuxing for mid='%s'.",
+                              mid().c_str());
+    return false;
+  }
+
+  remote_streams_ = streams;
+
+  set_remote_content_direction(content->direction());
+  UpdateMediaSendRecvState_w();
+
+  RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(1);
+
+  return true;
+}
+
+// Deduplicates header extensions using this channel's encrypted-extension
+// preference (chosen from crypto options in the constructor).
+RtpHeaderExtensions BaseChannel::GetDeduplicatedRtpHeaderExtensions(
+    const RtpHeaderExtensions& extensions) {
+  return webrtc::RtpExtension::DeduplicateHeaderExtensions(extensions,
+                                                           extensions_filter_);
+}
+
+// Records `payload_type` for payload-type demuxing. Returns true only when
+// the active demux criteria were actually modified (i.e. demuxing is enabled
+// and the type was new), which tells the caller a re-registration is needed.
+bool BaseChannel::MaybeAddHandledPayloadType(int payload_type) {
+  bool demuxer_criteria_modified = false;
+  if (payload_type_demuxing_enabled_) {
+    demuxer_criteria_modified = demuxer_criteria_.payload_types()
+                                    .insert(static_cast<uint8_t>(payload_type))
+                                    .second;
+  }
+  // Even if payload type demuxing is currently disabled, we need to remember
+  // the payload types in case it's re-enabled later.
+  payload_types_.insert(static_cast<uint8_t>(payload_type));
+  return demuxer_criteria_modified;
+}
+
+// Clears both the active payload-type demux criteria and the remembered
+// payload types. Returns true if the active criteria were non-empty (i.e. the
+// clear changed the demuxer configuration).
+bool BaseChannel::ClearHandledPayloadTypes() {
+  const bool was_empty = demuxer_criteria_.payload_types().empty();
+  demuxer_criteria_.payload_types().clear();
+  payload_types_.clear();
+  return !was_empty;
+}
+
+// Transport sent-packet callback (network thread); forwarded to the send
+// media channel, e.g. for send-side bandwidth estimation bookkeeping.
+void BaseChannel::SignalSentPacket_n(const rtc::SentPacket& sent_packet) {
+  RTC_DCHECK_RUN_ON(network_thread());
+  RTC_DCHECK(network_initialized());
+  media_send_channel()->OnPacketSent(sent_packet);
+}
+
+// Audio specialization of BaseChannel; simply forwards everything to the
+// base constructor.
+VoiceChannel::VoiceChannel(
+    webrtc::TaskQueueBase* worker_thread,
+    rtc::Thread* network_thread,
+    webrtc::TaskQueueBase* signaling_thread,
+    std::unique_ptr<VoiceMediaSendChannelInterface> media_send_channel,
+    std::unique_ptr<VoiceMediaReceiveChannelInterface> media_receive_channel,
+    absl::string_view mid,
+    bool srtp_required,
+    webrtc::CryptoOptions crypto_options,
+    UniqueRandomIdGenerator* ssrc_generator)
+    : BaseChannel(worker_thread,
+                  network_thread,
+                  signaling_thread,
+                  std::move(media_send_channel),
+                  std::move(media_receive_channel),
+                  mid,
+                  srtp_required,
+                  crypto_options,
+                  ssrc_generator) {}
+
+// Disables media before base-class teardown; DisableMedia_w ends up calling
+// the virtual UpdateMediaSendRecvState_w, so it must run here rather than in
+// ~BaseChannel.
+VoiceChannel::~VoiceChannel() {
+  TRACE_EVENT0("webrtc", "VoiceChannel::~VoiceChannel");
+  // this can't be done in the base class, since it calls a virtual
+  DisableMedia_w();
+}
+
+// Recomputes audio playout and send state from the enabled flag, negotiated
+// directions and writability (worker thread).
+void VoiceChannel::UpdateMediaSendRecvState_w() {
+  // Render incoming data if we're the active call, and we have the local
+  // content. We receive data on the default channel and multiplexed streams.
+  bool receive = enabled() && webrtc::RtpTransceiverDirectionHasRecv(
+                                  local_content_direction());
+  media_receive_channel()->SetPlayout(receive);
+
+  // Send outgoing data if we're the active call, we have the remote content,
+  // and we have had some form of connectivity.
+  bool send = IsReadyToSendMedia_w();
+  media_send_channel()->SetSend(send);
+
+  RTC_LOG(LS_INFO) << "Changing voice state, recv=" << receive
+                   << " send=" << send << " for " << ToString();
+}
+
+// Applies a local audio description: sets receiver parameters, registers
+// handled payload types for demuxing, updates local send streams, and
+// finally pushes demuxer/header-extension changes to the network thread.
+bool VoiceChannel::SetLocalContent_w(const MediaContentDescription* content,
+                                     SdpType type,
+                                     std::string& error_desc) {
+  TRACE_EVENT0("webrtc", "VoiceChannel::SetLocalContent_w");
+  RTC_DLOG(LS_INFO) << "Setting local voice description for " << ToString();
+
+  RTC_LOG_THREAD_BLOCK_COUNT();
+
+  RtpHeaderExtensions header_extensions =
+      GetDeduplicatedRtpHeaderExtensions(content->rtp_header_extensions());
+  // NOTE(review): this flag is never set to false in this function, so the
+  // header extensions are always passed along below.
+  bool update_header_extensions = true;
+  media_send_channel()->SetExtmapAllowMixed(content->extmap_allow_mixed());
+
+  AudioReceiverParameters recv_params = last_recv_params_;
+  MediaChannelParametersFromMediaDescription(
+      content->as_audio(), header_extensions,
+      webrtc::RtpTransceiverDirectionHasRecv(content->direction()),
+      &recv_params);
+
+  if (!media_receive_channel()->SetReceiverParameters(recv_params)) {
+    error_desc = StringFormat(
+        "Failed to set local audio description recv parameters for m-section "
+        "with mid='%s'.",
+        mid().c_str());
+    return false;
+  }
+
+  // Register each negotiated codec's payload type for demuxing, but only if
+  // we can actually receive.
+  bool criteria_modified = false;
+  if (webrtc::RtpTransceiverDirectionHasRecv(content->direction())) {
+    for (const Codec& codec : content->codecs()) {
+      if (MaybeAddHandledPayloadType(codec.id)) {
+        criteria_modified = true;
+      }
+    }
+  }
+
+  last_recv_params_ = recv_params;
+
+  if (!UpdateLocalStreams_w(content->streams(), type, error_desc)) {
+    RTC_DCHECK(!error_desc.empty());
+    return false;
+  }
+
+  set_local_content_direction(content->direction());
+  UpdateMediaSendRecvState_w();
+
+  RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(0);
+
+  bool success = MaybeUpdateDemuxerAndRtpExtensions_w(
+      criteria_modified,
+      update_header_extensions
+          ? absl::optional<RtpHeaderExtensions>(std::move(header_extensions))
+          : absl::nullopt,
+      error_desc);
+
+  RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(1);
+
+  return success;
+}
+
+// Applies a remote audio description: sets sender parameters (derived from
+// what the remote can receive), mirrors NACK/RTT settings onto the receive
+// channel, then reconciles remote streams.
+bool VoiceChannel::SetRemoteContent_w(const MediaContentDescription* content,
+                                      SdpType type,
+                                      std::string& error_desc) {
+  TRACE_EVENT0("webrtc", "VoiceChannel::SetRemoteContent_w");
+  RTC_LOG(LS_INFO) << "Setting remote voice description for " << ToString();
+
+  AudioSenderParameter send_params = last_send_params_;
+  RtpSendParametersFromMediaDescription(content->as_audio(),
+                                        extensions_filter(), &send_params);
+  send_params.mid = mid();
+
+  bool parameters_applied =
+      media_send_channel()->SetSenderParameters(send_params);
+  if (!parameters_applied) {
+    error_desc = StringFormat(
+        "Failed to set remote audio description send parameters for m-section "
+        "with mid='%s'.",
+        mid().c_str());
+    return false;
+  }
+  // Update Receive channel based on Send channel's codec information.
+  // TODO(bugs.webrtc.org/14911): This is silly. Stop doing it.
+  media_receive_channel()->SetReceiveNackEnabled(
+      media_send_channel()->SenderNackEnabled());
+  media_receive_channel()->SetReceiveNonSenderRttEnabled(
+      media_send_channel()->SenderNonSenderRttEnabled());
+  last_send_params_ = send_params;
+
+  return UpdateRemoteStreams_w(content, type, error_desc);
+}
+
+VideoChannel::VideoChannel(
+ webrtc::TaskQueueBase* worker_thread,
+ rtc::Thread* network_thread,
+ webrtc::TaskQueueBase* signaling_thread,
+ std::unique_ptr<VideoMediaSendChannelInterface> media_send_channel,
+ std::unique_ptr<VideoMediaReceiveChannelInterface> media_receive_channel,
+ absl::string_view mid,
+ bool srtp_required,
+ webrtc::CryptoOptions crypto_options,
+ UniqueRandomIdGenerator* ssrc_generator)
+ : BaseChannel(worker_thread,
+ network_thread,
+ signaling_thread,
+ std::move(media_send_channel),
+ std::move(media_receive_channel),
+ mid,
+ srtp_required,
+ crypto_options,
+ ssrc_generator) {
+ // TODO(bugs.webrtc.org/13931): Remove when values are set
+ // in a more sensible fashion
+ send_channel()->SetSendCodecChangedCallback([this]() {
+ // Adjust receive streams based on send codec.
+ receive_channel()->SetReceiverFeedbackParameters(
+ send_channel()->SendCodecHasLntf(), send_channel()->SendCodecHasNack(),
+ send_channel()->SendCodecRtcpMode(),
+ send_channel()->SendCodecRtxTime());
+ });
+}
+
+VideoChannel::~VideoChannel() {
+ TRACE_EVENT0("webrtc", "VideoChannel::~VideoChannel");
+ // this can't be done in the base class, since it calls a virtual
+ DisableMedia_w();
+}
+
+void VideoChannel::UpdateMediaSendRecvState_w() {
+  // Receive incoming data if we're enabled and the local description allows
+  // receiving; send outgoing data only once fully ready (IsReadyToSendMedia_w).
+ bool receive = enabled() && webrtc::RtpTransceiverDirectionHasRecv(
+ local_content_direction());
+ media_receive_channel()->SetReceive(receive);
+
+ bool send = IsReadyToSendMedia_w();
+ media_send_channel()->SetSend(send);
+ RTC_LOG(LS_INFO) << "Changing video state, recv=" << receive
+ << " send=" << send << " for " << ToString();
+}
+
+bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content,
+ SdpType type,
+ std::string& error_desc) {
+ TRACE_EVENT0("webrtc", "VideoChannel::SetLocalContent_w");
+ RTC_DLOG(LS_INFO) << "Setting local video description for " << ToString();
+
+ RTC_LOG_THREAD_BLOCK_COUNT();
+
+ RtpHeaderExtensions header_extensions =
+ GetDeduplicatedRtpHeaderExtensions(content->rtp_header_extensions());
+ bool update_header_extensions = true;
+ media_send_channel()->SetExtmapAllowMixed(content->extmap_allow_mixed());
+
+ VideoReceiverParameters recv_params = last_recv_params_;
+
+ MediaChannelParametersFromMediaDescription(
+ content->as_video(), header_extensions,
+ webrtc::RtpTransceiverDirectionHasRecv(content->direction()),
+ &recv_params);
+
+ VideoSenderParameters send_params = last_send_params_;
+
+ // Ensure that there is a matching packetization for each send codec. If the
+ // other peer offered to exclusively send non-standard packetization but we
+ // only accept to receive standard packetization we effectively amend their
+  // offer by ignoring the packetization and fall back to standard packetization
+ // instead.
+ bool needs_send_params_update = false;
+ if (type == SdpType::kAnswer || type == SdpType::kPrAnswer) {
+ webrtc::flat_set<const VideoCodec*> matched_codecs;
+ for (VideoCodec& send_codec : send_params.codecs) {
+ if (absl::c_any_of(matched_codecs, [&](const VideoCodec* c) {
+ return send_codec.Matches(*c);
+ })) {
+ continue;
+ }
+
+ std::vector<const VideoCodec*> recv_codecs =
+ FindAllMatchingCodecs(recv_params.codecs, send_codec);
+ if (recv_codecs.empty()) {
+ continue;
+ }
+
+ bool may_ignore_packetization = false;
+ bool has_matching_packetization = false;
+ for (const VideoCodec* recv_codec : recv_codecs) {
+ if (!recv_codec->packetization.has_value() &&
+ send_codec.packetization.has_value()) {
+ may_ignore_packetization = true;
+ } else if (recv_codec->packetization == send_codec.packetization) {
+ has_matching_packetization = true;
+ break;
+ }
+ }
+
+ if (may_ignore_packetization) {
+ send_codec.packetization = absl::nullopt;
+ needs_send_params_update = true;
+ } else if (!has_matching_packetization) {
+ error_desc = StringFormat(
+ "Failed to set local answer due to incompatible codec "
+ "packetization for pt='%d' specified in m-section with mid='%s'.",
+ send_codec.id, mid().c_str());
+ return false;
+ }
+
+ if (has_matching_packetization) {
+ matched_codecs.insert(&send_codec);
+ }
+ }
+ }
+
+ if (!media_receive_channel()->SetReceiverParameters(recv_params)) {
+ error_desc = StringFormat(
+ "Failed to set local video description recv parameters for m-section "
+ "with mid='%s'.",
+ mid().c_str());
+ return false;
+ }
+
+ bool criteria_modified = false;
+ if (webrtc::RtpTransceiverDirectionHasRecv(content->direction())) {
+ for (const Codec& codec : content->codecs()) {
+ if (MaybeAddHandledPayloadType(codec.id))
+ criteria_modified = true;
+ }
+ }
+
+ last_recv_params_ = recv_params;
+
+ if (needs_send_params_update) {
+ if (!media_send_channel()->SetSenderParameters(send_params)) {
+ error_desc = StringFormat(
+ "Failed to set send parameters for m-section with mid='%s'.",
+ mid().c_str());
+ return false;
+ }
+ last_send_params_ = send_params;
+ }
+
+ if (!UpdateLocalStreams_w(content->as_video()->streams(), type, error_desc)) {
+ RTC_DCHECK(!error_desc.empty());
+ return false;
+ }
+
+ set_local_content_direction(content->direction());
+ UpdateMediaSendRecvState_w();
+
+ RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(0);
+
+ bool success = MaybeUpdateDemuxerAndRtpExtensions_w(
+ criteria_modified,
+ update_header_extensions
+ ? absl::optional<RtpHeaderExtensions>(std::move(header_extensions))
+ : absl::nullopt,
+ error_desc);
+
+ RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(1);
+
+ return success;
+}
+
+bool VideoChannel::SetRemoteContent_w(const MediaContentDescription* content,
+ SdpType type,
+ std::string& error_desc) {
+ TRACE_EVENT0("webrtc", "VideoChannel::SetRemoteContent_w");
+ RTC_LOG(LS_INFO) << "Setting remote video description for " << ToString();
+
+ const VideoContentDescription* video = content->as_video();
+
+ VideoSenderParameters send_params = last_send_params_;
+ RtpSendParametersFromMediaDescription(video, extensions_filter(),
+ &send_params);
+ send_params.mid = mid();
+ send_params.conference_mode = video->conference_mode();
+
+ VideoReceiverParameters recv_params = last_recv_params_;
+
+ // Ensure that there is a matching packetization for each receive codec. If we
+ // offered to exclusively receive a non-standard packetization but the other
+ // peer only accepts to send standard packetization we effectively amend our
+  // offer by ignoring the packetization and fall back to standard packetization
+ // instead.
+ bool needs_recv_params_update = false;
+ if (type == SdpType::kAnswer || type == SdpType::kPrAnswer) {
+ webrtc::flat_set<const VideoCodec*> matched_codecs;
+ for (VideoCodec& recv_codec : recv_params.codecs) {
+ if (absl::c_any_of(matched_codecs, [&](const VideoCodec* c) {
+ return recv_codec.Matches(*c);
+ })) {
+ continue;
+ }
+
+ std::vector<const VideoCodec*> send_codecs =
+ FindAllMatchingCodecs(send_params.codecs, recv_codec);
+ if (send_codecs.empty()) {
+ continue;
+ }
+
+ bool may_ignore_packetization = false;
+ bool has_matching_packetization = false;
+ for (const VideoCodec* send_codec : send_codecs) {
+ if (!send_codec->packetization.has_value() &&
+ recv_codec.packetization.has_value()) {
+ may_ignore_packetization = true;
+ } else if (send_codec->packetization == recv_codec.packetization) {
+ has_matching_packetization = true;
+ break;
+ }
+ }
+
+ if (may_ignore_packetization) {
+ recv_codec.packetization = absl::nullopt;
+ needs_recv_params_update = true;
+ } else if (!has_matching_packetization) {
+ error_desc = StringFormat(
+ "Failed to set remote answer due to incompatible codec "
+ "packetization for pt='%d' specified in m-section with mid='%s'.",
+ recv_codec.id, mid().c_str());
+ return false;
+ }
+
+ if (has_matching_packetization) {
+ matched_codecs.insert(&recv_codec);
+ }
+ }
+ }
+
+ if (!media_send_channel()->SetSenderParameters(send_params)) {
+ error_desc = StringFormat(
+ "Failed to set remote video description send parameters for m-section "
+ "with mid='%s'.",
+ mid().c_str());
+ return false;
+ }
+ // adjust receive streams based on send codec
+ media_receive_channel()->SetReceiverFeedbackParameters(
+ media_send_channel()->SendCodecHasLntf(),
+ media_send_channel()->SendCodecHasNack(),
+ media_send_channel()->SendCodecRtcpMode(),
+ media_send_channel()->SendCodecRtxTime());
+ last_send_params_ = send_params;
+
+ if (needs_recv_params_update) {
+ if (!media_receive_channel()->SetReceiverParameters(recv_params)) {
+ error_desc = StringFormat(
+ "Failed to set recv parameters for m-section with mid='%s'.",
+ mid().c_str());
+ return false;
+ }
+ last_recv_params_ = recv_params;
+ }
+
+ return UpdateRemoteStreams_w(content, type, error_desc);
+}
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/pc/channel.h b/third_party/libwebrtc/pc/channel.h
new file mode 100644
index 0000000000..c933091e92
--- /dev/null
+++ b/third_party/libwebrtc/pc/channel.h
@@ -0,0 +1,507 @@
+/*
+ * Copyright 2004 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_CHANNEL_H_
+#define PC_CHANNEL_H_
+
+#include <stdint.h>
+
+#include <functional>
+#include <memory>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "absl/strings/string_view.h"
+#include "absl/types/optional.h"
+#include "api/crypto/crypto_options.h"
+#include "api/jsep.h"
+#include "api/media_types.h"
+#include "api/rtp_parameters.h"
+#include "api/rtp_transceiver_direction.h"
+#include "api/scoped_refptr.h"
+#include "api/sequence_checker.h"
+#include "api/task_queue/pending_task_safety_flag.h"
+#include "call/rtp_demuxer.h"
+#include "call/rtp_packet_sink_interface.h"
+#include "media/base/media_channel.h"
+#include "media/base/media_channel_impl.h"
+#include "media/base/stream_params.h"
+#include "modules/rtp_rtcp/source/rtp_packet_received.h"
+#include "pc/channel_interface.h"
+#include "pc/rtp_transport_internal.h"
+#include "pc/session_description.h"
+#include "rtc_base/async_packet_socket.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/containers/flat_set.h"
+#include "rtc_base/copy_on_write_buffer.h"
+#include "rtc_base/network/sent_packet.h"
+#include "rtc_base/network_route.h"
+#include "rtc_base/socket.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/thread_annotations.h"
+#include "rtc_base/unique_id_generator.h"
+
+namespace cricket {
+
+// BaseChannel contains logic common to voice and video, including enable,
+// marshaling calls to a worker and network threads, and connection and media
+// monitors.
+//
+// BaseChannel assumes signaling and other threads are allowed to make
+// synchronous calls to the worker thread, the worker thread makes synchronous
+// calls only to the network thread, and the network thread can't be blocked by
+// other threads.
+// All methods with _n suffix must be called on network thread,
+// methods with _w suffix on worker thread
+// and methods with _s suffix on signaling thread.
+// Network and worker threads may be the same thread.
+//
+class VideoChannel;
+class VoiceChannel;
+
+class BaseChannel : public ChannelInterface,
+ // TODO(tommi): Consider implementing these interfaces
+ // via composition.
+ public MediaChannelNetworkInterface,
+ public webrtc::RtpPacketSinkInterface {
+ public:
+ // If `srtp_required` is true, the channel will not send or receive any
+ // RTP/RTCP packets without using SRTP (either using SDES or DTLS-SRTP).
+ // The BaseChannel does not own the UniqueRandomIdGenerator so it is the
+ // responsibility of the user to ensure it outlives this object.
+  // TODO(zhihuang): Create a BaseChannel::Config struct for the parameter lists
+ // which will make it easier to change the constructor.
+
+ // Constructor for use when the MediaChannels are split
+ BaseChannel(
+ webrtc::TaskQueueBase* worker_thread,
+ rtc::Thread* network_thread,
+ webrtc::TaskQueueBase* signaling_thread,
+ std::unique_ptr<MediaSendChannelInterface> media_send_channel,
+ std::unique_ptr<MediaReceiveChannelInterface> media_receive_channel,
+ absl::string_view mid,
+ bool srtp_required,
+ webrtc::CryptoOptions crypto_options,
+ rtc::UniqueRandomIdGenerator* ssrc_generator);
+ virtual ~BaseChannel();
+
+ webrtc::TaskQueueBase* worker_thread() const { return worker_thread_; }
+ rtc::Thread* network_thread() const { return network_thread_; }
+ const std::string& mid() const override { return demuxer_criteria_.mid(); }
+ // TODO(deadbeef): This is redundant; remove this.
+ absl::string_view transport_name() const override {
+ RTC_DCHECK_RUN_ON(network_thread());
+ if (rtp_transport_)
+ return rtp_transport_->transport_name();
+ return "";
+ }
+
+ // This function returns true if using SRTP (DTLS-based keying or SDES).
+ bool srtp_active() const {
+ RTC_DCHECK_RUN_ON(network_thread());
+ return rtp_transport_ && rtp_transport_->IsSrtpActive();
+ }
+
+ // Set an RTP level transport which could be an RtpTransport without
+ // encryption, an SrtpTransport for SDES or a DtlsSrtpTransport for DTLS-SRTP.
+ // This can be called from any thread and it hops to the network thread
+ // internally. It would replace the `SetTransports` and its variants.
+ bool SetRtpTransport(webrtc::RtpTransportInternal* rtp_transport) override;
+
+ webrtc::RtpTransportInternal* rtp_transport() const {
+ RTC_DCHECK_RUN_ON(network_thread());
+ return rtp_transport_;
+ }
+
+ // Channel control
+ bool SetLocalContent(const MediaContentDescription* content,
+ webrtc::SdpType type,
+ std::string& error_desc) override;
+ bool SetRemoteContent(const MediaContentDescription* content,
+ webrtc::SdpType type,
+ std::string& error_desc) override;
+ // Controls whether this channel will receive packets on the basis of
+ // matching payload type alone. This is needed for legacy endpoints that
+  // don't signal SSRCs or use MID/RID, but doesn't make sense if there is
+  // more than one channel of a specific media type, as that is ambiguous.
+ //
+ // This method will also remove any existing streams that were bound to this
+ // channel on the basis of payload type, since one of these streams might
+ // actually belong to a new channel. See: crbug.com/webrtc/11477
+ bool SetPayloadTypeDemuxingEnabled(bool enabled) override;
+
+ void Enable(bool enable) override;
+
+ const std::vector<StreamParams>& local_streams() const override {
+ return local_streams_;
+ }
+ const std::vector<StreamParams>& remote_streams() const override {
+ return remote_streams_;
+ }
+
+ // Used for latency measurements.
+ void SetFirstPacketReceivedCallback(std::function<void()> callback) override;
+
+ // From RtpTransport - public for testing only
+ void OnTransportReadyToSend(bool ready);
+
+ // Only public for unit tests. Otherwise, consider protected.
+ int SetOption(SocketType type, rtc::Socket::Option o, int val) override;
+
+ // RtpPacketSinkInterface overrides.
+ void OnRtpPacket(const webrtc::RtpPacketReceived& packet) override;
+
+ VideoMediaSendChannelInterface* video_media_send_channel() override {
+ RTC_CHECK(false) << "Attempt to fetch video channel from non-video";
+ return nullptr;
+ }
+ VoiceMediaSendChannelInterface* voice_media_send_channel() override {
+ RTC_CHECK(false) << "Attempt to fetch voice channel from non-voice";
+ return nullptr;
+ }
+ VideoMediaReceiveChannelInterface* video_media_receive_channel() override {
+ RTC_CHECK(false) << "Attempt to fetch video channel from non-video";
+ return nullptr;
+ }
+ VoiceMediaReceiveChannelInterface* voice_media_receive_channel() override {
+ RTC_CHECK(false) << "Attempt to fetch voice channel from non-voice";
+ return nullptr;
+ }
+
+ protected:
+ void set_local_content_direction(webrtc::RtpTransceiverDirection direction)
+ RTC_RUN_ON(worker_thread()) {
+ local_content_direction_ = direction;
+ }
+
+ webrtc::RtpTransceiverDirection local_content_direction() const
+ RTC_RUN_ON(worker_thread()) {
+ return local_content_direction_;
+ }
+
+ void set_remote_content_direction(webrtc::RtpTransceiverDirection direction)
+ RTC_RUN_ON(worker_thread()) {
+ remote_content_direction_ = direction;
+ }
+
+ webrtc::RtpTransceiverDirection remote_content_direction() const
+ RTC_RUN_ON(worker_thread()) {
+ return remote_content_direction_;
+ }
+
+ webrtc::RtpExtension::Filter extensions_filter() const {
+ return extensions_filter_;
+ }
+
+ bool network_initialized() RTC_RUN_ON(network_thread()) {
+ return media_send_channel()->HasNetworkInterface();
+ }
+
+ bool enabled() const RTC_RUN_ON(worker_thread()) { return enabled_; }
+ webrtc::TaskQueueBase* signaling_thread() const { return signaling_thread_; }
+
+ // Call to verify that:
+ // * The required content description directions have been set.
+ // * The channel is enabled.
+ // * The SRTP filter is active if it's needed.
+ // * The transport has been writable before, meaning it should be at least
+ // possible to succeed in sending a packet.
+ //
+ // When any of these properties change, UpdateMediaSendRecvState_w should be
+ // called.
+ bool IsReadyToSendMedia_w() const RTC_RUN_ON(worker_thread());
+
+ // NetworkInterface implementation, called by MediaEngine
+ bool SendPacket(rtc::CopyOnWriteBuffer* packet,
+ const rtc::PacketOptions& options) override;
+ bool SendRtcp(rtc::CopyOnWriteBuffer* packet,
+ const rtc::PacketOptions& options) override;
+
+ // From RtpTransportInternal
+ void OnWritableState(bool writable);
+
+ void OnNetworkRouteChanged(absl::optional<rtc::NetworkRoute> network_route);
+
+ bool SendPacket(bool rtcp,
+ rtc::CopyOnWriteBuffer* packet,
+ const rtc::PacketOptions& options);
+
+ void EnableMedia_w() RTC_RUN_ON(worker_thread());
+ void DisableMedia_w() RTC_RUN_ON(worker_thread());
+
+ // Performs actions if the RTP/RTCP writable state changed. This should
+ // be called whenever a channel's writable state changes or when RTCP muxing
+ // becomes active/inactive.
+ void UpdateWritableState_n() RTC_RUN_ON(network_thread());
+ void ChannelWritable_n() RTC_RUN_ON(network_thread());
+ void ChannelNotWritable_n() RTC_RUN_ON(network_thread());
+
+ bool SetPayloadTypeDemuxingEnabled_w(bool enabled)
+ RTC_RUN_ON(worker_thread());
+
+ // Should be called whenever the conditions for
+ // IsReadyToReceiveMedia/IsReadyToSendMedia are satisfied (or unsatisfied).
+ // Updates the send/recv state of the media channel.
+ virtual void UpdateMediaSendRecvState_w() RTC_RUN_ON(worker_thread()) = 0;
+
+ bool UpdateLocalStreams_w(const std::vector<StreamParams>& streams,
+ webrtc::SdpType type,
+ std::string& error_desc)
+ RTC_RUN_ON(worker_thread());
+ bool UpdateRemoteStreams_w(const MediaContentDescription* content,
+ webrtc::SdpType type,
+ std::string& error_desc)
+ RTC_RUN_ON(worker_thread());
+ virtual bool SetLocalContent_w(const MediaContentDescription* content,
+ webrtc::SdpType type,
+ std::string& error_desc)
+ RTC_RUN_ON(worker_thread()) = 0;
+ virtual bool SetRemoteContent_w(const MediaContentDescription* content,
+ webrtc::SdpType type,
+ std::string& error_desc)
+ RTC_RUN_ON(worker_thread()) = 0;
+
+ // Returns a list of RTP header extensions where any extension URI is unique.
+ // Encrypted extensions will be either preferred or discarded, depending on
+ // the current crypto_options_.
+ RtpHeaderExtensions GetDeduplicatedRtpHeaderExtensions(
+ const RtpHeaderExtensions& extensions);
+
+ // Add `payload_type` to `demuxer_criteria_` if payload type demuxing is
+ // enabled.
+ // Returns true if the demuxer payload type changed and a re-registration
+ // is needed.
+ bool MaybeAddHandledPayloadType(int payload_type) RTC_RUN_ON(worker_thread());
+
+ // Returns true if the demuxer payload type criteria was non-empty before
+ // clearing.
+ bool ClearHandledPayloadTypes() RTC_RUN_ON(worker_thread());
+
+ // Hops to the network thread to update the transport if an update is
+ // requested. If `update_demuxer` is false and `extensions` is not set, the
+ // function simply returns. If either of these is set, the function updates
+ // the transport with either or both of the demuxer criteria and the supplied
+ // rtp header extensions.
+ // Returns `true` if either an update wasn't needed or one was successfully
+ // applied. If the return value is `false`, then updating the demuxer criteria
+ // failed, which needs to be treated as an error.
+ bool MaybeUpdateDemuxerAndRtpExtensions_w(
+ bool update_demuxer,
+ absl::optional<RtpHeaderExtensions> extensions,
+ std::string& error_desc) RTC_RUN_ON(worker_thread());
+
+ bool RegisterRtpDemuxerSink_w() RTC_RUN_ON(worker_thread());
+
+ // Return description of media channel to facilitate logging
+ std::string ToString() const;
+
+ const std::unique_ptr<MediaSendChannelInterface> media_send_channel_;
+ const std::unique_ptr<MediaReceiveChannelInterface> media_receive_channel_;
+
+ private:
+ bool ConnectToRtpTransport_n() RTC_RUN_ON(network_thread());
+ void DisconnectFromRtpTransport_n() RTC_RUN_ON(network_thread());
+ void SignalSentPacket_n(const rtc::SentPacket& sent_packet);
+
+ webrtc::TaskQueueBase* const worker_thread_;
+ rtc::Thread* const network_thread_;
+ webrtc::TaskQueueBase* const signaling_thread_;
+ rtc::scoped_refptr<webrtc::PendingTaskSafetyFlag> alive_;
+
+ std::function<void()> on_first_packet_received_
+ RTC_GUARDED_BY(network_thread());
+
+ webrtc::RtpTransportInternal* rtp_transport_
+ RTC_GUARDED_BY(network_thread()) = nullptr;
+
+ std::vector<std::pair<rtc::Socket::Option, int> > socket_options_
+ RTC_GUARDED_BY(network_thread());
+ std::vector<std::pair<rtc::Socket::Option, int> > rtcp_socket_options_
+ RTC_GUARDED_BY(network_thread());
+ bool writable_ RTC_GUARDED_BY(network_thread()) = false;
+ bool was_ever_writable_n_ RTC_GUARDED_BY(network_thread()) = false;
+ bool was_ever_writable_ RTC_GUARDED_BY(worker_thread()) = false;
+ const bool srtp_required_ = true;
+
+ // Set to either kPreferEncryptedExtension or kDiscardEncryptedExtension
+ // based on the supplied CryptoOptions.
+ const webrtc::RtpExtension::Filter extensions_filter_;
+
+ // Currently the `enabled_` flag is accessed from the signaling thread as
+ // well, but it can be changed only when signaling thread does a synchronous
+ // call to the worker thread, so it should be safe.
+ bool enabled_ RTC_GUARDED_BY(worker_thread()) = false;
+ bool enabled_s_ RTC_GUARDED_BY(signaling_thread()) = false;
+ bool payload_type_demuxing_enabled_ RTC_GUARDED_BY(worker_thread()) = true;
+ std::vector<StreamParams> local_streams_ RTC_GUARDED_BY(worker_thread());
+ std::vector<StreamParams> remote_streams_ RTC_GUARDED_BY(worker_thread());
+ webrtc::RtpTransceiverDirection local_content_direction_ RTC_GUARDED_BY(
+ worker_thread()) = webrtc::RtpTransceiverDirection::kInactive;
+ webrtc::RtpTransceiverDirection remote_content_direction_ RTC_GUARDED_BY(
+ worker_thread()) = webrtc::RtpTransceiverDirection::kInactive;
+
+ // Cached list of payload types, used if payload type demuxing is re-enabled.
+ webrtc::flat_set<uint8_t> payload_types_ RTC_GUARDED_BY(worker_thread());
+ // A stored copy of the rtp header extensions as applied to the transport.
+ RtpHeaderExtensions rtp_header_extensions_ RTC_GUARDED_BY(worker_thread());
+ // TODO(bugs.webrtc.org/12239): Modified on worker thread, accessed
+ // on network thread in RegisterRtpDemuxerSink_n (called from Init_w)
+ webrtc::RtpDemuxerCriteria demuxer_criteria_;
+ // This generator is used to generate SSRCs for local streams.
+ // This is needed in cases where SSRCs are not negotiated or set explicitly
+ // like in Simulcast.
+ // This object is not owned by the channel so it must outlive it.
+ rtc::UniqueRandomIdGenerator* const ssrc_generator_;
+};
+
+// VoiceChannel is a specialization that adds support for early media, DTMF,
+// and input/output level monitoring.
+class VoiceChannel : public BaseChannel {
+ public:
+ VoiceChannel(
+ webrtc::TaskQueueBase* worker_thread,
+ rtc::Thread* network_thread,
+ webrtc::TaskQueueBase* signaling_thread,
+ std::unique_ptr<VoiceMediaSendChannelInterface> send_channel_impl,
+ std::unique_ptr<VoiceMediaReceiveChannelInterface> receive_channel_impl,
+ absl::string_view mid,
+ bool srtp_required,
+ webrtc::CryptoOptions crypto_options,
+ rtc::UniqueRandomIdGenerator* ssrc_generator);
+
+ ~VoiceChannel();
+
+ VideoChannel* AsVideoChannel() override {
+ RTC_CHECK_NOTREACHED();
+ return nullptr;
+ }
+ VoiceChannel* AsVoiceChannel() override { return this; }
+
+ VoiceMediaSendChannelInterface* send_channel() {
+ return media_send_channel_->AsVoiceSendChannel();
+ }
+
+ VoiceMediaReceiveChannelInterface* receive_channel() {
+ return media_receive_channel_->AsVoiceReceiveChannel();
+ }
+
+ VoiceMediaSendChannelInterface* media_send_channel() override {
+ return send_channel();
+ }
+
+ VoiceMediaSendChannelInterface* voice_media_send_channel() override {
+ return send_channel();
+ }
+
+ VoiceMediaReceiveChannelInterface* media_receive_channel() override {
+ return receive_channel();
+ }
+
+ VoiceMediaReceiveChannelInterface* voice_media_receive_channel() override {
+ return receive_channel();
+ }
+
+ cricket::MediaType media_type() const override {
+ return cricket::MEDIA_TYPE_AUDIO;
+ }
+
+ private:
+ // overrides from BaseChannel
+ void UpdateMediaSendRecvState_w() RTC_RUN_ON(worker_thread()) override;
+ bool SetLocalContent_w(const MediaContentDescription* content,
+ webrtc::SdpType type,
+ std::string& error_desc)
+ RTC_RUN_ON(worker_thread()) override;
+ bool SetRemoteContent_w(const MediaContentDescription* content,
+ webrtc::SdpType type,
+ std::string& error_desc)
+ RTC_RUN_ON(worker_thread()) override;
+
+ // Last AudioSenderParameter sent down to the media_channel() via
+ // SetSenderParameters.
+ AudioSenderParameter last_send_params_ RTC_GUARDED_BY(worker_thread());
+ // Last AudioReceiverParameters sent down to the media_channel() via
+ // SetReceiverParameters.
+ AudioReceiverParameters last_recv_params_ RTC_GUARDED_BY(worker_thread());
+};
+
+// VideoChannel is a specialization for video.
+class VideoChannel : public BaseChannel {
+ public:
+ VideoChannel(
+ webrtc::TaskQueueBase* worker_thread,
+ rtc::Thread* network_thread,
+ webrtc::TaskQueueBase* signaling_thread,
+ std::unique_ptr<VideoMediaSendChannelInterface> media_send_channel,
+ std::unique_ptr<VideoMediaReceiveChannelInterface> media_receive_channel,
+ absl::string_view mid,
+ bool srtp_required,
+ webrtc::CryptoOptions crypto_options,
+ rtc::UniqueRandomIdGenerator* ssrc_generator);
+ ~VideoChannel();
+
+ VideoChannel* AsVideoChannel() override { return this; }
+ VoiceChannel* AsVoiceChannel() override {
+ RTC_CHECK_NOTREACHED();
+ return nullptr;
+ }
+
+ VideoMediaSendChannelInterface* send_channel() {
+ return media_send_channel_->AsVideoSendChannel();
+ }
+
+ VideoMediaReceiveChannelInterface* receive_channel() {
+ return media_receive_channel_->AsVideoReceiveChannel();
+ }
+
+ VideoMediaSendChannelInterface* media_send_channel() override {
+ return send_channel();
+ }
+
+ VideoMediaSendChannelInterface* video_media_send_channel() override {
+ return send_channel();
+ }
+
+ VideoMediaReceiveChannelInterface* media_receive_channel() override {
+ return receive_channel();
+ }
+
+ VideoMediaReceiveChannelInterface* video_media_receive_channel() override {
+ return receive_channel();
+ }
+
+ cricket::MediaType media_type() const override {
+ return cricket::MEDIA_TYPE_VIDEO;
+ }
+
+ private:
+ // overrides from BaseChannel
+ void UpdateMediaSendRecvState_w() RTC_RUN_ON(worker_thread()) override;
+ bool SetLocalContent_w(const MediaContentDescription* content,
+ webrtc::SdpType type,
+ std::string& error_desc)
+ RTC_RUN_ON(worker_thread()) override;
+ bool SetRemoteContent_w(const MediaContentDescription* content,
+ webrtc::SdpType type,
+ std::string& error_desc)
+ RTC_RUN_ON(worker_thread()) override;
+
+ // Last VideoSenderParameters sent down to the media_channel() via
+ // SetSenderParameters.
+ VideoSenderParameters last_send_params_ RTC_GUARDED_BY(worker_thread());
+ // Last VideoReceiverParameters sent down to the media_channel() via
+ // SetReceiverParameters.
+ VideoReceiverParameters last_recv_params_ RTC_GUARDED_BY(worker_thread());
+};
+
+} // namespace cricket
+
+#endif // PC_CHANNEL_H_
diff --git a/third_party/libwebrtc/pc/channel_interface.h b/third_party/libwebrtc/pc/channel_interface.h
new file mode 100644
index 0000000000..8d6a9fe745
--- /dev/null
+++ b/third_party/libwebrtc/pc/channel_interface.h
@@ -0,0 +1,105 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_CHANNEL_INTERFACE_H_
+#define PC_CHANNEL_INTERFACE_H_
+
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "absl/strings/string_view.h"
+#include "api/jsep.h"
+#include "api/media_types.h"
+#include "media/base/media_channel.h"
+#include "pc/rtp_transport_internal.h"
+
+namespace webrtc {
+class Call;
+class VideoBitrateAllocatorFactory;
+} // namespace webrtc
+
+namespace cricket {
+
+class VoiceChannel;
+class VideoChannel;
+class MediaContentDescription;
+struct MediaConfig;
+
+// A Channel is a construct that groups media streams of the same type
+// (audio or video), both outgoing and incoming.
+// When the PeerConnection API is used, a Channel corresponds one to one
+// to an RtpTransceiver.
+// When Unified Plan is used, there can only be at most one outgoing and
+// one incoming stream. With Plan B, there can be more than one.
+
+// ChannelInterface contains methods common to voice and video channels.
+// As more methods are added to BaseChannel, they should be included in the
+// interface as well.
+// TODO(bugs.webrtc.org/13931): Merge this class into RtpTransceiver.
+class ChannelInterface {
+ public:
+ virtual ~ChannelInterface() = default;
+ virtual cricket::MediaType media_type() const = 0;
+
+ virtual VideoChannel* AsVideoChannel() = 0;
+ virtual VoiceChannel* AsVoiceChannel() = 0;
+
+ virtual MediaSendChannelInterface* media_send_channel() = 0;
+  // Typecasts of media_channel(). Will trigger a fatal CHECK failure
+  // if the channel is of the wrong type.
+ virtual VideoMediaSendChannelInterface* video_media_send_channel() = 0;
+ virtual VoiceMediaSendChannelInterface* voice_media_send_channel() = 0;
+ virtual MediaReceiveChannelInterface* media_receive_channel() = 0;
+  // Typecasts of media_channel(). Will trigger a fatal CHECK failure
+  // if the channel is of the wrong type.
+ virtual VideoMediaReceiveChannelInterface* video_media_receive_channel() = 0;
+ virtual VoiceMediaReceiveChannelInterface* voice_media_receive_channel() = 0;
+
+ // Returns a string view for the transport name. Fetching the transport name
+ // must be done on the network thread only and note that the lifetime of
+ // the returned object should be assumed to only be the calling scope.
+ // TODO(deadbeef): This is redundant; remove this.
+ virtual absl::string_view transport_name() const = 0;
+
+ // TODO(tommi): Change return type to string_view.
+ virtual const std::string& mid() const = 0;
+
+ // Enables or disables this channel
+ virtual void Enable(bool enable) = 0;
+
+ // Used for latency measurements.
+ virtual void SetFirstPacketReceivedCallback(
+ std::function<void()> callback) = 0;
+
+ // Channel control
+ virtual bool SetLocalContent(const MediaContentDescription* content,
+ webrtc::SdpType type,
+ std::string& error_desc) = 0;
+ virtual bool SetRemoteContent(const MediaContentDescription* content,
+ webrtc::SdpType type,
+ std::string& error_desc) = 0;
+ virtual bool SetPayloadTypeDemuxingEnabled(bool enabled) = 0;
+
+ // Access to the local and remote streams that were set on the channel.
+ virtual const std::vector<StreamParams>& local_streams() const = 0;
+ virtual const std::vector<StreamParams>& remote_streams() const = 0;
+
+ // Set an RTP level transport.
+ // Some examples:
+ // * An RtpTransport without encryption.
+ // * An SrtpTransport for SDES.
+ // * A DtlsSrtpTransport for DTLS-SRTP.
+ virtual bool SetRtpTransport(webrtc::RtpTransportInternal* rtp_transport) = 0;
+};
+
+} // namespace cricket
+
+#endif // PC_CHANNEL_INTERFACE_H_
diff --git a/third_party/libwebrtc/pc/channel_unittest.cc b/third_party/libwebrtc/pc/channel_unittest.cc
new file mode 100644
index 0000000000..c675cd0446
--- /dev/null
+++ b/third_party/libwebrtc/pc/channel_unittest.cc
@@ -0,0 +1,2529 @@
+/*
+ * Copyright 2009 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/channel.h"
+
+#include <stddef.h>
+
+#include <cstdint>
+#include <string>
+#include <type_traits>
+
+#include "absl/functional/any_invocable.h"
+#include "api/array_view.h"
+#include "api/audio_options.h"
+#include "api/rtp_parameters.h"
+#include "api/task_queue/pending_task_safety_flag.h"
+#include "media/base/codec.h"
+#include "media/base/fake_media_engine.h"
+#include "media/base/fake_rtp.h"
+#include "media/base/media_channel.h"
+#include "media/base/media_constants.h"
+#include "media/base/rid_description.h"
+#include "p2p/base/candidate_pair_interface.h"
+#include "p2p/base/dtls_transport_internal.h"
+#include "p2p/base/fake_dtls_transport.h"
+#include "p2p/base/fake_packet_transport.h"
+#include "p2p/base/ice_transport_internal.h"
+#include "p2p/base/p2p_constants.h"
+#include "p2p/base/packet_transport_internal.h"
+#include "pc/dtls_srtp_transport.h"
+#include "pc/jsep_transport.h"
+#include "pc/rtp_transport.h"
+#include "rtc_base/arraysize.h"
+#include "rtc_base/buffer.h"
+#include "rtc_base/byte_order.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/rtc_certificate.h"
+#include "rtc_base/ssl_identity.h"
+#include "rtc_base/task_queue_for_test.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+#include "test/scoped_key_value_config.h"
+
+namespace {
+
+using ::cricket::DtlsTransportInternal;
+using ::cricket::FakeVoiceMediaReceiveChannel;
+using ::cricket::FakeVoiceMediaSendChannel;
+using ::cricket::RidDescription;
+using ::cricket::RidDirection;
+using ::cricket::StreamParams;
+using ::testing::AllOf;
+using ::testing::ElementsAre;
+using ::testing::Field;
+using ::webrtc::RtpTransceiverDirection;
+using ::webrtc::SdpType;
+
// Audio codecs used as fixtures (payload type, name, bitrate, channels).
const cricket::Codec kPcmuCodec =
    cricket::CreateAudioCodec(0, "PCMU", 64000, 1);
const cricket::Codec kPcmaCodec =
    cricket::CreateAudioCodec(8, "PCMA", 64000, 1);
const cricket::Codec kIsacCodec =
    cricket::CreateAudioCodec(103, "ISAC", 40000, 1);
// Video codecs used as fixtures (payload type, name).
const cricket::Codec kH264Codec = cricket::CreateVideoCodec(97, "H264");
const cricket::Codec kH264SvcCodec = cricket::CreateVideoCodec(99, "H264-SVC");
// Distinct SSRCs for the streams exchanged between the two test channels.
const uint32_t kSsrc1 = 0x1111;
const uint32_t kSsrc2 = 0x2222;
const uint32_t kSsrc3 = 0x3333;
const uint32_t kSsrc4 = 0x4444;
// Payload types carried by the canned audio/video RTP test packets.
const int kAudioPts[] = {0, 8};
const int kVideoPts[] = {97, 99};
// Whether the current (worker) thread should double as the network thread,
// or a dedicated network thread should be created.
enum class NetworkIsWorker { Yes, No };
+
+
// Bundles the concrete channel / media-channel / content / info / options
// types for one media kind (voice or video) so that ChannelTest<T> below can
// be written generically against `typename T::...`.
//
// Modernized: `using` aliases instead of pre-C++11 `typedef` (same member
// names, identical meaning).
template <class ChannelT,
          class MediaSendChannelT,
          class MediaReceiveChannelT,
          class MediaSendChannelInterfaceT,
          class MediaReceiveChannelInterfaceT,
          class ContentT,
          class MediaInfoT,
          class OptionsT>
class Traits {
 public:
  using Channel = ChannelT;
  using MediaSendChannel = MediaSendChannelT;
  using MediaReceiveChannel = MediaReceiveChannelT;
  using MediaSendChannelInterface = MediaSendChannelInterfaceT;
  using MediaReceiveChannelInterface = MediaReceiveChannelInterfaceT;
  using Content = ContentT;
  using MediaInfo = MediaInfoT;
  using Options = OptionsT;
};
+
// Binds the voice flavor of every type ChannelTest needs: the fake voice
// media channels, audio content description, and audio options.
class VoiceTraits : public Traits<cricket::VoiceChannel,
                                  cricket::FakeVoiceMediaSendChannel,
                                  cricket::FakeVoiceMediaReceiveChannel,
                                  cricket::VoiceMediaSendChannelInterface,
                                  cricket::VoiceMediaReceiveChannelInterface,
                                  cricket::AudioContentDescription,
                                  cricket::VoiceMediaInfo,
                                  cricket::AudioOptions> {};
+
// Binds the video flavor of every type ChannelTest needs: the fake video
// media channels, video content description, and video options.
class VideoTraits : public Traits<cricket::VideoChannel,
                                  cricket::FakeVideoMediaSendChannel,
                                  cricket::FakeVideoMediaReceiveChannel,
                                  cricket::VideoMediaSendChannelInterface,
                                  cricket::VideoMediaReceiveChannelInterface,
                                  cricket::VideoContentDescription,
                                  cricket::VideoMediaInfo,
                                  cricket::VideoOptions> {};
+
+// Base class for Voice/Video tests
+template <class T>
+class ChannelTest : public ::testing::Test, public sigslot::has_slots<> {
+ public:
  // Bit flags passed to CreateChannels() controlling how each side is built.
  enum Flags {
    // Negotiate RTCP multiplexing: no separate RTCP transport is created.
    RTCP_MUX = 0x1,
    // Also put the stream's SSRC into the *remote* content description.
    SSRC_MUX = 0x8,
    // Use DTLS-SRTP transports with locally generated certificates.
    DTLS = 0x10,
    // Use BaseChannel with PacketTransportInternal rather than
    // DtlsTransportInternal.
    RAW_PACKET_TRANSPORT = 0x20,
  };
+
  // `verify_playout` enables the playout() assertions (meaningful for voice).
  // `rtp_data`/`rtcp_data` are the canned packets used by the Send*/Check*
  // helpers. With NetworkIsWorker::Yes the current (worker) thread doubles as
  // the network thread; otherwise a dedicated network thread is created here
  // but only started later, in CreateChannels().
  ChannelTest(bool verify_playout,
              rtc::ArrayView<const uint8_t> rtp_data,
              rtc::ArrayView<const uint8_t> rtcp_data,
              NetworkIsWorker network_is_worker)
      : verify_playout_(verify_playout),
        rtp_packet_(rtp_data.data(), rtp_data.size()),
        rtcp_packet_(rtcp_data.data(), rtcp_data.size()) {
    if (network_is_worker == NetworkIsWorker::Yes) {
      network_thread_ = rtc::Thread::Current();
    } else {
      // Created but deliberately not started yet; see CreateChannels().
      network_thread_keeper_ = rtc::Thread::Create();
      network_thread_keeper_->SetName("Network", nullptr);
      network_thread_ = network_thread_keeper_.get();
    }
    RTC_DCHECK(network_thread_);
  }
+
  // Tears down on the network thread: first invalidate the safety flag so
  // queued SendRtp tasks become no-ops, then detach both channels from their
  // transports.
  ~ChannelTest() {
    if (network_thread_) {
      SendTask(network_thread_, [this]() {
        network_thread_safety_->SetNotAlive();
        DeinitChannels();
      });
    }
  }
+
+ void CreateChannels(int flags1, int flags2) {
+ CreateChannels(std::make_unique<typename T::MediaSendChannel>(
+ typename T::Options(), network_thread_),
+ std::make_unique<typename T::MediaReceiveChannel>(
+ typename T::Options(), network_thread_),
+ std::make_unique<typename T::MediaSendChannel>(
+ typename T::Options(), network_thread_),
+ std::make_unique<typename T::MediaReceiveChannel>(
+ typename T::Options(), network_thread_),
+ flags1, flags2);
+ }
  // Builds both channels end to end: per-side fake transports chosen by
  // `flags1`/`flags2`, the RTP transports wrapping them, the channels under
  // test, and the local/remote content descriptions (SSRCs are added to the
  // local side; also to the remote side when SSRC_MUX is set).
  void CreateChannels(std::unique_ptr<typename T::MediaSendChannel> ch1s,
                      std::unique_ptr<typename T::MediaReceiveChannel> ch1r,
                      std::unique_ptr<typename T::MediaSendChannel> ch2s,
                      std::unique_ptr<typename T::MediaReceiveChannel> ch2r,
                      int flags1,
                      int flags2) {
    RTC_DCHECK(!channel1_);
    RTC_DCHECK(!channel2_);

    // Network thread is started in CreateChannels, to allow the test to
    // configure a fake clock before any threads are spawned and attempt to
    // access the time.
    if (network_thread_keeper_) {
      network_thread_keeper_->Start();
    }

    // Make sure if using raw packet transports, they're used for both
    // channels.
    RTC_DCHECK_EQ(flags1 & RAW_PACKET_TRANSPORT, flags2 & RAW_PACKET_TRANSPORT);
    rtc::Thread* worker_thread = rtc::Thread::Current();
    // Based on flags, create fake DTLS or raw packet transports.
    if (flags1 & RAW_PACKET_TRANSPORT) {
      fake_rtp_packet_transport1_.reset(
          new rtc::FakePacketTransport("channel1_rtp"));
      if (!(flags1 & RTCP_MUX)) {
        fake_rtcp_packet_transport1_.reset(
            new rtc::FakePacketTransport("channel1_rtcp"));
      }
    } else {
      // Confirmed to work with KT_RSA and KT_ECDSA.
      fake_rtp_dtls_transport1_.reset(new cricket::FakeDtlsTransport(
          "channel1", cricket::ICE_CANDIDATE_COMPONENT_RTP, network_thread_));
      if (!(flags1 & RTCP_MUX)) {
        fake_rtcp_dtls_transport1_.reset(new cricket::FakeDtlsTransport(
            "channel1", cricket::ICE_CANDIDATE_COMPONENT_RTCP,
            network_thread_));
      }
      if (flags1 & DTLS) {
        // Both the RTP and (if present) RTCP transports share one certificate.
        auto cert1 = rtc::RTCCertificate::Create(
            rtc::SSLIdentity::Create("session1", rtc::KT_DEFAULT));
        fake_rtp_dtls_transport1_->SetLocalCertificate(cert1);
        if (fake_rtcp_dtls_transport1_) {
          fake_rtcp_dtls_transport1_->SetLocalCertificate(cert1);
        }
      }
    }
    // Based on flags, create fake DTLS or raw packet transports.
    if (flags2 & RAW_PACKET_TRANSPORT) {
      fake_rtp_packet_transport2_.reset(
          new rtc::FakePacketTransport("channel2_rtp"));
      if (!(flags2 & RTCP_MUX)) {
        fake_rtcp_packet_transport2_.reset(
            new rtc::FakePacketTransport("channel2_rtcp"));
      }
    } else {
      // Confirmed to work with KT_RSA and KT_ECDSA.
      fake_rtp_dtls_transport2_.reset(new cricket::FakeDtlsTransport(
          "channel2", cricket::ICE_CANDIDATE_COMPONENT_RTP, network_thread_));
      if (!(flags2 & RTCP_MUX)) {
        fake_rtcp_dtls_transport2_.reset(new cricket::FakeDtlsTransport(
            "channel2", cricket::ICE_CANDIDATE_COMPONENT_RTCP,
            network_thread_));
      }
      if (flags2 & DTLS) {
        auto cert2 = rtc::RTCCertificate::Create(
            rtc::SSLIdentity::Create("session2", rtc::KT_DEFAULT));
        fake_rtp_dtls_transport2_->SetLocalCertificate(cert2);
        if (fake_rtcp_dtls_transport2_) {
          fake_rtcp_dtls_transport2_->SetLocalCertificate(cert2);
        }
      }
    }
    rtp_transport1_ = CreateRtpTransportBasedOnFlags(
        fake_rtp_packet_transport1_.get(), fake_rtcp_packet_transport1_.get(),
        fake_rtp_dtls_transport1_.get(), fake_rtcp_dtls_transport1_.get(),
        flags1);
    rtp_transport2_ = CreateRtpTransportBasedOnFlags(
        fake_rtp_packet_transport2_.get(), fake_rtcp_packet_transport2_.get(),
        fake_rtp_dtls_transport2_.get(), fake_rtcp_dtls_transport2_.get(),
        flags2);

    channel1_ = CreateChannel(worker_thread, network_thread_, std::move(ch1s),
                              std::move(ch1r), rtp_transport1_.get(), flags1);
    channel2_ = CreateChannel(worker_thread, network_thread_, std::move(ch2s),
                              std::move(ch2r), rtp_transport2_.get(), flags2);
    CreateContent(flags1, kPcmuCodec, kH264Codec, &local_media_content1_);
    CreateContent(flags2, kPcmuCodec, kH264Codec, &local_media_content2_);
    CopyContent(local_media_content1_, &remote_media_content1_);
    CopyContent(local_media_content2_, &remote_media_content2_);

    // Add stream information (SSRC) to the local content but not to the remote
    // content. This means that we per default know the SSRC of what we send but
    // not what we receive.
    AddLegacyStreamInContent(kSsrc1, flags1, &local_media_content1_);
    AddLegacyStreamInContent(kSsrc2, flags2, &local_media_content2_);

    // If SSRC_MUX is used we also need to know the SSRC of the incoming stream.
    if (flags1 & SSRC_MUX) {
      AddLegacyStreamInContent(kSsrc1, flags1, &remote_media_content1_);
    }
    if (flags2 & SSRC_MUX) {
      AddLegacyStreamInContent(kSsrc2, flags2, &remote_media_content2_);
    }
  }
  // Creates the typed channel under test. Declared here; defined separately
  // for each Traits specialization (voice/video) elsewhere in this file.
  std::unique_ptr<typename T::Channel> CreateChannel(
      rtc::Thread* worker_thread,
      rtc::Thread* network_thread,
      std::unique_ptr<typename T::MediaSendChannel> ch_send,
      std::unique_ptr<typename T::MediaReceiveChannel> ch_receive,
      webrtc::RtpTransportInternal* rtp_transport,
      int flags);
+
+ std::unique_ptr<webrtc::RtpTransportInternal> CreateRtpTransportBasedOnFlags(
+ rtc::PacketTransportInternal* rtp_packet_transport,
+ rtc::PacketTransportInternal* rtcp_packet_transport,
+ DtlsTransportInternal* rtp_dtls_transport,
+ DtlsTransportInternal* rtcp_dtls_transport,
+ int flags) {
+ if (flags & RTCP_MUX) {
+ rtcp_packet_transport = nullptr;
+ rtcp_dtls_transport = nullptr;
+ }
+
+ if (flags & DTLS) {
+ return CreateDtlsSrtpTransport(rtp_dtls_transport, rtcp_dtls_transport);
+ } else {
+ if (flags & RAW_PACKET_TRANSPORT) {
+ return CreateUnencryptedTransport(rtp_packet_transport,
+ rtcp_packet_transport);
+ } else {
+ return CreateUnencryptedTransport(rtp_dtls_transport,
+ rtcp_dtls_transport);
+ }
+ }
+ }
+
  // Uninitializes the channels on the network thread by detaching them from
  // their RTP transports. No-op if both channels are already gone.
  void DeinitChannels() {
    if (!channel1_ && !channel2_)
      return;
    SendTask(network_thread_, [this]() {
      if (channel1_) {
        RTC_DCHECK_RUN_ON(channel1_->network_thread());
        channel1_->SetRtpTransport(nullptr);
      }
      if (channel2_) {
        RTC_DCHECK_RUN_ON(channel2_->network_thread());
        channel2_->SetRtpTransport(nullptr);
      }
    });
  }
+
+ std::unique_ptr<webrtc::RtpTransport> CreateUnencryptedTransport(
+ rtc::PacketTransportInternal* rtp_packet_transport,
+ rtc::PacketTransportInternal* rtcp_packet_transport) {
+ auto rtp_transport = std::make_unique<webrtc::RtpTransport>(
+ rtcp_packet_transport == nullptr);
+
+ SendTask(network_thread_,
+ [&rtp_transport, rtp_packet_transport, rtcp_packet_transport] {
+ rtp_transport->SetRtpPacketTransport(rtp_packet_transport);
+ if (rtcp_packet_transport) {
+ rtp_transport->SetRtcpPacketTransport(rtcp_packet_transport);
+ }
+ });
+ return rtp_transport;
+ }
+
  // Builds a DtlsSrtpTransport and wires it to the given fake DTLS transports
  // on the network thread. Passing a null `rtcp_dtls_transport` marks the
  // transport as RTCP-muxed.
  std::unique_ptr<webrtc::DtlsSrtpTransport> CreateDtlsSrtpTransport(
      cricket::DtlsTransportInternal* rtp_dtls_transport,
      cricket::DtlsTransportInternal* rtcp_dtls_transport) {
    auto dtls_srtp_transport = std::make_unique<webrtc::DtlsSrtpTransport>(
        rtcp_dtls_transport == nullptr, field_trials_);

    SendTask(network_thread_,
             [&dtls_srtp_transport, rtp_dtls_transport, rtcp_dtls_transport] {
               dtls_srtp_transport->SetDtlsTransports(rtp_dtls_transport,
                                                      rtcp_dtls_transport);
             });
    return dtls_srtp_transport;
  }
+
+ void ConnectFakeTransports() {
+ SendTask(network_thread_, [this] {
+ bool asymmetric = false;
+ // Depending on test flags, could be using DTLS or raw packet transport.
+ if (fake_rtp_dtls_transport1_ && fake_rtp_dtls_transport2_) {
+ fake_rtp_dtls_transport1_->SetDestination(
+ fake_rtp_dtls_transport2_.get(), asymmetric);
+ }
+ if (fake_rtcp_dtls_transport1_ && fake_rtcp_dtls_transport2_) {
+ fake_rtcp_dtls_transport1_->SetDestination(
+ fake_rtcp_dtls_transport2_.get(), asymmetric);
+ }
+ if (fake_rtp_packet_transport1_ && fake_rtp_packet_transport2_) {
+ fake_rtp_packet_transport1_->SetDestination(
+ fake_rtp_packet_transport2_.get(), asymmetric);
+ }
+ if (fake_rtcp_packet_transport1_ && fake_rtcp_packet_transport2_) {
+ fake_rtcp_packet_transport1_->SetDestination(
+ fake_rtcp_packet_transport2_.get(), asymmetric);
+ }
+ });
+ // The transport becoming writable will asynchronously update the send state
+ // on the worker thread; since this test uses the main thread as the worker
+ // thread, we must process the message queue for this to occur.
+ WaitForThreads();
+ }
+
+ bool SendInitiate() {
+ std::string err;
+ bool result = channel1_->SetLocalContent(&local_media_content1_,
+ SdpType::kOffer, err);
+ if (result) {
+ channel1_->Enable(true);
+ FlushCurrentThread();
+ result = channel2_->SetRemoteContent(&remote_media_content1_,
+ SdpType::kOffer, err);
+ if (result) {
+ ConnectFakeTransports();
+ result = channel2_->SetLocalContent(&local_media_content2_,
+ SdpType::kAnswer, err);
+ }
+ }
+ return result;
+ }
+
  // Completes the exchange started by SendInitiate(): enables channel 2 and
  // applies its answer to channel 1 as remote content.
  bool SendAccept() {
    channel2_->Enable(true);
    FlushCurrentThread();
    std::string err;
    return channel1_->SetRemoteContent(&remote_media_content2_,
                                       SdpType::kAnswer, err);
  }
+
+ bool SendOffer() {
+ std::string err;
+ bool result = channel1_->SetLocalContent(&local_media_content1_,
+ SdpType::kOffer, err);
+ if (result) {
+ channel1_->Enable(true);
+ result = channel2_->SetRemoteContent(&remote_media_content1_,
+ SdpType::kOffer, err);
+ }
+ return result;
+ }
+
+ bool SendProvisionalAnswer() {
+ std::string err;
+ bool result = channel2_->SetLocalContent(&local_media_content2_,
+ SdpType::kPrAnswer, err);
+ if (result) {
+ channel2_->Enable(true);
+ result = channel1_->SetRemoteContent(&remote_media_content2_,
+ SdpType::kPrAnswer, err);
+ ConnectFakeTransports();
+ }
+ return result;
+ }
+
+ bool SendFinalAnswer() {
+ std::string err;
+ bool result = channel2_->SetLocalContent(&local_media_content2_,
+ SdpType::kAnswer, err);
+ if (result) {
+ result = channel1_->SetRemoteContent(&remote_media_content2_,
+ SdpType::kAnswer, err);
+ }
+ return result;
+ }
+
  // Queues `data` for sending through `media_channel` on the network thread.
  // The SafeTask guard makes the task a no-op once the fixture's network
  // safety flag has been marked not-alive (see the destructor).
  void SendRtp(typename T::MediaSendChannel* media_channel, rtc::Buffer data) {
    network_thread_->PostTask(webrtc::SafeTask(
        network_thread_safety_, [media_channel, data = std::move(data)]() {
          media_channel->SendPacket(data.data(), data.size(),
                                    rtc::PacketOptions());
        }));
  }
+
  // Sends the canned RTP packet from channel 1.
  void SendRtp1() {
    SendRtp1(rtc::Buffer(rtp_packet_.data(), rtp_packet_.size()));
  }

  // Sends an arbitrary packet from channel 1.
  void SendRtp1(rtc::Buffer data) {
    SendRtp(media_send_channel1_impl(), std::move(data));
  }

  // Sends the canned RTP packet from channel 2.
  void SendRtp2() {
    SendRtp2(rtc::Buffer(rtp_packet_.data(), rtp_packet_.size()));
  }

  // Sends an arbitrary packet from channel 2.
  void SendRtp2(rtc::Buffer data) {
    SendRtp(media_send_channel2_impl(), std::move(data));
  }

  // Methods to send custom data: the canned packet with SSRC, sequence number
  // and (optionally) payload type rewritten; see CreateRtpData().
  void SendCustomRtp1(uint32_t ssrc, int sequence_number, int pl_type = -1) {
    SendRtp1(CreateRtpData(ssrc, sequence_number, pl_type));
  }
  void SendCustomRtp2(uint32_t ssrc, int sequence_number, int pl_type = -1) {
    SendRtp2(CreateRtpData(ssrc, sequence_number, pl_type));
  }
+
  // True if channel 1's receive channel saw the canned RTP packet.
  bool CheckRtp1() {
    return media_receive_channel1_impl()->CheckRtp(rtp_packet_.data(),
                                                   rtp_packet_.size());
  }
  // True if channel 2's receive channel saw the canned RTP packet.
  bool CheckRtp2() {
    return media_receive_channel2_impl()->CheckRtp(rtp_packet_.data(),
                                                   rtp_packet_.size());
  }
  // Methods to check custom data (as produced by SendCustomRtp1/2).
  bool CheckCustomRtp1(uint32_t ssrc, int sequence_number, int pl_type = -1) {
    rtc::Buffer data = CreateRtpData(ssrc, sequence_number, pl_type);
    return media_receive_channel1_impl()->CheckRtp(data.data(), data.size());
  }
  bool CheckCustomRtp2(uint32_t ssrc, int sequence_number, int pl_type = -1) {
    rtc::Buffer data = CreateRtpData(ssrc, sequence_number, pl_type);
    return media_receive_channel2_impl()->CheckRtp(data.data(), data.size());
  }
  // Returns a copy of the canned RTP packet with the given SSRC and sequence
  // number patched into the header, and, when `pl_type` >= 0, byte 1 (the
  // marker/payload-type byte) overwritten with `pl_type`.
  rtc::Buffer CreateRtpData(uint32_t ssrc, int sequence_number, int pl_type) {
    rtc::Buffer data(rtp_packet_.data(), rtp_packet_.size());
    // Set SSRC in the rtp packet copy (bytes 8-11 of the RTP header).
    rtc::SetBE32(data.data() + 8, ssrc);
    // Sequence number lives in bytes 2-3.
    rtc::SetBE16(data.data() + 2, sequence_number);
    if (pl_type >= 0) {
      rtc::Set8(data.data(), 1, static_cast<uint8_t>(pl_type));
    }
    return data;
  }
+
  // True if the respective fake send channel has no pending RTP packets.
  bool CheckNoRtp1() { return media_send_channel1_impl()->CheckNoRtp(); }
  bool CheckNoRtp2() { return media_send_channel2_impl()->CheckNoRtp(); }

  // Fills `content` with a media description for this media kind; the base
  // implementation is a no-op.
  void CreateContent(int flags,
                     const cricket::Codec& audio_codec,
                     const cricket::Codec& video_codec,
                     typename T::Content* content) {
    // overridden in specialized classes
  }
  // Copies `source` into `content`; the base implementation is a no-op.
  void CopyContent(const typename T::Content& source,
                   typename T::Content* content) {
    // overridden in specialized classes
  }
+
  // Creates a MediaContent with one stream.
  // kPcmuCodec is used as audio codec and kH264Codec is used as video codec.
  // NOTE: returns a raw owning pointer; the caller takes ownership.
  typename T::Content* CreateMediaContentWithStream(uint32_t ssrc) {
    typename T::Content* content = new typename T::Content();
    CreateContent(0, kPcmuCodec, kH264Codec, content);
    AddLegacyStreamInContent(ssrc, 0, content);
    return content;
  }
+
  // Will manage the lifetime of a CallThread, making sure it's
  // destroyed before this object goes out of scope.
  class ScopedCallThread {
   public:
    // Starts a fresh thread and immediately queues `functor` on it.
    explicit ScopedCallThread(absl::AnyInvocable<void() &&> functor)
        : thread_(rtc::Thread::Create()) {
      thread_->Start();
      thread_->PostTask(std::move(functor));
    }

    // Stops (and thereby joins) the thread before destruction.
    ~ScopedCallThread() { thread_->Stop(); }

    // Non-owning access to the managed thread.
    rtc::Thread* thread() { return thread_.get(); }

   private:
    std::unique_ptr<rtc::Thread> thread_;
  };
+
  // Most recent candidate pair reported to this fixture (may be null).
  cricket::CandidatePairInterface* last_selected_candidate_pair() {
    return last_selected_candidate_pair_;
  }

  // Adds a legacy (single-SSRC) stream to `content`; the base implementation
  // is a no-op and is shadowed by the voice/video specializations.
  void AddLegacyStreamInContent(uint32_t ssrc,
                                int flags,
                                typename T::Content* content) {
    // Base implementation.
  }
+
+ // Utility method that calls BaseChannel::srtp_active() on the network thread
+ // and returns the result. The `srtp_active()` state is maintained on the
+ // network thread, which callers need to factor in.
+ bool IsSrtpActive(std::unique_ptr<typename T::Channel>& channel) {
+ RTC_DCHECK(channel.get());
+ bool result;
+ SendTask(network_thread_, [&] { result = channel->srtp_active(); });
+ return result;
+ }
+
+ // Returns true iff the transport is set for a channel and rtcp_mux_enabled()
+ // returns true.
+ bool IsRtcpMuxEnabled(std::unique_ptr<typename T::Channel>& channel) {
+ RTC_DCHECK(channel.get());
+ bool result;
+ SendTask(network_thread_, [&] {
+ result = channel->rtp_transport() &&
+ channel->rtp_transport()->rtcp_mux_enabled();
+ });
+ return result;
+ }
+
  // Tests that can be used by derived classes.

  // Basic sanity check: a freshly created channel has no SRTP, is not
  // sending/playing, and its send/receive halves agree on media type.
  void TestInit() {
    CreateChannels(0, 0);
    EXPECT_FALSE(IsSrtpActive(channel1_));
    EXPECT_FALSE(media_send_channel1_impl()->sending());
    if (verify_playout_) {
      EXPECT_FALSE(media_receive_channel1_impl()->playout());
    }
    EXPECT_TRUE(media_send_channel1_impl()->send_codecs().empty());
    EXPECT_TRUE(media_receive_channel1_impl()->recv_streams().empty());
    EXPECT_TRUE(media_send_channel1_impl()->rtp_packets().empty());
    // Basic sanity test for send and receive channel objects
    EXPECT_EQ(channel1_->media_send_channel()->media_type(),
              media_send_channel1_impl()->media_type());
    EXPECT_EQ(channel1_->media_receive_channel()->media_type(),
              media_receive_channel1_impl()->media_type());
    EXPECT_EQ(channel1_->media_send_channel()->media_type(),
              channel1_->media_receive_channel()->media_type());
  }

  // Test that SetLocalContent and SetRemoteContent properly configure
  // the codecs: they take effect only once both sides have been applied.
  void TestSetContents() {
    CreateChannels(0, 0);
    typename T::Content content;
    CreateContent(0, kPcmuCodec, kH264Codec, &content);
    std::string err;
    EXPECT_TRUE(channel1_->SetLocalContent(&content, SdpType::kOffer, err));
    // Not negotiated yet: no send codecs until the remote answer arrives.
    EXPECT_EQ(0U, media_send_channel1_impl()->send_codecs().size());
    EXPECT_TRUE(channel1_->SetRemoteContent(&content, SdpType::kAnswer, err));
    ASSERT_EQ(1U, media_send_channel1_impl()->send_codecs().size());
    EXPECT_EQ(content.codecs()[0],
              media_send_channel1_impl()->send_codecs()[0]);
  }
+
  // Test that SetLocalContent and SetRemoteContent properly configure
  // extmap-allow-mixed. The negotiated value follows the answer.
  void TestSetContentsExtmapAllowMixedCaller(bool offer, bool answer) {
    // For a caller, SetLocalContent() is called first with an offer and next
    // SetRemoteContent() is called with the answer.
    CreateChannels(0, 0);
    typename T::Content content;
    CreateContent(0, kPcmuCodec, kH264Codec, &content);
    auto offer_enum = offer ? (T::Content::kSession) : (T::Content::kNo);
    auto answer_enum = answer ? (T::Content::kSession) : (T::Content::kNo);
    content.set_extmap_allow_mixed_enum(offer_enum);
    std::string err;
    EXPECT_TRUE(channel1_->SetLocalContent(&content, SdpType::kOffer, err));
    content.set_extmap_allow_mixed_enum(answer_enum);
    EXPECT_TRUE(channel1_->SetRemoteContent(&content, SdpType::kAnswer, err));
    EXPECT_EQ(answer, media_send_channel1_impl()->ExtmapAllowMixed());
  }
+ void TestSetContentsExtmapAllowMixedCallee(bool offer, bool answer) {
+ // For a callee, SetRemoteContent() is called first with an offer and next
+ // SetLocalContent() is called with the answer.
+ CreateChannels(0, 0);
+ typename T::Content content;
+ CreateContent(0, kPcmuCodec, kH264Codec, &content);
+ auto offer_enum = offer ? (T::Content::kSession) : (T::Content::kNo);
+ auto answer_enum = answer ? (T::Content::kSession) : (T::Content::kNo);
+ content.set_extmap_allow_mixed_enum(offer_enum);
+ std::string err;
+ EXPECT_TRUE(channel1_->SetRemoteContent(&content, SdpType::kOffer, err));
+ content.set_extmap_allow_mixed_enum(answer_enum);
+ EXPECT_TRUE(channel1_->SetLocalContent(&content, SdpType::kAnswer, err));
+ EXPECT_EQ(answer, media_send_channel1()->ExtmapAllowMixed());
+ }
+
  // Test that SetLocalContent and SetRemoteContent properly deals
  // with an empty offer: codecs only appear once a real answer is applied.
  void TestSetContentsNullOffer() {
    CreateChannels(0, 0);
    typename T::Content content;
    std::string err;
    // Empty (default-constructed) local offer is accepted.
    EXPECT_TRUE(channel1_->SetLocalContent(&content, SdpType::kOffer, err));
    CreateContent(0, kPcmuCodec, kH264Codec, &content);
    EXPECT_EQ(0U, media_send_channel1_impl()->send_codecs().size());
    EXPECT_TRUE(channel1_->SetRemoteContent(&content, SdpType::kAnswer, err));
    ASSERT_EQ(1U, media_send_channel1_impl()->send_codecs().size());
    EXPECT_EQ(content.codecs()[0],
              media_send_channel1_impl()->send_codecs()[0]);
  }
+
  // Test that SetLocalContent and SetRemoteContent properly set RTCP
  // mux.
  void TestSetContentsRtcpMux() {
    CreateChannels(0, 0);
    typename T::Content content;
    CreateContent(0, kPcmuCodec, kH264Codec, &content);
    // Both sides agree on mux. Should no longer be a separate RTCP channel.
    content.set_rtcp_mux(true);
    std::string err;
    EXPECT_TRUE(channel1_->SetLocalContent(&content, SdpType::kOffer, err));
    EXPECT_TRUE(channel1_->SetRemoteContent(&content, SdpType::kAnswer, err));
    // Only initiator supports mux. Should still have a separate RTCP channel.
    EXPECT_TRUE(channel2_->SetLocalContent(&content, SdpType::kOffer, err));
    content.set_rtcp_mux(false);
    EXPECT_TRUE(channel2_->SetRemoteContent(&content, SdpType::kAnswer, err));
  }
+
  // Test that SetLocalContent and SetRemoteContent properly
  // handles adding and removing StreamParams when the action is a full
  // SdpType::kOffer / SdpType::kAnswer.
  void TestChangeStreamParamsInContent() {
    cricket::StreamParams stream1;
    stream1.id = "stream1";
    stream1.ssrcs.push_back(kSsrc1);
    stream1.cname = "stream1_cname";

    cricket::StreamParams stream2;
    stream2.id = "stream2";
    stream2.ssrcs.push_back(kSsrc2);
    stream2.cname = "stream2_cname";

    // Setup a call where channel 1 send `stream1` to channel 2.
    CreateChannels(0, 0);
    typename T::Content content1;
    CreateContent(0, kPcmuCodec, kH264Codec, &content1);
    content1.AddStream(stream1);
    std::string err;
    EXPECT_TRUE(channel1_->SetLocalContent(&content1, SdpType::kOffer, err));
    channel1_->Enable(true);
    EXPECT_EQ(1u, media_send_channel1_impl()->send_streams().size());

    EXPECT_TRUE(channel2_->SetRemoteContent(&content1, SdpType::kOffer, err));
    EXPECT_EQ(1u, media_receive_channel2_impl()->recv_streams().size());
    ConnectFakeTransports();

    // Channel 2 do not send anything.
    typename T::Content content2;
    CreateContent(0, kPcmuCodec, kH264Codec, &content2);
    EXPECT_TRUE(channel1_->SetRemoteContent(&content2, SdpType::kAnswer, err));
    EXPECT_EQ(0u, media_receive_channel1_impl()->recv_streams().size());
    EXPECT_TRUE(channel2_->SetLocalContent(&content2, SdpType::kAnswer, err));
    channel2_->Enable(true);
    EXPECT_EQ(0u, media_send_channel2_impl()->send_streams().size());

    // Verify that a packet sent with stream1's SSRC arrives at channel 2.
    SendCustomRtp1(kSsrc1, 0);
    WaitForThreads();
    EXPECT_TRUE(CheckCustomRtp2(kSsrc1, 0));

    // Let channel 2 update the content by starting to send `stream2`.
    typename T::Content content3;
    CreateContent(0, kPcmuCodec, kH264Codec, &content3);
    content3.AddStream(stream2);
    EXPECT_TRUE(channel2_->SetLocalContent(&content3, SdpType::kOffer, err));
    ASSERT_EQ(1u, media_send_channel2_impl()->send_streams().size());
    EXPECT_EQ(stream2, media_send_channel2_impl()->send_streams()[0]);

    EXPECT_TRUE(channel1_->SetRemoteContent(&content3, SdpType::kOffer, err));
    ASSERT_EQ(1u, media_receive_channel1_impl()->recv_streams().size());
    EXPECT_EQ(stream2, media_receive_channel1_impl()->recv_streams()[0]);

    // Channel 1 replies but stop sending stream1.
    typename T::Content content4;
    CreateContent(0, kPcmuCodec, kH264Codec, &content4);
    EXPECT_TRUE(channel1_->SetLocalContent(&content4, SdpType::kAnswer, err));
    EXPECT_EQ(0u, media_send_channel1_impl()->send_streams().size());

    EXPECT_TRUE(channel2_->SetRemoteContent(&content4, SdpType::kAnswer, err));
    EXPECT_EQ(0u, media_receive_channel2_impl()->recv_streams().size());

    // Now traffic flows in the opposite direction, on stream2's SSRC.
    SendCustomRtp2(kSsrc2, 0);
    WaitForThreads();
    EXPECT_TRUE(CheckCustomRtp1(kSsrc2, 0));
  }
+
  // Test that we only start playout and sending at the right times:
  // playout once local content is set, sending only after the full
  // offer/answer exchange plus Enable() plus connected transports.
  void TestPlayoutAndSendingStates() {
    CreateChannels(0, 0);
    // Freshly created: nothing plays out or sends on either side.
    if (verify_playout_) {
      EXPECT_FALSE(media_receive_channel1_impl()->playout());
    }
    EXPECT_FALSE(media_send_channel1_impl()->sending());
    if (verify_playout_) {
      EXPECT_FALSE(media_receive_channel2_impl()->playout());
    }
    EXPECT_FALSE(media_send_channel2_impl()->sending());
    // Enabling alone is not sufficient to start sending.
    channel1_->Enable(true);
    FlushCurrentThread();
    if (verify_playout_) {
      EXPECT_FALSE(media_receive_channel1_impl()->playout());
    }
    EXPECT_FALSE(media_send_channel1_impl()->sending());
    std::string err;
    // Local content starts playout on channel 1, but not sending.
    EXPECT_TRUE(channel1_->SetLocalContent(&local_media_content1_,
                                           SdpType::kOffer, err));
    if (verify_playout_) {
      EXPECT_TRUE(media_receive_channel1_impl()->playout());
    }
    EXPECT_FALSE(media_send_channel1_impl()->sending());
    EXPECT_TRUE(channel2_->SetRemoteContent(&local_media_content1_,
                                            SdpType::kOffer, err));
    if (verify_playout_) {
      EXPECT_FALSE(media_receive_channel2_impl()->playout());
    }
    EXPECT_FALSE(media_send_channel2_impl()->sending());
    EXPECT_TRUE(channel2_->SetLocalContent(&local_media_content2_,
                                           SdpType::kAnswer, err));
    if (verify_playout_) {
      EXPECT_FALSE(media_receive_channel2_impl()->playout());
    }
    EXPECT_FALSE(media_send_channel2_impl()->sending());
    // Connecting the transports still does not start sending by itself.
    ConnectFakeTransports();
    if (verify_playout_) {
      EXPECT_TRUE(media_receive_channel1_impl()->playout());
    }
    EXPECT_FALSE(media_send_channel1_impl()->sending());
    if (verify_playout_) {
      EXPECT_FALSE(media_receive_channel2_impl()->playout());
    }
    EXPECT_FALSE(media_send_channel2_impl()->sending());
    // Channel 2 has content + transport; enabling it starts its sending.
    channel2_->Enable(true);
    FlushCurrentThread();
    if (verify_playout_) {
      EXPECT_TRUE(media_receive_channel2_impl()->playout());
    }
    EXPECT_TRUE(media_send_channel2_impl()->sending());
    // Applying the remote answer completes channel 1's negotiation too.
    EXPECT_TRUE(channel1_->SetRemoteContent(&local_media_content2_,
                                            SdpType::kAnswer, err));
    if (verify_playout_) {
      EXPECT_TRUE(media_receive_channel1_impl()->playout());
    }
    EXPECT_TRUE(media_send_channel1_impl()->sending());
  }
+
  // Test that changing the MediaContentDirection in the local and remote
  // session description start playout and sending at the right time.
  void TestMediaContentDirection() {
    CreateChannels(0, 0);
    typename T::Content content1;
    CreateContent(0, kPcmuCodec, kH264Codec, &content1);
    typename T::Content content2;
    CreateContent(0, kPcmuCodec, kH264Codec, &content2);
    // Set `content2` to be InActive.
    content2.set_direction(RtpTransceiverDirection::kInactive);

    channel1_->Enable(true);
    channel2_->Enable(true);
    FlushCurrentThread();
    // Enabled but not yet negotiated: nothing plays or sends.
    if (verify_playout_) {
      EXPECT_FALSE(media_receive_channel1_impl()->playout());
    }
    EXPECT_FALSE(media_send_channel1_impl()->sending());
    if (verify_playout_) {
      EXPECT_FALSE(media_receive_channel2_impl()->playout());
    }
    EXPECT_FALSE(media_send_channel2_impl()->sending());

    std::string err;
    EXPECT_TRUE(channel1_->SetLocalContent(&content1, SdpType::kOffer, err));
    EXPECT_TRUE(channel2_->SetRemoteContent(&content1, SdpType::kOffer, err));
    EXPECT_TRUE(channel2_->SetLocalContent(&content2, SdpType::kPrAnswer, err));
    EXPECT_TRUE(
        channel1_->SetRemoteContent(&content2, SdpType::kPrAnswer, err));
    ConnectFakeTransports();

    if (verify_playout_) {
      EXPECT_TRUE(media_receive_channel1_impl()->playout());
    }
    EXPECT_FALSE(media_send_channel1_impl()->sending());  // remote InActive
    if (verify_playout_) {
      EXPECT_FALSE(media_receive_channel2_impl()->playout());  // local InActive
    }
    EXPECT_FALSE(media_send_channel2_impl()->sending());  // local InActive

    // Update `content2` to be RecvOnly.
    content2.set_direction(RtpTransceiverDirection::kRecvOnly);
    EXPECT_TRUE(channel2_->SetLocalContent(&content2, SdpType::kPrAnswer, err));
    EXPECT_TRUE(
        channel1_->SetRemoteContent(&content2, SdpType::kPrAnswer, err));

    if (verify_playout_) {
      EXPECT_TRUE(media_receive_channel1_impl()->playout());
    }
    EXPECT_TRUE(media_send_channel1_impl()->sending());
    if (verify_playout_) {
      EXPECT_TRUE(media_receive_channel2_impl()->playout());  // local RecvOnly
    }
    EXPECT_FALSE(media_send_channel2_impl()->sending());  // local RecvOnly

    // Update `content2` to be SendRecv.
    content2.set_direction(RtpTransceiverDirection::kSendRecv);
    EXPECT_TRUE(channel2_->SetLocalContent(&content2, SdpType::kAnswer, err));
    EXPECT_TRUE(channel1_->SetRemoteContent(&content2, SdpType::kAnswer, err));

    // Fully bidirectional: both sides play out and send.
    if (verify_playout_) {
      EXPECT_TRUE(media_receive_channel1_impl()->playout());
    }
    EXPECT_TRUE(media_send_channel1_impl()->sending());
    if (verify_playout_) {
      EXPECT_TRUE(media_receive_channel2_impl()->playout());
    }
    EXPECT_TRUE(media_send_channel2_impl()->sending());

    // Update `content2` to be inactive on the receiver while sending at the
    // sender.
    content2.set_direction(RtpTransceiverDirection::kInactive);
    EXPECT_TRUE(channel1_->SetLocalContent(&content1, SdpType::kOffer, err));
    EXPECT_TRUE(channel2_->SetRemoteContent(&content1, SdpType::kOffer, err));
    EXPECT_TRUE(channel2_->SetLocalContent(&content2, SdpType::kAnswer, err));
    content2.set_direction(RtpTransceiverDirection::kRecvOnly);
    EXPECT_TRUE(channel1_->SetRemoteContent(&content2, SdpType::kAnswer, err));
    if (verify_playout_) {
      EXPECT_FALSE(media_receive_channel2_impl()->playout());
    }
    EXPECT_TRUE(media_send_channel1_impl()->sending());

    // Re-enable `content2`.
    content2.set_direction(RtpTransceiverDirection::kSendRecv);
    EXPECT_TRUE(channel1_->SetLocalContent(&content1, SdpType::kOffer, err));
    EXPECT_TRUE(channel2_->SetRemoteContent(&content1, SdpType::kOffer, err));
    EXPECT_TRUE(channel2_->SetLocalContent(&content2, SdpType::kAnswer, err));
    EXPECT_TRUE(channel1_->SetRemoteContent(&content2, SdpType::kAnswer, err));
    if (verify_playout_) {
      EXPECT_TRUE(media_receive_channel2_impl()->playout());
    }
    EXPECT_TRUE(media_send_channel1_impl()->sending());
  }
+
+ // Tests that when the transport channel signals a candidate pair change
+ // event, the media channel will receive a call on the network route change.
+ void TestNetworkRouteChanges() {
+ static constexpr uint16_t kLocalNetId = 1;
+ static constexpr uint16_t kRemoteNetId = 2;
+ static constexpr int kLastPacketId = 100;
+ // Ipv4(20) + UDP(8).
+ static constexpr int kTransportOverheadPerPacket = 28;
+ static constexpr int kSrtpOverheadPerPacket = 10;
+
+ CreateChannels(DTLS, DTLS);
+ SendInitiate();
+
+ typename T::MediaSendChannel* media_send_channel1_impl =
+ this->media_send_channel1_impl();
+ ASSERT_TRUE(media_send_channel1_impl);
+
+ // Need to wait for the threads before calling
+ // `set_num_network_route_changes` because the network route would be set
+ // when creating the channel.
+ WaitForThreads();
+ media_send_channel1_impl->set_num_network_route_changes(0);
+ SendTask(network_thread_, [this] {
+ rtc::NetworkRoute network_route;
+ // The transport channel becomes disconnected.
+ fake_rtp_dtls_transport1_->ice_transport()->SignalNetworkRouteChanged(
+ absl::optional<rtc::NetworkRoute>(network_route));
+ });
+ WaitForThreads();
+ EXPECT_EQ(1, media_send_channel1_impl->num_network_route_changes());
+ EXPECT_FALSE(media_send_channel1_impl->last_network_route().connected);
+ media_send_channel1_impl->set_num_network_route_changes(0);
+
+ SendTask(network_thread_, [this] {
+ rtc::NetworkRoute network_route;
+ network_route.connected = true;
+ network_route.local =
+ rtc::RouteEndpoint::CreateWithNetworkId(kLocalNetId);
+ network_route.remote =
+ rtc::RouteEndpoint::CreateWithNetworkId(kRemoteNetId);
+ network_route.last_sent_packet_id = kLastPacketId;
+ network_route.packet_overhead = kTransportOverheadPerPacket;
+ // The transport channel becomes connected.
+ fake_rtp_dtls_transport1_->ice_transport()->SignalNetworkRouteChanged(
+
+ absl::optional<rtc::NetworkRoute>(network_route));
+ });
+ WaitForThreads();
+ EXPECT_EQ(1, media_send_channel1_impl->num_network_route_changes());
+ EXPECT_TRUE(media_send_channel1_impl->last_network_route().connected);
+ EXPECT_EQ(
+ kLocalNetId,
+ media_send_channel1_impl->last_network_route().local.network_id());
+ EXPECT_EQ(
+ kRemoteNetId,
+ media_send_channel1_impl->last_network_route().remote.network_id());
+ EXPECT_EQ(
+ kLastPacketId,
+ media_send_channel1_impl->last_network_route().last_sent_packet_id);
+ EXPECT_EQ(kTransportOverheadPerPacket + kSrtpOverheadPerPacket,
+ media_send_channel1_impl->transport_overhead_per_packet());
+ }
+
+ // Test setting up a call.
  // Test setting up a call: after the offer only playout is active; sending
  // starts on both sides once the answer is applied.
  void TestCallSetup() {
    CreateChannels(0, 0);
    // No DTLS flag was passed, so SRTP must stay inactive throughout.
    EXPECT_FALSE(IsSrtpActive(channel1_));
    EXPECT_TRUE(SendInitiate());
    if (verify_playout_) {
      EXPECT_TRUE(media_receive_channel1_impl()->playout());
    }
    // Sending must not start until the answer arrives.
    EXPECT_FALSE(media_send_channel1_impl()->sending());
    EXPECT_TRUE(SendAccept());
    EXPECT_FALSE(IsSrtpActive(channel1_));
    EXPECT_TRUE(media_send_channel1_impl()->sending());
    EXPECT_EQ(1U, media_send_channel1_impl()->send_codecs().size());
    if (verify_playout_) {
      EXPECT_TRUE(media_receive_channel2_impl()->playout());
    }
    EXPECT_TRUE(media_send_channel2_impl()->sending());
    EXPECT_EQ(1U, media_send_channel2_impl()->send_codecs().size());
  }
+
+ // Send voice RTP data to the other side and ensure it gets there.
  // Send voice RTP data to the other side and ensure it gets there.
  void SendRtpToRtp() {
    CreateChannels(RTCP_MUX, RTCP_MUX);
    EXPECT_TRUE(SendInitiate());
    EXPECT_TRUE(SendAccept());
    EXPECT_TRUE(IsRtcpMuxEnabled(channel1_));
    EXPECT_TRUE(IsRtcpMuxEnabled(channel2_));
    SendRtp1();
    SendRtp2();
    WaitForThreads();
    // Each side received exactly the one packet sent by its peer and
    // nothing more.
    EXPECT_TRUE(CheckRtp1());
    EXPECT_TRUE(CheckRtp2());
    EXPECT_TRUE(CheckNoRtp1());
    EXPECT_TRUE(CheckNoRtp2());
  }
+
  // Checks that channels can be deinitialized and destroyed while packets
  // are still in flight, without waiting for the threads to drain.
  void TestDeinit() {
    CreateChannels(0, 0);
    EXPECT_TRUE(SendInitiate());
    EXPECT_TRUE(SendAccept());
    SendRtp1();
    SendRtp2();

    DeinitChannels();

    // Do not wait, destroy channels.
    channel1_.reset(nullptr);
    channel2_.reset(nullptr);
  }
+
  // Sends RTP in both directions over DTLS-SRTP (with extra per-channel
  // `flags1`/`flags2`) and verifies delivery.
  void SendDtlsSrtpToDtlsSrtp(int flags1, int flags2) {
    CreateChannels(flags1 | DTLS, flags2 | DTLS);
    // SRTP only becomes active after the offer/answer handshake completes.
    EXPECT_FALSE(IsSrtpActive(channel1_));
    EXPECT_FALSE(IsSrtpActive(channel2_));
    EXPECT_TRUE(SendInitiate());
    WaitForThreads();
    EXPECT_TRUE(SendAccept());
    EXPECT_TRUE(IsSrtpActive(channel1_));
    EXPECT_TRUE(IsSrtpActive(channel2_));
    SendRtp1();
    SendRtp2();
    WaitForThreads();
    EXPECT_TRUE(CheckRtp1());
    EXPECT_TRUE(CheckRtp2());
    EXPECT_TRUE(CheckNoRtp1());
    EXPECT_TRUE(CheckNoRtp2());
  }
+
+ // Test that we can send and receive early media when a provisional answer is
+ // sent and received. The test uses SRTP, RTCP mux and SSRC mux.
  // Test that we can send and receive early media when a provisional answer is
  // sent and received. The test uses SRTP, RTCP mux and SSRC mux.
  void SendEarlyMediaUsingRtcpMuxSrtp() {
    // Per-direction RTP sequence numbers, incremented before each send.
    int sequence_number1_1 = 0, sequence_number2_2 = 0;

    CreateChannels(SSRC_MUX | RTCP_MUX | DTLS, SSRC_MUX | RTCP_MUX | DTLS);
    EXPECT_TRUE(SendOffer());
    EXPECT_TRUE(SendProvisionalAnswer());
    // SRTP and RTCP mux must already be active after the provisional answer.
    EXPECT_TRUE(IsSrtpActive(channel1_));
    EXPECT_TRUE(IsSrtpActive(channel2_));
    EXPECT_TRUE(IsRtcpMuxEnabled(channel1_));
    EXPECT_TRUE(IsRtcpMuxEnabled(channel2_));
    WaitForThreads();  // Wait for 'sending' flag go through network thread.
    SendCustomRtp1(kSsrc1, ++sequence_number1_1);
    WaitForThreads();
    EXPECT_TRUE(CheckCustomRtp2(kSsrc1, sequence_number1_1));

    // Send packets from callee and verify that it is received.
    SendCustomRtp2(kSsrc2, ++sequence_number2_2);
    WaitForThreads();
    EXPECT_TRUE(CheckCustomRtp1(kSsrc2, sequence_number2_2));

    // Complete call setup and ensure everything is still OK.
    EXPECT_TRUE(SendFinalAnswer());
    EXPECT_TRUE(IsSrtpActive(channel1_));
    EXPECT_TRUE(IsSrtpActive(channel2_));
    SendCustomRtp1(kSsrc1, ++sequence_number1_1);
    SendCustomRtp2(kSsrc2, ++sequence_number2_2);
    WaitForThreads();
    EXPECT_TRUE(CheckCustomRtp2(kSsrc1, sequence_number1_1));
    EXPECT_TRUE(CheckCustomRtp1(kSsrc2, sequence_number2_2));
  }
+
+ // Test that we properly send RTP without SRTP from a thread.
  // Test that we properly send RTP without SRTP from a thread.
  void SendRtpToRtpOnThread() {
    CreateChannels(0, 0);
    EXPECT_TRUE(SendInitiate());
    EXPECT_TRUE(SendAccept());
    // Each ScopedCallThread spawns its own thread to perform the send.
    ScopedCallThread send_rtp1([this] { SendRtp1(); });
    ScopedCallThread send_rtp2([this] { SendRtp2(); });
    rtc::Thread* involved_threads[] = {send_rtp1.thread(), send_rtp2.thread()};
    // Flush the sender threads too, not just network/worker.
    WaitForThreads(involved_threads);
    EXPECT_TRUE(CheckRtp1());
    EXPECT_TRUE(CheckRtp2());
    EXPECT_TRUE(CheckNoRtp1());
    EXPECT_TRUE(CheckNoRtp2());
  }
+
+ // Test that the mediachannel retains its sending state after the transport
+ // becomes non-writable.
  // Test that the mediachannel retains its sending state after the transport
  // becomes non-writable.
  void SendWithWritabilityLoss() {
    CreateChannels(RTCP_MUX, RTCP_MUX);
    EXPECT_TRUE(SendInitiate());
    EXPECT_TRUE(SendAccept());
    EXPECT_TRUE(IsRtcpMuxEnabled(channel1_));
    EXPECT_TRUE(IsRtcpMuxEnabled(channel2_));
    SendRtp1();
    SendRtp2();
    WaitForThreads();
    EXPECT_TRUE(CheckRtp1());
    EXPECT_TRUE(CheckRtp2());
    EXPECT_TRUE(CheckNoRtp1());
    EXPECT_TRUE(CheckNoRtp2());

    // Lose writability, which should fail.
    SendTask(network_thread_,
             [this] { fake_rtp_dtls_transport1_->SetWritable(false); });
    SendRtp1();
    SendRtp2();
    WaitForThreads();
    // Side 1 still receives (transport 2 is writable); side 2 got nothing.
    EXPECT_TRUE(CheckRtp1());
    EXPECT_TRUE(CheckNoRtp2());

    // Regain writability
    SendTask(network_thread_,
             [this] { fake_rtp_dtls_transport1_->SetWritable(true); });
    // The channel must still report sending despite the writability blip.
    EXPECT_TRUE(media_send_channel1_impl()->sending());
    SendRtp1();
    SendRtp2();
    WaitForThreads();
    EXPECT_TRUE(CheckRtp1());
    EXPECT_TRUE(CheckRtp2());
    EXPECT_TRUE(CheckNoRtp1());
    EXPECT_TRUE(CheckNoRtp2());

    // Lose writability completely
    SendTask(network_thread_, [this] {
      bool asymmetric = true;
      // Disconnect only this side's destination (asymmetric).
      fake_rtp_dtls_transport1_->SetDestination(nullptr, asymmetric);
    });
    EXPECT_TRUE(media_send_channel1_impl()->sending());

    // Should fail also.
    SendRtp1();
    SendRtp2();
    WaitForThreads();
    EXPECT_TRUE(CheckRtp1());
    EXPECT_TRUE(CheckNoRtp2());
    EXPECT_TRUE(CheckNoRtp1());

    // Gain writability back
    SendTask(network_thread_, [this] {
      bool asymmetric = true;
      fake_rtp_dtls_transport1_->SetDestination(fake_rtp_dtls_transport2_.get(),
                                                asymmetric);
    });
    EXPECT_TRUE(media_send_channel1_impl()->sending());
    SendRtp1();
    SendRtp2();
    WaitForThreads();
    EXPECT_TRUE(CheckRtp1());
    EXPECT_TRUE(CheckRtp2());
    EXPECT_TRUE(CheckNoRtp1());
    EXPECT_TRUE(CheckNoRtp2());
  }
+
  // Exercises bundled channels: packets with the first payload type in
  // `pl_types` are delivered, packets with the second are filtered out.
  // `len` must be 2; `rtcp_mux`/`secure` toggle RTCP_MUX and DTLS.
  void SendBundleToBundle(const int* pl_types,
                          int len,
                          bool rtcp_mux,
                          bool secure) {
    ASSERT_EQ(2, len);
    int sequence_number1_1 = 0, sequence_number2_2 = 0;
    // Only pl_type1 was added to the bundle filter for both `channel1_`
    // and `channel2_`.
    int pl_type1 = pl_types[0];
    int pl_type2 = pl_types[1];
    int flags = SSRC_MUX;
    if (secure)
      flags |= DTLS;
    if (rtcp_mux) {
      flags |= RTCP_MUX;
    }
    CreateChannels(flags, flags);
    EXPECT_TRUE(SendInitiate());
    EXPECT_TRUE(SendAccept());

    // Both channels can receive pl_type1 only.
    SendCustomRtp1(kSsrc1, ++sequence_number1_1, pl_type1);
    SendCustomRtp2(kSsrc2, ++sequence_number2_2, pl_type1);
    WaitForThreads();
    EXPECT_TRUE(CheckCustomRtp2(kSsrc1, sequence_number1_1, pl_type1));
    EXPECT_TRUE(CheckCustomRtp1(kSsrc2, sequence_number2_2, pl_type1));
    EXPECT_TRUE(CheckNoRtp1());
    EXPECT_TRUE(CheckNoRtp2());

    // Packets carrying the unfiltered payload type must be dropped.
    SendCustomRtp1(kSsrc3, ++sequence_number1_1, pl_type2);
    SendCustomRtp2(kSsrc4, ++sequence_number2_2, pl_type2);
    WaitForThreads();
    EXPECT_FALSE(CheckCustomRtp2(kSsrc3, sequence_number1_1, pl_type2));
    EXPECT_FALSE(CheckCustomRtp1(kSsrc4, sequence_number2_2, pl_type2));
  }
+
  // Verifies that SetLocalContent/SetRemoteContent fail when the fake media
  // channels are configured to reject codec updates.
  void TestSetContentFailure() {
    CreateChannels(0, 0);

    std::string err;
    std::unique_ptr<typename T::Content> content(
        CreateMediaContentWithStream(1));

    // Local content drives the receive codecs; force that path to fail.
    media_receive_channel1_impl()->set_fail_set_recv_codecs(true);
    EXPECT_FALSE(
        channel1_->SetLocalContent(content.get(), SdpType::kOffer, err));
    EXPECT_FALSE(
        channel1_->SetLocalContent(content.get(), SdpType::kAnswer, err));

    // Remote content drives the send codecs; force that path to fail.
    media_send_channel1_impl()->set_fail_set_send_codecs(true);
    EXPECT_FALSE(
        channel1_->SetRemoteContent(content.get(), SdpType::kOffer, err));

    // NOTE(review): the flag is re-set here — presumably the fake consumes it
    // on each SetSendCodecs call; confirm against FakeMediaEngine.
    media_send_channel1_impl()->set_fail_set_send_codecs(true);
    EXPECT_FALSE(
        channel1_->SetRemoteContent(content.get(), SdpType::kAnswer, err));
  }
+
  // Applying a second local offer with a different stream must replace the
  // send stream from the first offer rather than accumulate.
  void TestSendTwoOffers() {
    CreateChannels(0, 0);

    std::string err;
    std::unique_ptr<typename T::Content> content1(
        CreateMediaContentWithStream(1));
    EXPECT_TRUE(
        channel1_->SetLocalContent(content1.get(), SdpType::kOffer, err));
    EXPECT_TRUE(media_send_channel1_impl()->HasSendStream(1));

    std::unique_ptr<typename T::Content> content2(
        CreateMediaContentWithStream(2));
    EXPECT_TRUE(
        channel1_->SetLocalContent(content2.get(), SdpType::kOffer, err));
    // Stream 1 was removed when the second offer replaced it.
    EXPECT_FALSE(media_send_channel1_impl()->HasSendStream(1));
    EXPECT_TRUE(media_send_channel1_impl()->HasSendStream(2));
  }
+
  // Applying a second remote offer with a different stream must replace the
  // receive stream from the first offer rather than accumulate.
  void TestReceiveTwoOffers() {
    CreateChannels(0, 0);

    std::string err;
    std::unique_ptr<typename T::Content> content1(
        CreateMediaContentWithStream(1));
    EXPECT_TRUE(
        channel1_->SetRemoteContent(content1.get(), SdpType::kOffer, err));
    EXPECT_TRUE(media_receive_channel1_impl()->HasRecvStream(1));

    std::unique_ptr<typename T::Content> content2(
        CreateMediaContentWithStream(2));
    EXPECT_TRUE(
        channel1_->SetRemoteContent(content2.get(), SdpType::kOffer, err));
    // Stream 1 was removed when the second offer replaced it.
    EXPECT_FALSE(media_receive_channel1_impl()->HasRecvStream(1));
    EXPECT_TRUE(media_receive_channel1_impl()->HasRecvStream(2));
  }
+
  // Receive a remote offer, send a provisional answer, then a final answer;
  // a later final answer must replace the provisional answer's send stream.
  void TestSendPrAnswer() {
    CreateChannels(0, 0);

    std::string err;
    // Receive offer
    std::unique_ptr<typename T::Content> content1(
        CreateMediaContentWithStream(1));
    EXPECT_TRUE(
        channel1_->SetRemoteContent(content1.get(), SdpType::kOffer, err));
    EXPECT_TRUE(media_receive_channel1_impl()->HasRecvStream(1));

    // Send PR answer
    std::unique_ptr<typename T::Content> content2(
        CreateMediaContentWithStream(2));
    EXPECT_TRUE(
        channel1_->SetLocalContent(content2.get(), SdpType::kPrAnswer, err));
    // Recv stream from the offer stays; send stream from the PrAnswer added.
    EXPECT_TRUE(media_receive_channel1_impl()->HasRecvStream(1));
    EXPECT_TRUE(media_send_channel1_impl()->HasSendStream(2));

    // Send answer
    std::unique_ptr<typename T::Content> content3(
        CreateMediaContentWithStream(3));
    EXPECT_TRUE(
        channel1_->SetLocalContent(content3.get(), SdpType::kAnswer, err));
    EXPECT_TRUE(media_receive_channel1_impl()->HasRecvStream(1));
    // The final answer replaces the provisional answer's send stream.
    EXPECT_FALSE(media_send_channel1_impl()->HasSendStream(2));
    EXPECT_TRUE(media_send_channel1_impl()->HasSendStream(3));
  }
+
  // Send a local offer, receive a provisional answer, then a final answer;
  // the final answer must replace the provisional answer's receive stream.
  void TestReceivePrAnswer() {
    CreateChannels(0, 0);

    std::string err;
    // Send offer
    std::unique_ptr<typename T::Content> content1(
        CreateMediaContentWithStream(1));
    EXPECT_TRUE(
        channel1_->SetLocalContent(content1.get(), SdpType::kOffer, err));
    EXPECT_TRUE(media_send_channel1_impl()->HasSendStream(1));

    // Receive PR answer
    std::unique_ptr<typename T::Content> content2(
        CreateMediaContentWithStream(2));
    EXPECT_TRUE(
        channel1_->SetRemoteContent(content2.get(), SdpType::kPrAnswer, err));
    // Send stream from the offer stays; recv stream from the PrAnswer added.
    EXPECT_TRUE(media_send_channel1_impl()->HasSendStream(1));
    EXPECT_TRUE(media_receive_channel1_impl()->HasRecvStream(2));

    // Receive answer
    std::unique_ptr<typename T::Content> content3(
        CreateMediaContentWithStream(3));
    EXPECT_TRUE(
        channel1_->SetRemoteContent(content3.get(), SdpType::kAnswer, err));
    EXPECT_TRUE(media_send_channel1_impl()->HasSendStream(1));
    // The final answer replaces the provisional answer's recv stream.
    EXPECT_FALSE(media_receive_channel1_impl()->HasRecvStream(2));
    EXPECT_TRUE(media_receive_channel1_impl()->HasRecvStream(3));
  }
+
  // Verifies that the transport's ready-to-send signal, raised on the network
  // thread, toggles the media send channel's ready_to_send flag.
  void TestOnTransportReadyToSend() {
    CreateChannels(0, 0);
    EXPECT_FALSE(media_send_channel1_impl()->ready_to_send());

    // OnTransportReadyToSend must be invoked on the network thread.
    network_thread_->PostTask(
        [this] { channel1_->OnTransportReadyToSend(true); });
    WaitForThreads();
    EXPECT_TRUE(media_send_channel1_impl()->ready_to_send());

    network_thread_->PostTask(
        [this] { channel1_->OnTransportReadyToSend(false); });
    WaitForThreads();
    EXPECT_FALSE(media_send_channel1_impl()->ready_to_send());
  }
+
+ bool SetRemoteContentWithBitrateLimit(int remote_limit) {
+ typename T::Content content;
+ CreateContent(0, kPcmuCodec, kH264Codec, &content);
+ content.set_bandwidth(remote_limit);
+ return channel1_->SetRemoteContent(&content, SdpType::kOffer, NULL);
+ }
+
+ webrtc::RtpParameters BitrateLimitedParameters(absl::optional<int> limit) {
+ webrtc::RtpParameters parameters;
+ webrtc::RtpEncodingParameters encoding;
+ encoding.max_bitrate_bps = limit;
+ parameters.encodings.push_back(encoding);
+ return parameters;
+ }
+
+ void VerifyMaxBitrate(const webrtc::RtpParameters& parameters,
+ absl::optional<int> expected_bitrate) {
+ EXPECT_EQ(1UL, parameters.encodings.size());
+ EXPECT_EQ(expected_bitrate, parameters.encodings[0].max_bitrate_bps);
+ }
+
  // Verifies that without an explicit limit, both the channel-level max
  // bitrate (-1) and the per-encoding max bitrate (nullopt) are unlimited.
  void DefaultMaxBitrateIsUnlimited() {
    CreateChannels(0, 0);
    std::string err;
    EXPECT_TRUE(channel1_->SetLocalContent(&local_media_content1_,
                                           SdpType::kOffer, err));
    // -1 is the sentinel for "no channel-wide cap".
    EXPECT_EQ(media_send_channel1_impl()->max_bps(), -1);
    VerifyMaxBitrate(media_send_channel1()->GetRtpSendParameters(kSsrc1),
                     absl::nullopt);
  }
+
+ // Test that when a channel gets new RtpTransport with a call to
+ // `SetRtpTransport`, the socket options from the old RtpTransport is merged
+ // with the options on the new one.
+
+ // For example, audio and video may use separate socket options, but initially
+ // be unbundled, then later become bundled. When this happens, their preferred
+ // socket options should be merged to the underlying transport they share.
+ void SocketOptionsMergedOnSetTransport() {
+ constexpr int kSndBufSize = 4000;
+ constexpr int kRcvBufSize = 8000;
+
+ CreateChannels(DTLS, DTLS);
+
+ new_rtp_transport_ = CreateDtlsSrtpTransport(
+ fake_rtp_dtls_transport2_.get(), fake_rtcp_dtls_transport2_.get());
+
+ bool rcv_success, send_success;
+ int rcv_buf, send_buf;
+ SendTask(network_thread_, [&] {
+ channel1_->SetOption(cricket::BaseChannel::ST_RTP,
+ rtc::Socket::Option::OPT_SNDBUF, kSndBufSize);
+ channel2_->SetOption(cricket::BaseChannel::ST_RTP,
+ rtc::Socket::Option::OPT_RCVBUF, kRcvBufSize);
+ channel1_->SetRtpTransport(new_rtp_transport_.get());
+ send_success = fake_rtp_dtls_transport2_->GetOption(
+ rtc::Socket::Option::OPT_SNDBUF, &send_buf);
+ rcv_success = fake_rtp_dtls_transport2_->GetOption(
+ rtc::Socket::Option::OPT_RCVBUF, &rcv_buf);
+ });
+
+ ASSERT_TRUE(send_success);
+ EXPECT_EQ(kSndBufSize, send_buf);
+ ASSERT_TRUE(rcv_success);
+ EXPECT_EQ(kRcvBufSize, rcv_buf);
+ }
+
+ void CreateSimulcastContent(const std::vector<std::string>& rids,
+ typename T::Content* content) {
+ std::vector<RidDescription> rid_descriptions;
+ for (const std::string& name : rids) {
+ rid_descriptions.push_back(RidDescription(name, RidDirection::kSend));
+ }
+
+ StreamParams stream;
+ stream.set_rids(rid_descriptions);
+ CreateContent(0, kPcmuCodec, kH264Codec, content);
+ // This is for unified plan, so there can be only one StreamParams.
+ content->mutable_streams().clear();
+ content->AddStream(stream);
+ }
+
+ void VerifySimulcastStreamParams(const StreamParams& expected,
+ const typename T::Channel* channel) {
+ const std::vector<StreamParams>& streams = channel->local_streams();
+ ASSERT_EQ(1u, streams.size());
+ const StreamParams& result = streams[0];
+ EXPECT_EQ(expected.rids(), result.rids());
+ EXPECT_TRUE(result.has_ssrcs());
+ EXPECT_EQ(expected.rids().size() * 2, result.ssrcs.size());
+ std::vector<uint32_t> primary_ssrcs;
+ result.GetPrimarySsrcs(&primary_ssrcs);
+ EXPECT_EQ(expected.rids().size(), primary_ssrcs.size());
+ }
+
  // Verifies that re-applying an equivalent simulcast offer keeps the same
  // StreamParams (SSRCs are not regenerated), and that a RID reordering is
  // still accepted.
  void TestUpdateLocalStreamsWithSimulcast() {
    CreateChannels(0, 0);
    typename T::Content content1, content2, content3;
    CreateSimulcastContent({"f", "h", "q"}, &content1);
    std::string err;
    EXPECT_TRUE(channel1_->SetLocalContent(&content1, SdpType::kOffer, err));
    VerifySimulcastStreamParams(content1.streams()[0], channel1_.get());
    StreamParams stream1 = channel1_->local_streams()[0];

    // Create a similar offer. SetLocalContent should not remove and add.
    CreateSimulcastContent({"f", "h", "q"}, &content2);
    EXPECT_TRUE(channel1_->SetLocalContent(&content2, SdpType::kOffer, err));
    VerifySimulcastStreamParams(content2.streams()[0], channel1_.get());
    StreamParams stream2 = channel1_->local_streams()[0];
    // Check that the streams are identical (SSRCs didn't change).
    EXPECT_EQ(stream1, stream2);

    // Create third offer that has same RIDs in different order.
    CreateSimulcastContent({"f", "q", "h"}, &content3);
    EXPECT_TRUE(channel1_->SetLocalContent(&content3, SdpType::kOffer, err));
    VerifySimulcastStreamParams(content3.streams()[0], channel1_.get());
  }
+
+ protected:
  // Flushes the current thread and the network thread so that all pending
  // cross-thread posts (packets, state updates) are delivered before the
  // caller asserts anything.
  void WaitForThreads() { WaitForThreads(rtc::ArrayView<rtc::Thread*>()); }
  // Drains every pending message on `thread`; must be invoked on `thread`.
  static void ProcessThreadQueue(rtc::Thread* thread) {
    RTC_DCHECK(thread->IsCurrent());
    while (!thread->empty()) {
      thread->ProcessMessages(0);
    }
  }
  // Drains the calling thread's own message queue once.
  static void FlushCurrentThread() {
    rtc::Thread::Current()->ProcessMessages(0);
  }
  // Flushes `threads`, then the network thread, then the current (worker)
  // thread, mirroring the direction packets travel between them.
  void WaitForThreads(rtc::ArrayView<rtc::Thread*> threads) {
    // `threads` and current thread post packets to network thread.
    for (rtc::Thread* thread : threads) {
      SendTask(thread, [thread] { ProcessThreadQueue(thread); });
    }
    ProcessThreadQueue(rtc::Thread::Current());
    // Network thread move them around and post back to worker = current thread.
    if (!network_thread_->IsCurrent()) {
      SendTask(network_thread_,
               [this] { ProcessThreadQueue(network_thread_); });
    }
    // Worker thread = current Thread process received messages.
    ProcessThreadQueue(rtc::Thread::Current());
  }
+
  // Accessors that return the standard VideoMedia{Send|Receive}ChannelInterface
  // for each channel. Use the *_impl() accessors below to reach the fake
  // implementations for test-only inspection.
  typename T::MediaSendChannelInterface* media_send_channel1() {
    return channel1_->media_send_channel();
  }
  typename T::MediaSendChannelInterface* media_send_channel2() {
    return channel2_->media_send_channel();
  }
  typename T::MediaReceiveChannelInterface* media_receive_channel1() {
    return channel1_->media_receive_channel();
  }
  typename T::MediaReceiveChannelInterface* media_receive_channel2() {
    return channel2_->media_receive_channel();
  }
+
+ // Accessors that return the FakeMedia<type>SendChannel object.
+ // Note that these depend on getting the object back that was
+ // passed to the channel constructor.
+ // T::MediaSendChannel is either FakeVoiceMediaSendChannel or
+ // FakeVideoMediaSendChannel.
+ typename T::MediaSendChannel* media_send_channel1_impl() {
+ RTC_DCHECK(channel1_);
+ return static_cast<typename T::MediaSendChannel*>(
+ channel1_->media_send_channel());
+ }
+
+ typename T::MediaSendChannel* media_send_channel2_impl() {
+ RTC_DCHECK(channel2_);
+ RTC_DCHECK(channel2_->media_send_channel());
+ return static_cast<typename T::MediaSendChannel*>(
+ channel2_->media_send_channel());
+ }
+ typename T::MediaReceiveChannel* media_receive_channel1_impl() {
+ RTC_DCHECK(channel1_);
+ RTC_DCHECK(channel1_->media_receive_channel());
+ return static_cast<typename T::MediaReceiveChannel*>(
+ channel1_->media_receive_channel());
+ }
+
+ typename T::MediaReceiveChannel* media_receive_channel2_impl() {
+ RTC_DCHECK(channel2_);
+ RTC_DCHECK(channel2_->media_receive_channel());
+ return static_cast<typename T::MediaReceiveChannel*>(
+ channel2_->media_receive_channel());
+ }
+
  rtc::AutoThread main_thread_;
  // TODO(pbos): Remove playout from all media channels and let renderers mute
  // themselves.
  // When true, tests also assert on the receive channels' playout() state.
  const bool verify_playout_;
  rtc::scoped_refptr<webrtc::PendingTaskSafetyFlag> network_thread_safety_ =
      webrtc::PendingTaskSafetyFlag::CreateDetached();
  // Owns the network thread when it is separate from the worker thread;
  // `network_thread_` is the non-owning pointer used everywhere else.
  std::unique_ptr<rtc::Thread> network_thread_keeper_;
  rtc::Thread* network_thread_;
  // Fake DTLS transports for each side, RTP and RTCP.
  std::unique_ptr<cricket::FakeDtlsTransport> fake_rtp_dtls_transport1_;
  std::unique_ptr<cricket::FakeDtlsTransport> fake_rtcp_dtls_transport1_;
  std::unique_ptr<cricket::FakeDtlsTransport> fake_rtp_dtls_transport2_;
  std::unique_ptr<cricket::FakeDtlsTransport> fake_rtcp_dtls_transport2_;
  // Raw (non-DTLS) packet transports for each side, RTP and RTCP.
  std::unique_ptr<rtc::FakePacketTransport> fake_rtp_packet_transport1_;
  std::unique_ptr<rtc::FakePacketTransport> fake_rtcp_packet_transport1_;
  std::unique_ptr<rtc::FakePacketTransport> fake_rtp_packet_transport2_;
  std::unique_ptr<rtc::FakePacketTransport> fake_rtcp_packet_transport2_;
  // RTP transports wrapping the fakes above; `new_rtp_transport_` is used by
  // the SetRtpTransport tests.
  std::unique_ptr<webrtc::RtpTransportInternal> rtp_transport1_;
  std::unique_ptr<webrtc::RtpTransportInternal> rtp_transport2_;
  std::unique_ptr<webrtc::RtpTransportInternal> new_rtp_transport_;
  cricket::FakeMediaEngine media_engine_;
  // The two channels under test and their local/remote content descriptions.
  std::unique_ptr<typename T::Channel> channel1_;
  std::unique_ptr<typename T::Channel> channel2_;
  typename T::Content local_media_content1_;
  typename T::Content local_media_content2_;
  typename T::Content remote_media_content1_;
  typename T::Content remote_media_content2_;
  // The RTP and RTCP packets to send in the tests.
  rtc::Buffer rtp_packet_;
  rtc::Buffer rtcp_packet_;
  cricket::CandidatePairInterface* last_selected_candidate_pair_;
  rtc::UniqueRandomIdGenerator ssrc_generator_;
  webrtc::test::ScopedKeyValueConfig field_trials_;
+};
+
// Voice specialization: builds a VoiceChannel around the fake send/receive
// channels and attaches `rtp_transport` on the network thread.
template <>
std::unique_ptr<cricket::VoiceChannel> ChannelTest<VoiceTraits>::CreateChannel(
    rtc::Thread* worker_thread,
    rtc::Thread* network_thread,
    std::unique_ptr<cricket::FakeVoiceMediaSendChannel> send_ch,
    std::unique_ptr<cricket::FakeVoiceMediaReceiveChannel> receive_ch,
    webrtc::RtpTransportInternal* rtp_transport,
    int flags) {
  // The test body runs on the signaling thread.
  rtc::Thread* signaling_thread = rtc::Thread::Current();
  auto channel = std::make_unique<cricket::VoiceChannel>(
      worker_thread, network_thread, signaling_thread, std::move(send_ch),
      std::move(receive_ch), cricket::CN_AUDIO, (flags & DTLS) != 0,
      webrtc::CryptoOptions(), &ssrc_generator_);
  // SetRtpTransport must be called on the network thread.
  SendTask(network_thread, [&]() {
    RTC_DCHECK_RUN_ON(channel->network_thread());
    channel->SetRtpTransport(rtp_transport);
  });
  return channel;
}
+
+template <>
+void ChannelTest<VoiceTraits>::CreateContent(
+ int flags,
+ const cricket::Codec& audio_codec,
+ const cricket::Codec& video_codec,
+ cricket::AudioContentDescription* audio) {
+ audio->AddCodec(audio_codec);
+ audio->set_rtcp_mux((flags & RTCP_MUX) != 0);
+}
+
// Voice specialization: copy-assigns the whole audio content description.
template <>
void ChannelTest<VoiceTraits>::CopyContent(
    const cricket::AudioContentDescription& source,
    cricket::AudioContentDescription* audio) {
  *audio = source;
}
+
// Voice specialization: adds a legacy (unsignaled-style) stream with `ssrc`
// to the audio content. `flags` is unused in the audio case.
template <>
void ChannelTest<VoiceTraits>::AddLegacyStreamInContent(
    uint32_t ssrc,
    int flags,
    cricket::AudioContentDescription* audio) {
  audio->AddLegacyStream(ssrc);
}
+
+class VoiceChannelSingleThreadTest : public ChannelTest<VoiceTraits> {
+ public:
+ typedef ChannelTest<VoiceTraits> Base;
+ VoiceChannelSingleThreadTest()
+ : Base(true, kPcmuFrame, kRtcpReport, NetworkIsWorker::Yes) {}
+};
+
+class VoiceChannelDoubleThreadTest : public ChannelTest<VoiceTraits> {
+ public:
+ typedef ChannelTest<VoiceTraits> Base;
+ VoiceChannelDoubleThreadTest()
+ : Base(true, kPcmuFrame, kRtcpReport, NetworkIsWorker::No) {}
+};
+
+class VoiceChannelWithEncryptedRtpHeaderExtensionsSingleThreadTest
+ : public ChannelTest<VoiceTraits> {
+ public:
+ typedef ChannelTest<VoiceTraits> Base;
+ VoiceChannelWithEncryptedRtpHeaderExtensionsSingleThreadTest()
+ : Base(true,
+ kPcmuFrameWithExtensions,
+ kRtcpReport,
+ NetworkIsWorker::Yes) {}
+};
+
+class VoiceChannelWithEncryptedRtpHeaderExtensionsDoubleThreadTest
+ : public ChannelTest<VoiceTraits> {
+ public:
+ typedef ChannelTest<VoiceTraits> Base;
+ VoiceChannelWithEncryptedRtpHeaderExtensionsDoubleThreadTest()
+ : Base(true, kPcmuFrameWithExtensions, kRtcpReport, NetworkIsWorker::No) {
+ }
+};
+
// Specialization for VideoTraits; mirrors the VoiceTraits CreateChannel above.
// Video specialization: builds a VideoChannel around the fake send/receive
// channels and attaches `rtp_transport` on the network thread.
template <>
std::unique_ptr<cricket::VideoChannel> ChannelTest<VideoTraits>::CreateChannel(
    rtc::Thread* worker_thread,
    rtc::Thread* network_thread,
    std::unique_ptr<cricket::FakeVideoMediaSendChannel> send_ch,
    std::unique_ptr<cricket::FakeVideoMediaReceiveChannel> receive_ch,
    webrtc::RtpTransportInternal* rtp_transport,
    int flags) {
  // The test body runs on the signaling thread.
  rtc::Thread* signaling_thread = rtc::Thread::Current();
  auto channel = std::make_unique<cricket::VideoChannel>(
      worker_thread, network_thread, signaling_thread, std::move(send_ch),
      std::move(receive_ch), cricket::CN_VIDEO, (flags & DTLS) != 0,
      webrtc::CryptoOptions(), &ssrc_generator_);
  // SetRtpTransport must be called on the network thread.
  SendTask(network_thread, [&]() {
    RTC_DCHECK_RUN_ON(channel->network_thread());
    channel->SetRtpTransport(rtp_transport);
  });
  return channel;
}
+
+template <>
+void ChannelTest<VideoTraits>::CreateContent(
+ int flags,
+ const cricket::Codec& audio_codec,
+ const cricket::Codec& video_codec,
+ cricket::VideoContentDescription* video) {
+ video->AddCodec(video_codec);
+ video->set_rtcp_mux((flags & RTCP_MUX) != 0);
+}
+
// Video specialization: copy-assigns the whole video content description.
template <>
void ChannelTest<VideoTraits>::CopyContent(
    const cricket::VideoContentDescription& source,
    cricket::VideoContentDescription* video) {
  *video = source;
}
+
// Video specialization: adds a legacy (unsignaled-style) stream with `ssrc`
// to the video content. `flags` is unused in the video case.
template <>
void ChannelTest<VideoTraits>::AddLegacyStreamInContent(
    uint32_t ssrc,
    int flags,
    cricket::VideoContentDescription* video) {
  video->AddLegacyStream(ssrc);
}
+
+class VideoChannelSingleThreadTest : public ChannelTest<VideoTraits> {
+ public:
+ typedef ChannelTest<VideoTraits> Base;
+ VideoChannelSingleThreadTest()
+ : Base(false, kH264Packet, kRtcpReport, NetworkIsWorker::Yes) {}
+};
+
+class VideoChannelDoubleThreadTest : public ChannelTest<VideoTraits> {
+ public:
+ typedef ChannelTest<VideoTraits> Base;
+ VideoChannelDoubleThreadTest()
+ : Base(false, kH264Packet, kRtcpReport, NetworkIsWorker::No) {}
+};
+
// VoiceChannelSingleThreadTest cases: each dispatches to the shared
// ChannelTest<VoiceTraits> implementation above; only TestInit adds
// voice-specific assertions.
TEST_F(VoiceChannelSingleThreadTest, TestInit) {
  Base::TestInit();
  EXPECT_FALSE(media_send_channel1_impl()->IsStreamMuted(0));
  EXPECT_TRUE(media_send_channel1_impl()->dtmf_info_queue().empty());
}

TEST_F(VoiceChannelSingleThreadTest, TestDeinit) {
  Base::TestDeinit();
}

TEST_F(VoiceChannelSingleThreadTest, TestSetContents) {
  Base::TestSetContents();
}

TEST_F(VoiceChannelSingleThreadTest, TestSetContentsExtmapAllowMixedAsCaller) {
  Base::TestSetContentsExtmapAllowMixedCaller(/*offer=*/true, /*answer=*/true);
}

TEST_F(VoiceChannelSingleThreadTest,
       TestSetContentsExtmapAllowMixedNotSupportedAsCaller) {
  Base::TestSetContentsExtmapAllowMixedCaller(/*offer=*/true, /*answer=*/false);
}

TEST_F(VoiceChannelSingleThreadTest, TestSetContentsExtmapAllowMixedAsCallee) {
  Base::TestSetContentsExtmapAllowMixedCallee(/*offer=*/true, /*answer=*/true);
}

TEST_F(VoiceChannelSingleThreadTest,
       TestSetContentsExtmapAllowMixedNotSupportedAsCallee) {
  Base::TestSetContentsExtmapAllowMixedCallee(/*offer=*/true, /*answer=*/false);
}

TEST_F(VoiceChannelSingleThreadTest, TestSetContentsNullOffer) {
  Base::TestSetContentsNullOffer();
}

TEST_F(VoiceChannelSingleThreadTest, TestSetContentsRtcpMux) {
  Base::TestSetContentsRtcpMux();
}

// NOTE(review): this calls the same helper as TestSetContentsRtcpMux above;
// presumably a PrAnswer-specific variant was intended — confirm.
TEST_F(VoiceChannelSingleThreadTest, TestSetContentsRtcpMuxWithPrAnswer) {
  Base::TestSetContentsRtcpMux();
}

TEST_F(VoiceChannelSingleThreadTest, TestChangeStreamParamsInContent) {
  Base::TestChangeStreamParamsInContent();
}

TEST_F(VoiceChannelSingleThreadTest, TestPlayoutAndSendingStates) {
  Base::TestPlayoutAndSendingStates();
}

TEST_F(VoiceChannelSingleThreadTest, TestMediaContentDirection) {
  Base::TestMediaContentDirection();
}

TEST_F(VoiceChannelSingleThreadTest, TestNetworkRouteChanges) {
  Base::TestNetworkRouteChanges();
}

TEST_F(VoiceChannelSingleThreadTest, TestCallSetup) {
  Base::TestCallSetup();
}

TEST_F(VoiceChannelSingleThreadTest, SendRtpToRtp) {
  Base::SendRtpToRtp();
}

TEST_F(VoiceChannelSingleThreadTest, SendDtlsSrtpToDtlsSrtp) {
  Base::SendDtlsSrtpToDtlsSrtp(0, 0);
}

TEST_F(VoiceChannelSingleThreadTest, SendDtlsSrtpToDtlsSrtpRtcpMux) {
  Base::SendDtlsSrtpToDtlsSrtp(RTCP_MUX, RTCP_MUX);
}

TEST_F(VoiceChannelSingleThreadTest, SendEarlyMediaUsingRtcpMuxSrtp) {
  Base::SendEarlyMediaUsingRtcpMuxSrtp();
}

TEST_F(VoiceChannelSingleThreadTest, SendRtpToRtpOnThread) {
  Base::SendRtpToRtpOnThread();
}

TEST_F(VoiceChannelSingleThreadTest, SendWithWritabilityLoss) {
  Base::SendWithWritabilityLoss();
}

TEST_F(VoiceChannelSingleThreadTest, TestSetContentFailure) {
  Base::TestSetContentFailure();
}

TEST_F(VoiceChannelSingleThreadTest, TestSendTwoOffers) {
  Base::TestSendTwoOffers();
}

TEST_F(VoiceChannelSingleThreadTest, TestReceiveTwoOffers) {
  Base::TestReceiveTwoOffers();
}

TEST_F(VoiceChannelSingleThreadTest, TestSendPrAnswer) {
  Base::TestSendPrAnswer();
}

TEST_F(VoiceChannelSingleThreadTest, TestReceivePrAnswer) {
  Base::TestReceivePrAnswer();
}

TEST_F(VoiceChannelSingleThreadTest, TestOnTransportReadyToSend) {
  Base::TestOnTransportReadyToSend();
}

// SendBundleToBundle variants: {rtcp_mux, secure} combinations.
TEST_F(VoiceChannelSingleThreadTest, SendBundleToBundle) {
  Base::SendBundleToBundle(kAudioPts, arraysize(kAudioPts), false, false);
}

TEST_F(VoiceChannelSingleThreadTest, SendBundleToBundleSecure) {
  Base::SendBundleToBundle(kAudioPts, arraysize(kAudioPts), false, true);
}

TEST_F(VoiceChannelSingleThreadTest, SendBundleToBundleWithRtcpMux) {
  Base::SendBundleToBundle(kAudioPts, arraysize(kAudioPts), true, false);
}

TEST_F(VoiceChannelSingleThreadTest, SendBundleToBundleWithRtcpMuxSecure) {
  Base::SendBundleToBundle(kAudioPts, arraysize(kAudioPts), true, true);
}

TEST_F(VoiceChannelSingleThreadTest, DefaultMaxBitrateIsUnlimited) {
  Base::DefaultMaxBitrateIsUnlimited();
}

TEST_F(VoiceChannelSingleThreadTest, SocketOptionsMergedOnSetTransport) {
  Base::SocketOptionsMergedOnSetTransport();
}
+
// VoiceChannelDoubleThreadTest cases: the same dispatchers as the
// single-thread fixture above, run with a separate network thread.
TEST_F(VoiceChannelDoubleThreadTest, TestInit) {
  Base::TestInit();
  EXPECT_FALSE(media_send_channel1_impl()->IsStreamMuted(0));
  EXPECT_TRUE(media_send_channel1_impl()->dtmf_info_queue().empty());
}

TEST_F(VoiceChannelDoubleThreadTest, TestDeinit) {
  Base::TestDeinit();
}

TEST_F(VoiceChannelDoubleThreadTest, TestSetContents) {
  Base::TestSetContents();
}

TEST_F(VoiceChannelDoubleThreadTest, TestSetContentsExtmapAllowMixedAsCaller) {
  Base::TestSetContentsExtmapAllowMixedCaller(/*offer=*/true, /*answer=*/true);
}

TEST_F(VoiceChannelDoubleThreadTest,
       TestSetContentsExtmapAllowMixedNotSupportedAsCaller) {
  Base::TestSetContentsExtmapAllowMixedCaller(/*offer=*/true, /*answer=*/false);
}

TEST_F(VoiceChannelDoubleThreadTest, TestSetContentsExtmapAllowMixedAsCallee) {
  Base::TestSetContentsExtmapAllowMixedCallee(/*offer=*/true, /*answer=*/true);
}

TEST_F(VoiceChannelDoubleThreadTest,
       TestSetContentsExtmapAllowMixedNotSupportedAsCallee) {
  Base::TestSetContentsExtmapAllowMixedCallee(/*offer=*/true, /*answer=*/false);
}

TEST_F(VoiceChannelDoubleThreadTest, TestSetContentsNullOffer) {
  Base::TestSetContentsNullOffer();
}

TEST_F(VoiceChannelDoubleThreadTest, TestSetContentsRtcpMux) {
  Base::TestSetContentsRtcpMux();
}

// NOTE(review): this calls the same helper as TestSetContentsRtcpMux above;
// presumably a PrAnswer-specific variant was intended — confirm.
TEST_F(VoiceChannelDoubleThreadTest, TestSetContentsRtcpMuxWithPrAnswer) {
  Base::TestSetContentsRtcpMux();
}

TEST_F(VoiceChannelDoubleThreadTest, TestChangeStreamParamsInContent) {
  Base::TestChangeStreamParamsInContent();
}

TEST_F(VoiceChannelDoubleThreadTest, TestPlayoutAndSendingStates) {
  Base::TestPlayoutAndSendingStates();
}

TEST_F(VoiceChannelDoubleThreadTest, TestMediaContentDirection) {
  Base::TestMediaContentDirection();
}

TEST_F(VoiceChannelDoubleThreadTest, TestNetworkRouteChanges) {
  Base::TestNetworkRouteChanges();
}

TEST_F(VoiceChannelDoubleThreadTest, TestCallSetup) {
  Base::TestCallSetup();
}
+
+TEST_F(VoiceChannelDoubleThreadTest, SendRtpToRtp) {
+ Base::SendRtpToRtp();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, SendDtlsSrtpToDtlsSrtp) {
+ Base::SendDtlsSrtpToDtlsSrtp(0, 0);
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, SendDtlsSrtpToDtlsSrtpRtcpMux) {
+ Base::SendDtlsSrtpToDtlsSrtp(RTCP_MUX, RTCP_MUX);
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, SendEarlyMediaUsingRtcpMuxSrtp) {
+ Base::SendEarlyMediaUsingRtcpMuxSrtp();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, SendRtpToRtpOnThread) {
+ Base::SendRtpToRtpOnThread();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, SendWithWritabilityLoss) {
+ Base::SendWithWritabilityLoss();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, TestSetContentFailure) {
+ Base::TestSetContentFailure();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, TestSendTwoOffers) {
+ Base::TestSendTwoOffers();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, TestReceiveTwoOffers) {
+ Base::TestReceiveTwoOffers();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, TestSendPrAnswer) {
+ Base::TestSendPrAnswer();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, TestReceivePrAnswer) {
+ Base::TestReceivePrAnswer();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, TestOnTransportReadyToSend) {
+ Base::TestOnTransportReadyToSend();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, SendBundleToBundle) {
+ Base::SendBundleToBundle(kAudioPts, arraysize(kAudioPts), false, false);
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, SendBundleToBundleSecure) {
+ Base::SendBundleToBundle(kAudioPts, arraysize(kAudioPts), false, true);
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, SendBundleToBundleWithRtcpMux) {
+ Base::SendBundleToBundle(kAudioPts, arraysize(kAudioPts), true, false);
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, SendBundleToBundleWithRtcpMuxSecure) {
+ Base::SendBundleToBundle(kAudioPts, arraysize(kAudioPts), true, true);
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, DefaultMaxBitrateIsUnlimited) {
+ Base::DefaultMaxBitrateIsUnlimited();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, SocketOptionsMergedOnSetTransport) {
+ Base::SocketOptionsMergedOnSetTransport();
+}
+
+// VideoChannelSingleThreadTest
+TEST_F(VideoChannelSingleThreadTest, TestInit) {
+ Base::TestInit();
+}
+
+TEST_F(VideoChannelSingleThreadTest, TestDeinit) {
+ Base::TestDeinit();
+}
+
+TEST_F(VideoChannelSingleThreadTest, TestSetContents) {
+ Base::TestSetContents();
+}
+
+TEST_F(VideoChannelSingleThreadTest, TestSetContentsExtmapAllowMixedAsCaller) {
+ Base::TestSetContentsExtmapAllowMixedCaller(/*offer=*/true, /*answer=*/true);
+}
+
+TEST_F(VideoChannelSingleThreadTest,
+ TestSetContentsExtmapAllowMixedNotSupportedAsCaller) {
+ Base::TestSetContentsExtmapAllowMixedCaller(/*offer=*/true, /*answer=*/false);
+}
+
+TEST_F(VideoChannelSingleThreadTest, TestSetContentsExtmapAllowMixedAsCallee) {
+ Base::TestSetContentsExtmapAllowMixedCallee(/*offer=*/true, /*answer=*/true);
+}
+
+TEST_F(VideoChannelSingleThreadTest,
+ TestSetContentsExtmapAllowMixedNotSupportedAsCallee) {
+ Base::TestSetContentsExtmapAllowMixedCallee(/*offer=*/true, /*answer=*/false);
+}
+
+TEST_F(VideoChannelSingleThreadTest, TestSetContentsNullOffer) {
+ Base::TestSetContentsNullOffer();
+}
+
+TEST_F(VideoChannelSingleThreadTest, TestSetContentsRtcpMux) {
+ Base::TestSetContentsRtcpMux();
+}
+
+TEST_F(VideoChannelSingleThreadTest, TestSetContentsRtcpMuxWithPrAnswer) {
+ Base::TestSetContentsRtcpMux();
+}
+
+TEST_F(VideoChannelSingleThreadTest, TestChangeStreamParamsInContent) {
+ Base::TestChangeStreamParamsInContent();
+}
+
+TEST_F(VideoChannelSingleThreadTest, TestPlayoutAndSendingStates) {
+ Base::TestPlayoutAndSendingStates();
+}
+
+TEST_F(VideoChannelSingleThreadTest, TestMediaContentDirection) {
+ Base::TestMediaContentDirection();
+}
+
+TEST_F(VideoChannelSingleThreadTest, TestNetworkRouteChanges) {
+ Base::TestNetworkRouteChanges();
+}
+
+TEST_F(VideoChannelSingleThreadTest, TestCallSetup) {
+ Base::TestCallSetup();
+}
+
+TEST_F(VideoChannelSingleThreadTest, SendRtpToRtp) {
+ Base::SendRtpToRtp();
+}
+
+TEST_F(VideoChannelSingleThreadTest, SendDtlsSrtpToDtlsSrtp) {
+ Base::SendDtlsSrtpToDtlsSrtp(0, 0);
+}
+
+TEST_F(VideoChannelSingleThreadTest, SendDtlsSrtpToDtlsSrtpRtcpMux) {
+ Base::SendDtlsSrtpToDtlsSrtp(RTCP_MUX, RTCP_MUX);
+}
+
+TEST_F(VideoChannelSingleThreadTest, SendEarlyMediaUsingRtcpMuxSrtp) {
+ Base::SendEarlyMediaUsingRtcpMuxSrtp();
+}
+
+TEST_F(VideoChannelSingleThreadTest, SendRtpToRtpOnThread) {
+ Base::SendRtpToRtpOnThread();
+}
+
+TEST_F(VideoChannelSingleThreadTest, SendWithWritabilityLoss) {
+ Base::SendWithWritabilityLoss();
+}
+
+TEST_F(VideoChannelSingleThreadTest, TestSetContentFailure) {
+ Base::TestSetContentFailure();
+}
+
+TEST_F(VideoChannelSingleThreadTest, TestSendTwoOffers) {
+ Base::TestSendTwoOffers();
+}
+
+TEST_F(VideoChannelSingleThreadTest, TestReceiveTwoOffers) {
+ Base::TestReceiveTwoOffers();
+}
+
+TEST_F(VideoChannelSingleThreadTest, TestSendPrAnswer) {
+ Base::TestSendPrAnswer();
+}
+
+TEST_F(VideoChannelSingleThreadTest, TestReceivePrAnswer) {
+ Base::TestReceivePrAnswer();
+}
+
+TEST_F(VideoChannelSingleThreadTest, SendBundleToBundle) {
+ Base::SendBundleToBundle(kVideoPts, arraysize(kVideoPts), false, false);
+}
+
+TEST_F(VideoChannelSingleThreadTest, SendBundleToBundleSecure) {
+ Base::SendBundleToBundle(kVideoPts, arraysize(kVideoPts), false, true);
+}
+
+TEST_F(VideoChannelSingleThreadTest, SendBundleToBundleWithRtcpMux) {
+ Base::SendBundleToBundle(kVideoPts, arraysize(kVideoPts), true, false);
+}
+
+TEST_F(VideoChannelSingleThreadTest, SendBundleToBundleWithRtcpMuxSecure) {
+ Base::SendBundleToBundle(kVideoPts, arraysize(kVideoPts), true, true);
+}
+
+TEST_F(VideoChannelSingleThreadTest, TestOnTransportReadyToSend) {
+ Base::TestOnTransportReadyToSend();
+}
+
+TEST_F(VideoChannelSingleThreadTest, DefaultMaxBitrateIsUnlimited) {
+ Base::DefaultMaxBitrateIsUnlimited();
+}
+
+TEST_F(VideoChannelSingleThreadTest, SocketOptionsMergedOnSetTransport) {
+ Base::SocketOptionsMergedOnSetTransport();
+}
+
+TEST_F(VideoChannelSingleThreadTest, UpdateLocalStreamsWithSimulcast) {
+ Base::TestUpdateLocalStreamsWithSimulcast();
+}
+
+TEST_F(VideoChannelSingleThreadTest, TestSetLocalOfferWithPacketization) {
+ const cricket::VideoCodec kVp8Codec = cricket::CreateVideoCodec(97, "VP8");
+ cricket::VideoCodec vp9_codec = cricket::CreateVideoCodec(98, "VP9");
+ vp9_codec.packetization = cricket::kPacketizationParamRaw;
+ cricket::VideoContentDescription video;
+ video.set_codecs({kVp8Codec, vp9_codec});
+
+ CreateChannels(0, 0);
+
+ std::string err;
+ EXPECT_TRUE(channel1_->SetLocalContent(&video, SdpType::kOffer, err));
+ EXPECT_THAT(media_send_channel1_impl()->send_codecs(), testing::IsEmpty());
+ ASSERT_THAT(media_receive_channel1_impl()->recv_codecs(), testing::SizeIs(2));
+ EXPECT_TRUE(media_receive_channel1_impl()->recv_codecs()[0].Matches(
+ kVp8Codec, &field_trials_));
+ EXPECT_EQ(media_receive_channel1_impl()->recv_codecs()[0].packetization,
+ absl::nullopt);
+ EXPECT_TRUE(media_receive_channel1_impl()->recv_codecs()[1].Matches(
+ vp9_codec, &field_trials_));
+ EXPECT_EQ(media_receive_channel1_impl()->recv_codecs()[1].packetization,
+ cricket::kPacketizationParamRaw);
+}
+
+TEST_F(VideoChannelSingleThreadTest, TestSetRemoteOfferWithPacketization) {
+ const cricket::VideoCodec kVp8Codec = cricket::CreateVideoCodec(97, "VP8");
+ cricket::VideoCodec vp9_codec = cricket::CreateVideoCodec(98, "VP9");
+ vp9_codec.packetization = cricket::kPacketizationParamRaw;
+ cricket::VideoContentDescription video;
+ video.set_codecs({kVp8Codec, vp9_codec});
+
+ CreateChannels(0, 0);
+
+ std::string err;
+ EXPECT_TRUE(channel1_->SetRemoteContent(&video, SdpType::kOffer, err));
+ EXPECT_TRUE(err.empty());
+ EXPECT_THAT(media_receive_channel1_impl()->recv_codecs(), testing::IsEmpty());
+ ASSERT_THAT(media_send_channel1_impl()->send_codecs(), testing::SizeIs(2));
+ EXPECT_TRUE(media_send_channel1_impl()->send_codecs()[0].Matches(
+ kVp8Codec, &field_trials_));
+ EXPECT_EQ(media_send_channel1_impl()->send_codecs()[0].packetization,
+ absl::nullopt);
+ EXPECT_TRUE(media_send_channel1_impl()->send_codecs()[1].Matches(
+ vp9_codec, &field_trials_));
+ EXPECT_EQ(media_send_channel1_impl()->send_codecs()[1].packetization,
+ cricket::kPacketizationParamRaw);
+}
+
+TEST_F(VideoChannelSingleThreadTest, TestSetAnswerWithPacketization) {
+ const cricket::VideoCodec kVp8Codec = cricket::CreateVideoCodec(97, "VP8");
+ cricket::VideoCodec vp9_codec = cricket::CreateVideoCodec(98, "VP9");
+ vp9_codec.packetization = cricket::kPacketizationParamRaw;
+ cricket::VideoContentDescription video;
+ video.set_codecs({kVp8Codec, vp9_codec});
+
+ CreateChannels(0, 0);
+
+ std::string err;
+ EXPECT_TRUE(channel1_->SetLocalContent(&video, SdpType::kOffer, err));
+ EXPECT_TRUE(err.empty());
+ EXPECT_TRUE(channel1_->SetRemoteContent(&video, SdpType::kAnswer, err));
+ EXPECT_TRUE(err.empty());
+ ASSERT_THAT(media_receive_channel1_impl()->recv_codecs(), testing::SizeIs(2));
+ EXPECT_TRUE(media_receive_channel1_impl()->recv_codecs()[0].Matches(
+ kVp8Codec, &field_trials_));
+ EXPECT_EQ(media_receive_channel1_impl()->recv_codecs()[0].packetization,
+ absl::nullopt);
+ EXPECT_TRUE(media_receive_channel1_impl()->recv_codecs()[1].Matches(
+ vp9_codec, &field_trials_));
+ EXPECT_EQ(media_receive_channel1_impl()->recv_codecs()[1].packetization,
+ cricket::kPacketizationParamRaw);
+ EXPECT_THAT(media_send_channel1_impl()->send_codecs(), testing::SizeIs(2));
+ EXPECT_TRUE(media_send_channel1_impl()->send_codecs()[0].Matches(
+ kVp8Codec, &field_trials_));
+ EXPECT_EQ(media_send_channel1_impl()->send_codecs()[0].packetization,
+ absl::nullopt);
+ EXPECT_TRUE(media_send_channel1_impl()->send_codecs()[1].Matches(
+ vp9_codec, &field_trials_));
+ EXPECT_EQ(media_send_channel1_impl()->send_codecs()[1].packetization,
+ cricket::kPacketizationParamRaw);
+}
+
+TEST_F(VideoChannelSingleThreadTest, TestSetLocalAnswerWithoutPacketization) {
+ const cricket::VideoCodec kLocalCodec = cricket::CreateVideoCodec(98, "VP8");
+ cricket::VideoCodec remote_codec = cricket::CreateVideoCodec(99, "VP8");
+ remote_codec.packetization = cricket::kPacketizationParamRaw;
+ cricket::VideoContentDescription local_video;
+ local_video.set_codecs({kLocalCodec});
+ cricket::VideoContentDescription remote_video;
+ remote_video.set_codecs({remote_codec});
+
+ CreateChannels(0, 0);
+
+ std::string err;
+ EXPECT_TRUE(channel1_->SetRemoteContent(&remote_video, SdpType::kOffer, err));
+ EXPECT_TRUE(channel1_->SetLocalContent(&local_video, SdpType::kAnswer, err));
+ ASSERT_THAT(media_receive_channel1_impl()->recv_codecs(), testing::SizeIs(1));
+ EXPECT_EQ(media_receive_channel1_impl()->recv_codecs()[0].packetization,
+ absl::nullopt);
+ ASSERT_THAT(media_send_channel1_impl()->send_codecs(), testing::SizeIs(1));
+ EXPECT_EQ(media_send_channel1_impl()->send_codecs()[0].packetization,
+ absl::nullopt);
+}
+
+TEST_F(VideoChannelSingleThreadTest, TestSetRemoteAnswerWithoutPacketization) {
+ cricket::VideoCodec local_codec = cricket::CreateVideoCodec(98, "VP8");
+ local_codec.packetization = cricket::kPacketizationParamRaw;
+ const cricket::VideoCodec kRemoteCodec = cricket::CreateVideoCodec(99, "VP8");
+ cricket::VideoContentDescription local_video;
+ local_video.set_codecs({local_codec});
+ cricket::VideoContentDescription remote_video;
+ remote_video.set_codecs({kRemoteCodec});
+
+ CreateChannels(0, 0);
+
+ std::string err;
+ EXPECT_TRUE(channel1_->SetLocalContent(&local_video, SdpType::kOffer, err));
+ EXPECT_TRUE(
+ channel1_->SetRemoteContent(&remote_video, SdpType::kAnswer, err));
+ ASSERT_THAT(media_receive_channel1_impl()->recv_codecs(), testing::SizeIs(1));
+ EXPECT_EQ(media_receive_channel1_impl()->recv_codecs()[0].packetization,
+ absl::nullopt);
+ ASSERT_THAT(media_send_channel1_impl()->send_codecs(), testing::SizeIs(1));
+ EXPECT_EQ(media_send_channel1_impl()->send_codecs()[0].packetization,
+ absl::nullopt);
+}
+
+TEST_F(VideoChannelSingleThreadTest,
+ TestSetRemoteAnswerWithInvalidPacketization) {
+ cricket::VideoCodec local_codec = cricket::CreateVideoCodec(98, "VP8");
+ local_codec.packetization = cricket::kPacketizationParamRaw;
+ cricket::VideoCodec remote_codec = cricket::CreateVideoCodec(99, "VP8");
+ remote_codec.packetization = "unknownpacketizationattributevalue";
+ cricket::VideoContentDescription local_video;
+ local_video.set_codecs({local_codec});
+ cricket::VideoContentDescription remote_video;
+ remote_video.set_codecs({remote_codec});
+
+ CreateChannels(0, 0);
+
+ std::string err;
+ EXPECT_TRUE(channel1_->SetLocalContent(&local_video, SdpType::kOffer, err));
+ EXPECT_TRUE(err.empty());
+ EXPECT_FALSE(
+ channel1_->SetRemoteContent(&remote_video, SdpType::kAnswer, err));
+ EXPECT_FALSE(err.empty());
+ ASSERT_THAT(media_receive_channel1_impl()->recv_codecs(), testing::SizeIs(1));
+ EXPECT_EQ(media_receive_channel1_impl()->recv_codecs()[0].packetization,
+ cricket::kPacketizationParamRaw);
+ EXPECT_THAT(media_send_channel1_impl()->send_codecs(), testing::IsEmpty());
+}
+
+TEST_F(VideoChannelSingleThreadTest,
+ TestSetLocalAnswerWithInvalidPacketization) {
+ cricket::VideoCodec local_codec = cricket::CreateVideoCodec(98, "VP8");
+ local_codec.packetization = cricket::kPacketizationParamRaw;
+ const cricket::VideoCodec kRemoteCodec = cricket::CreateVideoCodec(99, "VP8");
+ cricket::VideoContentDescription local_video;
+ local_video.set_codecs({local_codec});
+ cricket::VideoContentDescription remote_video;
+ remote_video.set_codecs({kRemoteCodec});
+
+ CreateChannels(0, 0);
+
+ std::string err;
+ EXPECT_TRUE(channel1_->SetRemoteContent(&remote_video, SdpType::kOffer, err));
+ EXPECT_TRUE(err.empty());
+ EXPECT_FALSE(channel1_->SetLocalContent(&local_video, SdpType::kAnswer, err));
+ EXPECT_FALSE(err.empty());
+ EXPECT_THAT(media_receive_channel1_impl()->recv_codecs(), testing::IsEmpty());
+ ASSERT_THAT(media_send_channel1_impl()->send_codecs(), testing::SizeIs(1));
+ EXPECT_EQ(media_send_channel1_impl()->send_codecs()[0].packetization,
+ absl::nullopt);
+}
+
+TEST_F(VideoChannelSingleThreadTest,
+ StopsPacketizationVerificationWhenMatchIsFoundInRemoteAnswer) {
+ cricket::VideoCodec vp8_foo = cricket::CreateVideoCodec(96, "VP8");
+ vp8_foo.packetization = "foo";
+ cricket::VideoCodec vp8_bar = cricket::CreateVideoCodec(97, "VP8");
+ vp8_bar.packetization = "bar";
+ cricket::VideoCodec vp9 = cricket::CreateVideoCodec(98, "VP9");
+ cricket::VideoCodec vp9_foo = cricket::CreateVideoCodec(99, "VP9");
+ vp9_foo.packetization = "bar";
+ cricket::VideoContentDescription local;
+ local.set_codecs({vp8_foo, vp8_bar, vp9_foo});
+ cricket::VideoContentDescription remote;
+ remote.set_codecs({vp8_foo, vp9});
+
+ CreateChannels(0, 0);
+ std::string err;
+ ASSERT_TRUE(channel1_->SetLocalContent(&local, SdpType::kOffer, err)) << err;
+ ASSERT_TRUE(channel1_->SetRemoteContent(&remote, SdpType::kAnswer, err))
+ << err;
+
+ EXPECT_THAT(
+ media_receive_channel1_impl()->recv_codecs(),
+ ElementsAre(AllOf(Field(&cricket::Codec::id, 96),
+ Field(&cricket::Codec::packetization, "foo")),
+ AllOf(Field(&cricket::Codec::id, 97),
+ Field(&cricket::Codec::packetization, "bar")),
+ AllOf(Field(&cricket::Codec::id, 99),
+ Field(&cricket::Codec::packetization, absl::nullopt))));
+ EXPECT_THAT(
+ media_send_channel1_impl()->send_codecs(),
+ ElementsAre(AllOf(Field(&cricket::Codec::id, 96),
+ Field(&cricket::Codec::packetization, "foo")),
+ AllOf(Field(&cricket::Codec::id, 98),
+ Field(&cricket::Codec::packetization, absl::nullopt))));
+}
+
+TEST_F(VideoChannelSingleThreadTest,
+ StopsPacketizationVerificationWhenMatchIsFoundInLocalAnswer) {
+ cricket::VideoCodec vp8_foo = cricket::CreateVideoCodec(96, "VP8");
+ vp8_foo.packetization = "foo";
+ cricket::VideoCodec vp8_bar = cricket::CreateVideoCodec(97, "VP8");
+ vp8_bar.packetization = "bar";
+ cricket::VideoCodec vp9 = cricket::CreateVideoCodec(98, "VP9");
+ cricket::VideoCodec vp9_foo = cricket::CreateVideoCodec(99, "VP9");
+ vp9_foo.packetization = "bar";
+ cricket::VideoContentDescription local;
+ local.set_codecs({vp8_foo, vp9});
+ cricket::VideoContentDescription remote;
+ remote.set_codecs({vp8_foo, vp8_bar, vp9_foo});
+
+ CreateChannels(0, 0);
+ std::string err;
+ ASSERT_TRUE(channel1_->SetRemoteContent(&remote, SdpType::kOffer, err))
+ << err;
+ ASSERT_TRUE(channel1_->SetLocalContent(&local, SdpType::kAnswer, err)) << err;
+
+ EXPECT_THAT(
+ media_receive_channel1_impl()->recv_codecs(),
+ ElementsAre(AllOf(Field(&cricket::Codec::id, 96),
+ Field(&cricket::Codec::packetization, "foo")),
+ AllOf(Field(&cricket::Codec::id, 98),
+ Field(&cricket::Codec::packetization, absl::nullopt))));
+ EXPECT_THAT(
+ media_send_channel1_impl()->send_codecs(),
+ ElementsAre(AllOf(Field(&cricket::Codec::id, 96),
+ Field(&cricket::Codec::packetization, "foo")),
+ AllOf(Field(&cricket::Codec::id, 97),
+ Field(&cricket::Codec::packetization, "bar")),
+ AllOf(Field(&cricket::Codec::id, 99),
+ Field(&cricket::Codec::packetization, absl::nullopt))));
+}
+
+TEST_F(VideoChannelSingleThreadTest,
+ ConsidersAllCodecsWithDiffrentPacketizationsInRemoteAnswer) {
+ cricket::VideoCodec vp8 = cricket::CreateVideoCodec(96, "VP8");
+ cricket::VideoCodec vp8_raw = cricket::CreateVideoCodec(97, "VP8");
+ vp8_raw.packetization = cricket::kPacketizationParamRaw;
+ cricket::VideoContentDescription local;
+ local.set_codecs({vp8, vp8_raw});
+ cricket::VideoContentDescription remote;
+ remote.set_codecs({vp8_raw, vp8});
+
+ CreateChannels(0, 0);
+ std::string err;
+ ASSERT_TRUE(channel1_->SetLocalContent(&local, SdpType::kOffer, err)) << err;
+ ASSERT_TRUE(channel1_->SetRemoteContent(&remote, SdpType::kAnswer, err))
+ << err;
+
+ EXPECT_THAT(
+ media_receive_channel1_impl()->recv_codecs(),
+ ElementsAre(AllOf(Field(&cricket::Codec::id, 96),
+ Field(&cricket::Codec::packetization, absl::nullopt)),
+ AllOf(Field(&cricket::Codec::id, 97),
+ Field(&cricket::Codec::packetization,
+ cricket::kPacketizationParamRaw))));
+ EXPECT_THAT(
+ media_send_channel1_impl()->send_codecs(),
+ ElementsAre(AllOf(Field(&cricket::Codec::id, 97),
+ Field(&cricket::Codec::packetization,
+ cricket::kPacketizationParamRaw)),
+ AllOf(Field(&cricket::Codec::id, 96),
+ Field(&cricket::Codec::packetization, absl::nullopt))));
+}
+
+TEST_F(VideoChannelSingleThreadTest,
+ ConsidersAllCodecsWithDiffrentPacketizationsInLocalAnswer) {
+ cricket::VideoCodec vp8 = cricket::CreateVideoCodec(96, "VP8");
+ cricket::VideoCodec vp8_raw = cricket::CreateVideoCodec(97, "VP8");
+ vp8_raw.packetization = cricket::kPacketizationParamRaw;
+ cricket::VideoContentDescription local;
+ local.set_codecs({vp8_raw, vp8});
+ cricket::VideoContentDescription remote;
+ remote.set_codecs({vp8, vp8_raw});
+
+ CreateChannels(0, 0);
+ std::string err;
+ ASSERT_TRUE(channel1_->SetRemoteContent(&remote, SdpType::kOffer, err))
+ << err;
+ ASSERT_TRUE(channel1_->SetLocalContent(&local, SdpType::kAnswer, err)) << err;
+
+ EXPECT_THAT(
+ media_receive_channel1_impl()->recv_codecs(),
+ ElementsAre(AllOf(Field(&cricket::Codec::id, 97),
+ Field(&cricket::Codec::packetization,
+ cricket::kPacketizationParamRaw)),
+ AllOf(Field(&cricket::Codec::id, 96),
+ Field(&cricket::Codec::packetization, absl::nullopt))));
+ EXPECT_THAT(
+ media_send_channel1_impl()->send_codecs(),
+ ElementsAre(AllOf(Field(&cricket::Codec::id, 96),
+ Field(&cricket::Codec::packetization, absl::nullopt)),
+ AllOf(Field(&cricket::Codec::id, 97),
+ Field(&cricket::Codec::packetization,
+ cricket::kPacketizationParamRaw))));
+}
+
+// VideoChannelDoubleThreadTest
+TEST_F(VideoChannelDoubleThreadTest, TestInit) {
+ Base::TestInit();
+}
+
+TEST_F(VideoChannelDoubleThreadTest, TestDeinit) {
+ Base::TestDeinit();
+}
+
+TEST_F(VideoChannelDoubleThreadTest, TestSetContents) {
+ Base::TestSetContents();
+}
+
+TEST_F(VideoChannelDoubleThreadTest, TestSetContentsExtmapAllowMixedAsCaller) {
+ Base::TestSetContentsExtmapAllowMixedCaller(/*offer=*/true, /*answer=*/true);
+}
+
+TEST_F(VideoChannelDoubleThreadTest,
+ TestSetContentsExtmapAllowMixedNotSupportedAsCaller) {
+ Base::TestSetContentsExtmapAllowMixedCaller(/*offer=*/true, /*answer=*/false);
+}
+
+TEST_F(VideoChannelDoubleThreadTest, TestSetContentsExtmapAllowMixedAsCallee) {
+ Base::TestSetContentsExtmapAllowMixedCallee(/*offer=*/true, /*answer=*/true);
+}
+
+TEST_F(VideoChannelDoubleThreadTest,
+ TestSetContentsExtmapAllowMixedNotSupportedAsCallee) {
+ Base::TestSetContentsExtmapAllowMixedCallee(/*offer=*/true, /*answer=*/false);
+}
+
+TEST_F(VideoChannelDoubleThreadTest, TestSetContentsNullOffer) {
+ Base::TestSetContentsNullOffer();
+}
+
+TEST_F(VideoChannelDoubleThreadTest, TestSetContentsRtcpMux) {
+ Base::TestSetContentsRtcpMux();
+}
+
+TEST_F(VideoChannelDoubleThreadTest, TestSetContentsRtcpMuxWithPrAnswer) {
+ Base::TestSetContentsRtcpMux();
+}
+
+TEST_F(VideoChannelDoubleThreadTest, TestChangeStreamParamsInContent) {
+ Base::TestChangeStreamParamsInContent();
+}
+
+TEST_F(VideoChannelDoubleThreadTest, TestPlayoutAndSendingStates) {
+ Base::TestPlayoutAndSendingStates();
+}
+
+TEST_F(VideoChannelDoubleThreadTest, TestMediaContentDirection) {
+ Base::TestMediaContentDirection();
+}
+
+TEST_F(VideoChannelDoubleThreadTest, TestNetworkRouteChanges) {
+ Base::TestNetworkRouteChanges();
+}
+
+TEST_F(VideoChannelDoubleThreadTest, TestCallSetup) {
+ Base::TestCallSetup();
+}
+
+TEST_F(VideoChannelDoubleThreadTest, SendRtpToRtp) {
+ Base::SendRtpToRtp();
+}
+
+TEST_F(VideoChannelDoubleThreadTest, SendDtlsSrtpToDtlsSrtp) {
+ Base::SendDtlsSrtpToDtlsSrtp(0, 0);
+}
+
+TEST_F(VideoChannelDoubleThreadTest, SendDtlsSrtpToDtlsSrtpRtcpMux) {
+ Base::SendDtlsSrtpToDtlsSrtp(RTCP_MUX, RTCP_MUX);
+}
+
+TEST_F(VideoChannelDoubleThreadTest, SendEarlyMediaUsingRtcpMuxSrtp) {
+ Base::SendEarlyMediaUsingRtcpMuxSrtp();
+}
+
+TEST_F(VideoChannelDoubleThreadTest, SendRtpToRtpOnThread) {
+ Base::SendRtpToRtpOnThread();
+}
+
+TEST_F(VideoChannelDoubleThreadTest, SendWithWritabilityLoss) {
+ Base::SendWithWritabilityLoss();
+}
+
+TEST_F(VideoChannelDoubleThreadTest, TestSetContentFailure) {
+ Base::TestSetContentFailure();
+}
+
+TEST_F(VideoChannelDoubleThreadTest, TestSendTwoOffers) {
+ Base::TestSendTwoOffers();
+}
+
+TEST_F(VideoChannelDoubleThreadTest, TestReceiveTwoOffers) {
+ Base::TestReceiveTwoOffers();
+}
+
+TEST_F(VideoChannelDoubleThreadTest, TestSendPrAnswer) {
+ Base::TestSendPrAnswer();
+}
+
+TEST_F(VideoChannelDoubleThreadTest, TestReceivePrAnswer) {
+ Base::TestReceivePrAnswer();
+}
+
+TEST_F(VideoChannelDoubleThreadTest, SendBundleToBundle) {
+ Base::SendBundleToBundle(kVideoPts, arraysize(kVideoPts), false, false);
+}
+
+TEST_F(VideoChannelDoubleThreadTest, SendBundleToBundleSecure) {
+ Base::SendBundleToBundle(kVideoPts, arraysize(kVideoPts), false, true);
+}
+
+TEST_F(VideoChannelDoubleThreadTest, SendBundleToBundleWithRtcpMux) {
+ Base::SendBundleToBundle(kVideoPts, arraysize(kVideoPts), true, false);
+}
+
+TEST_F(VideoChannelDoubleThreadTest, SendBundleToBundleWithRtcpMuxSecure) {
+ Base::SendBundleToBundle(kVideoPts, arraysize(kVideoPts), true, true);
+}
+
+TEST_F(VideoChannelDoubleThreadTest, TestOnTransportReadyToSend) {
+ Base::TestOnTransportReadyToSend();
+}
+
+TEST_F(VideoChannelDoubleThreadTest, DefaultMaxBitrateIsUnlimited) {
+ Base::DefaultMaxBitrateIsUnlimited();
+}
+
+TEST_F(VideoChannelDoubleThreadTest, SocketOptionsMergedOnSetTransport) {
+ Base::SocketOptionsMergedOnSetTransport();
+}
+
+} // namespace
diff --git a/third_party/libwebrtc/pc/connection_context.cc b/third_party/libwebrtc/pc/connection_context.cc
new file mode 100644
index 0000000000..f436e27c0a
--- /dev/null
+++ b/third_party/libwebrtc/pc/connection_context.cc
@@ -0,0 +1,189 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/connection_context.h"
+
+#include <type_traits>
+#include <utility>
+#include <vector>
+
+#include "api/transport/field_trial_based_config.h"
+#include "media/base/media_engine.h"
+#include "media/sctp/sctp_transport_factory.h"
+#include "rtc_base/helpers.h"
+#include "rtc_base/internal/default_socket_server.h"
+#include "rtc_base/socket_server.h"
+#include "rtc_base/time_utils.h"
+
+namespace webrtc {
+
+namespace {
+
+rtc::Thread* MaybeStartNetworkThread(
+ rtc::Thread* old_thread,
+ std::unique_ptr<rtc::SocketFactory>& socket_factory_holder,
+ std::unique_ptr<rtc::Thread>& thread_holder) {
+ if (old_thread) {
+ return old_thread;
+ }
+ std::unique_ptr<rtc::SocketServer> socket_server =
+ rtc::CreateDefaultSocketServer();
+ thread_holder = std::make_unique<rtc::Thread>(socket_server.get());
+ socket_factory_holder = std::move(socket_server);
+
+ thread_holder->SetName("pc_network_thread", nullptr);
+ thread_holder->Start();
+ return thread_holder.get();
+}
+
+rtc::Thread* MaybeWrapThread(rtc::Thread* signaling_thread,
+ bool& wraps_current_thread) {
+ wraps_current_thread = false;
+ if (signaling_thread) {
+ return signaling_thread;
+ }
+ auto this_thread = rtc::Thread::Current();
+ if (!this_thread) {
+ // If this thread isn't already wrapped by an rtc::Thread, create a
+ // wrapper and own it in this class.
+ this_thread = rtc::ThreadManager::Instance()->WrapCurrentThread();
+ wraps_current_thread = true;
+ }
+ return this_thread;
+}
+
+std::unique_ptr<SctpTransportFactoryInterface> MaybeCreateSctpFactory(
+ std::unique_ptr<SctpTransportFactoryInterface> factory,
+ rtc::Thread* network_thread,
+ const FieldTrialsView& field_trials) {
+ if (factory) {
+ return factory;
+ }
+#ifdef WEBRTC_HAVE_SCTP
+ return std::make_unique<cricket::SctpTransportFactory>(network_thread);
+#else
+ return nullptr;
+#endif
+}
+
+} // namespace
+
+// Static
+rtc::scoped_refptr<ConnectionContext> ConnectionContext::Create(
+ PeerConnectionFactoryDependencies* dependencies) {
+ return rtc::scoped_refptr<ConnectionContext>(
+ new ConnectionContext(dependencies));
+}
+
+ConnectionContext::ConnectionContext(
+ PeerConnectionFactoryDependencies* dependencies)
+ : network_thread_(MaybeStartNetworkThread(dependencies->network_thread,
+ owned_socket_factory_,
+ owned_network_thread_)),
+ worker_thread_(dependencies->worker_thread,
+ []() {
+ auto thread_holder = rtc::Thread::Create();
+ thread_holder->SetName("pc_worker_thread", nullptr);
+ thread_holder->Start();
+ return thread_holder;
+ }),
+ signaling_thread_(MaybeWrapThread(dependencies->signaling_thread,
+ wraps_current_thread_)),
+ trials_(dependencies->trials ? std::move(dependencies->trials)
+ : std::make_unique<FieldTrialBasedConfig>()),
+ media_engine_(std::move(dependencies->media_engine)),
+ network_monitor_factory_(
+ std::move(dependencies->network_monitor_factory)),
+ default_network_manager_(std::move(dependencies->network_manager)),
+ call_factory_(std::move(dependencies->call_factory)),
+ default_socket_factory_(std::move(dependencies->packet_socket_factory)),
+ sctp_factory_(
+ MaybeCreateSctpFactory(std::move(dependencies->sctp_factory),
+ network_thread(),
+ *trials_.get())),
+ use_rtx_(true) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ RTC_DCHECK(!(default_network_manager_ && network_monitor_factory_))
+ << "You can't set both network_manager and network_monitor_factory.";
+
+ signaling_thread_->AllowInvokesToThread(worker_thread());
+ signaling_thread_->AllowInvokesToThread(network_thread_);
+ worker_thread_->AllowInvokesToThread(network_thread_);
+ if (!network_thread_->IsCurrent()) {
+ // network_thread_->IsCurrent() == true means signaling_thread_ is
+ // network_thread_. In this case, no further action is required as
+ // signaling_thread_ can already invoke network_thread_.
+ network_thread_->PostTask(
+ [thread = network_thread_, worker_thread = worker_thread_.get()] {
+ thread->DisallowBlockingCalls();
+ thread->DisallowAllInvokes();
+ if (worker_thread == thread) {
+ // In this case, worker_thread_ == network_thread_
+ thread->AllowInvokesToThread(thread);
+ }
+ });
+ }
+
+ rtc::InitRandom(rtc::Time32());
+
+ rtc::SocketFactory* socket_factory = dependencies->socket_factory;
+ if (socket_factory == nullptr) {
+ if (owned_socket_factory_) {
+ socket_factory = owned_socket_factory_.get();
+ } else {
+ // TODO(bugs.webrtc.org/13145): This case should be deleted. Either
+ // require that a PacketSocketFactory and NetworkManager always are
+ // injected (with no need to construct these default objects), or require
+      // that if a network_thread is injected, an appropriate rtc::SocketServer
+ // should be injected too.
+ socket_factory = network_thread()->socketserver();
+ }
+ }
+ if (!default_network_manager_) {
+ // If network_monitor_factory_ is non-null, it will be used to create a
+ // network monitor while on the network thread.
+ default_network_manager_ = std::make_unique<rtc::BasicNetworkManager>(
+ network_monitor_factory_.get(), socket_factory, &field_trials());
+ }
+ if (!default_socket_factory_) {
+ default_socket_factory_ =
+ std::make_unique<rtc::BasicPacketSocketFactory>(socket_factory);
+ }
+ // Set warning levels on the threads, to give warnings when response
+ // may be slower than is expected of the thread.
+ // Since some of the threads may be the same, start with the least
+ // restrictive limits and end with the least permissive ones.
+ // This will give warnings for all cases.
+ signaling_thread_->SetDispatchWarningMs(100);
+ worker_thread_->SetDispatchWarningMs(30);
+ network_thread_->SetDispatchWarningMs(10);
+
+ if (media_engine_) {
+ // TODO(tommi): Change VoiceEngine to do ctor time initialization so that
+ // this isn't necessary.
+ worker_thread_->BlockingCall([&] { media_engine_->Init(); });
+ }
+}
+
+ConnectionContext::~ConnectionContext() {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ // `media_engine_` requires destruction to happen on the worker thread.
+ worker_thread_->PostTask([media_engine = std::move(media_engine_)] {});
+
+ // Make sure `worker_thread()` and `signaling_thread()` outlive
+ // `default_socket_factory_` and `default_network_manager_`.
+ default_socket_factory_ = nullptr;
+ default_network_manager_ = nullptr;
+
+ if (wraps_current_thread_)
+ rtc::ThreadManager::Instance()->UnwrapCurrentThread();
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/connection_context.h b/third_party/libwebrtc/pc/connection_context.h
new file mode 100644
index 0000000000..399e7c2b45
--- /dev/null
+++ b/third_party/libwebrtc/pc/connection_context.h
@@ -0,0 +1,155 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_CONNECTION_CONTEXT_H_
+#define PC_CONNECTION_CONTEXT_H_
+
+#include <memory>
+#include <string>
+
+#include "api/call/call_factory_interface.h"
+#include "api/field_trials_view.h"
+#include "api/media_stream_interface.h"
+#include "api/peer_connection_interface.h"
+#include "api/ref_counted_base.h"
+#include "api/scoped_refptr.h"
+#include "api/sequence_checker.h"
+#include "api/transport/sctp_transport_factory_interface.h"
+#include "media/base/media_engine.h"
+#include "p2p/base/basic_packet_socket_factory.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/network.h"
+#include "rtc_base/network_monitor_factory.h"
+#include "rtc_base/rtc_certificate_generator.h"
+#include "rtc_base/socket_factory.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/thread_annotations.h"
+
+namespace rtc {
+class BasicPacketSocketFactory;
+class UniqueRandomIdGenerator;
+} // namespace rtc
+
+namespace webrtc {
+
+class RtcEventLog;
+
+// This class contains resources needed by PeerConnection and associated
+// objects. A reference to this object is passed to each PeerConnection. The
+// methods on this object are assumed not to change the state in any way that
+// interferes with the operation of other PeerConnections.
+//
+// This class must be created and destroyed on the signaling thread.
+class ConnectionContext final
+ : public rtc::RefCountedNonVirtual<ConnectionContext> {
+ public:
+ // Creates a ConnectionContext. May return null if initialization fails.
+ // The Dependencies class allows simple management of all new dependencies
+ // being added to the ConnectionContext.
+ static rtc::scoped_refptr<ConnectionContext> Create(
+ PeerConnectionFactoryDependencies* dependencies);
+
+ // This class is not copyable or movable.
+ ConnectionContext(const ConnectionContext&) = delete;
+ ConnectionContext& operator=(const ConnectionContext&) = delete;
+
+ // Functions called from PeerConnection and friends
+ SctpTransportFactoryInterface* sctp_transport_factory() const {
+ return sctp_factory_.get();
+ }
+
+ cricket::MediaEngineInterface* media_engine() const {
+ return media_engine_.get();
+ }
+
+ rtc::Thread* signaling_thread() { return signaling_thread_; }
+ const rtc::Thread* signaling_thread() const { return signaling_thread_; }
+ rtc::Thread* worker_thread() { return worker_thread_.get(); }
+ const rtc::Thread* worker_thread() const { return worker_thread_.get(); }
+ rtc::Thread* network_thread() { return network_thread_; }
+ const rtc::Thread* network_thread() const { return network_thread_; }
+
+ // Field trials associated with the PeerConnectionFactory.
+  // Note that there can be different field trials for different
+  // PeerConnections (but they are not supposed to change after creating the
+  // PeerConnection).
+ const FieldTrialsView& field_trials() const { return *trials_.get(); }
+
+ // Accessors only used from the PeerConnectionFactory class
+ rtc::NetworkManager* default_network_manager() {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ return default_network_manager_.get();
+ }
+ rtc::PacketSocketFactory* default_socket_factory() {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ return default_socket_factory_.get();
+ }
+ CallFactoryInterface* call_factory() {
+ RTC_DCHECK_RUN_ON(worker_thread());
+ return call_factory_.get();
+ }
+ rtc::UniqueRandomIdGenerator* ssrc_generator() { return &ssrc_generator_; }
+ // Note: There is lots of code that wants to know whether or not we
+ // use RTX, but so far, no code has been found that sets it to false.
+ // Kept in the API in order to ease introduction if we want to resurrect
+ // the functionality.
+ bool use_rtx() { return use_rtx_; }
+
+ // For use by tests.
+ void set_use_rtx(bool use_rtx) { use_rtx_ = use_rtx; }
+
+ protected:
+ explicit ConnectionContext(PeerConnectionFactoryDependencies* dependencies);
+
+ friend class rtc::RefCountedNonVirtual<ConnectionContext>;
+ ~ConnectionContext();
+
+ private:
+ // The following three variables are used to communicate between the
+ // constructor and the destructor, and are never exposed externally.
+ bool wraps_current_thread_;
+ std::unique_ptr<rtc::SocketFactory> owned_socket_factory_;
+ std::unique_ptr<rtc::Thread> owned_network_thread_
+ RTC_GUARDED_BY(signaling_thread_);
+ rtc::Thread* const network_thread_;
+ AlwaysValidPointer<rtc::Thread> const worker_thread_;
+ rtc::Thread* const signaling_thread_;
+
+ // Accessed both on signaling thread and worker thread.
+ std::unique_ptr<FieldTrialsView> const trials_;
+
+ // This object is const over the lifetime of the ConnectionContext, and is
+ // only altered in the destructor.
+ std::unique_ptr<cricket::MediaEngineInterface> media_engine_;
+
+ // This object should be used to generate any SSRC that is not explicitly
+ // specified by the user (or by the remote party).
+ // TODO(bugs.webrtc.org/12666): This variable is used from both the signaling
+ // and worker threads. See if we can't restrict usage to a single thread.
+ rtc::UniqueRandomIdGenerator ssrc_generator_;
+ std::unique_ptr<rtc::NetworkMonitorFactory> const network_monitor_factory_
+ RTC_GUARDED_BY(signaling_thread_);
+ std::unique_ptr<rtc::NetworkManager> default_network_manager_
+ RTC_GUARDED_BY(signaling_thread_);
+ std::unique_ptr<webrtc::CallFactoryInterface> const call_factory_
+ RTC_GUARDED_BY(worker_thread());
+
+ std::unique_ptr<rtc::PacketSocketFactory> default_socket_factory_
+ RTC_GUARDED_BY(signaling_thread_);
+ std::unique_ptr<SctpTransportFactoryInterface> const sctp_factory_;
+
+  // Controls whether to announce support for the rfc4588 payload format
+  // for retransmitted video packets.
+ bool use_rtx_;
+};
+
+} // namespace webrtc
+
+#endif // PC_CONNECTION_CONTEXT_H_
diff --git a/third_party/libwebrtc/pc/data_channel_controller.cc b/third_party/libwebrtc/pc/data_channel_controller.cc
new file mode 100644
index 0000000000..93599fdba9
--- /dev/null
+++ b/third_party/libwebrtc/pc/data_channel_controller.cc
@@ -0,0 +1,440 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/data_channel_controller.h"
+
+#include <utility>
+
+#include "absl/algorithm/container.h"
+#include "api/peer_connection_interface.h"
+#include "api/rtc_error.h"
+#include "pc/peer_connection_internal.h"
+#include "pc/sctp_utils.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+
+DataChannelController::~DataChannelController() {
+ RTC_DCHECK(sctp_data_channels_n_.empty())
+ << "Missing call to TeardownDataChannelTransport_n?";
+ RTC_DCHECK(!signaling_safety_.flag()->alive())
+ << "Missing call to PrepareForShutdown?";
+}
+
+bool DataChannelController::HasDataChannels() const {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ return channel_usage_ == DataChannelUsage::kInUse;
+}
+
+bool DataChannelController::HasUsedDataChannels() const {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ return channel_usage_ != DataChannelUsage::kNeverUsed;
+}
+
+RTCError DataChannelController::SendData(
+ StreamId sid,
+ const SendDataParams& params,
+ const rtc::CopyOnWriteBuffer& payload) {
+ RTC_DCHECK_RUN_ON(network_thread());
+ if (!data_channel_transport_) {
+ RTC_LOG(LS_ERROR) << "SendData called before transport is ready";
+ return RTCError(RTCErrorType::INVALID_STATE);
+ }
+ return data_channel_transport_->SendData(sid.stream_id_int(), params,
+ payload);
+}
+
+void DataChannelController::AddSctpDataStream(StreamId sid) {
+ RTC_DCHECK_RUN_ON(network_thread());
+ RTC_DCHECK(sid.HasValue());
+ if (data_channel_transport_) {
+ data_channel_transport_->OpenChannel(sid.stream_id_int());
+ }
+}
+
+void DataChannelController::RemoveSctpDataStream(StreamId sid) {
+ RTC_DCHECK_RUN_ON(network_thread());
+ if (data_channel_transport_) {
+ data_channel_transport_->CloseChannel(sid.stream_id_int());
+ }
+}
+
+void DataChannelController::OnChannelStateChanged(
+ SctpDataChannel* channel,
+ DataChannelInterface::DataState state) {
+ RTC_DCHECK_RUN_ON(network_thread());
+
+ // Stash away the internal id here in case `OnSctpDataChannelClosed` ends up
+ // releasing the last reference to the channel.
+ const int channel_id = channel->internal_id();
+
+ if (state == DataChannelInterface::DataState::kClosed)
+ OnSctpDataChannelClosed(channel);
+
+ DataChannelUsage channel_usage = sctp_data_channels_n_.empty()
+ ? DataChannelUsage::kHaveBeenUsed
+ : DataChannelUsage::kInUse;
+ signaling_thread()->PostTask(SafeTask(
+ signaling_safety_.flag(), [this, channel_id, state, channel_usage] {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ channel_usage_ = channel_usage;
+ pc_->OnSctpDataChannelStateChanged(channel_id, state);
+ }));
+}
+
+void DataChannelController::OnDataReceived(
+ int channel_id,
+ DataMessageType type,
+ const rtc::CopyOnWriteBuffer& buffer) {
+ RTC_DCHECK_RUN_ON(network_thread());
+
+ if (HandleOpenMessage_n(channel_id, type, buffer))
+ return;
+
+ auto it = absl::c_find_if(sctp_data_channels_n_, [&](const auto& c) {
+ return c->sid_n().stream_id_int() == channel_id;
+ });
+
+ if (it != sctp_data_channels_n_.end())
+ (*it)->OnDataReceived(type, buffer);
+}
+
+void DataChannelController::OnChannelClosing(int channel_id) {
+ RTC_DCHECK_RUN_ON(network_thread());
+ auto it = absl::c_find_if(sctp_data_channels_n_, [&](const auto& c) {
+ return c->sid_n().stream_id_int() == channel_id;
+ });
+
+ if (it != sctp_data_channels_n_.end())
+ (*it)->OnClosingProcedureStartedRemotely();
+}
+
+void DataChannelController::OnChannelClosed(int channel_id) {
+ RTC_DCHECK_RUN_ON(network_thread());
+ StreamId sid(channel_id);
+ sid_allocator_.ReleaseSid(sid);
+ auto it = absl::c_find_if(sctp_data_channels_n_,
+ [&](const auto& c) { return c->sid_n() == sid; });
+
+ if (it != sctp_data_channels_n_.end()) {
+ rtc::scoped_refptr<SctpDataChannel> channel = std::move(*it);
+ sctp_data_channels_n_.erase(it);
+ channel->OnClosingProcedureComplete();
+ }
+}
+
+void DataChannelController::OnReadyToSend() {
+ RTC_DCHECK_RUN_ON(network_thread());
+ auto copy = sctp_data_channels_n_;
+ for (const auto& channel : copy) {
+ if (channel->sid_n().HasValue()) {
+ channel->OnTransportReady();
+ } else {
+ // This happens for role==SSL_SERVER channels when we get notified by
+ // the transport *before* the SDP code calls `AllocateSctpSids` to
+ // trigger assignment of sids. In this case OnTransportReady() will be
+ // called from within `AllocateSctpSids` below.
+ RTC_LOG(LS_INFO) << "OnReadyToSend: Still waiting for an id for channel.";
+ }
+ }
+}
+
+void DataChannelController::OnTransportClosed(RTCError error) {
+ RTC_DCHECK_RUN_ON(network_thread());
+
+  // This loop will close all data channels and trigger a callback to
+  // `OnSctpDataChannelClosed`. We'll empty `sctp_data_channels_n_` first,
+  // so `OnSctpDataChannelClosed` becomes a noop, and we release each
+  // channel's StreamId here instead.
+ std::vector<rtc::scoped_refptr<SctpDataChannel>> temp_sctp_dcs;
+ temp_sctp_dcs.swap(sctp_data_channels_n_);
+ for (const auto& channel : temp_sctp_dcs) {
+ channel->OnTransportChannelClosed(error);
+ sid_allocator_.ReleaseSid(channel->sid_n());
+ }
+}
+
+void DataChannelController::SetupDataChannelTransport_n(
+ DataChannelTransportInterface* transport) {
+ RTC_DCHECK_RUN_ON(network_thread());
+ RTC_DCHECK(transport);
+ set_data_channel_transport(transport);
+}
+
+void DataChannelController::PrepareForShutdown() {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ signaling_safety_.reset(PendingTaskSafetyFlag::CreateDetachedInactive());
+ if (channel_usage_ != DataChannelUsage::kNeverUsed)
+ channel_usage_ = DataChannelUsage::kHaveBeenUsed;
+}
+
+void DataChannelController::TeardownDataChannelTransport_n(RTCError error) {
+ RTC_DCHECK_RUN_ON(network_thread());
+ OnTransportClosed(error);
+ set_data_channel_transport(nullptr);
+ RTC_DCHECK(sctp_data_channels_n_.empty());
+ weak_factory_.InvalidateWeakPtrs();
+}
+
+void DataChannelController::OnTransportChanged(
+ DataChannelTransportInterface* new_data_channel_transport) {
+ RTC_DCHECK_RUN_ON(network_thread());
+ if (data_channel_transport_ &&
+ data_channel_transport_ != new_data_channel_transport) {
+ // Changed which data channel transport is used for `sctp_mid_` (eg. now
+ // it's bundled).
+ set_data_channel_transport(new_data_channel_transport);
+ }
+}
+
+std::vector<DataChannelStats> DataChannelController::GetDataChannelStats()
+ const {
+ RTC_DCHECK_RUN_ON(network_thread());
+ std::vector<DataChannelStats> stats;
+ stats.reserve(sctp_data_channels_n_.size());
+ for (const auto& channel : sctp_data_channels_n_)
+ stats.push_back(channel->GetStats());
+ return stats;
+}
+
+bool DataChannelController::HandleOpenMessage_n(
+ int channel_id,
+ DataMessageType type,
+ const rtc::CopyOnWriteBuffer& buffer) {
+ if (type != DataMessageType::kControl || !IsOpenMessage(buffer))
+ return false;
+
+ // Received OPEN message; parse and signal that a new data channel should
+ // be created.
+ std::string label;
+ InternalDataChannelInit config;
+ config.id = channel_id;
+ if (!ParseDataChannelOpenMessage(buffer, &label, &config)) {
+ RTC_LOG(LS_WARNING) << "Failed to parse the OPEN message for sid "
+ << channel_id;
+ } else {
+ config.open_handshake_role = InternalDataChannelInit::kAcker;
+ auto channel_or_error = CreateDataChannel(label, config);
+ if (channel_or_error.ok()) {
+ signaling_thread()->PostTask(SafeTask(
+ signaling_safety_.flag(),
+ [this, channel = channel_or_error.MoveValue(),
+ ready_to_send = data_channel_transport_->IsReadyToSend()] {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ OnDataChannelOpenMessage(std::move(channel), ready_to_send);
+ }));
+ } else {
+ RTC_LOG(LS_ERROR) << "Failed to create DataChannel from the OPEN message."
+ << ToString(channel_or_error.error().type());
+ }
+ }
+ return true;
+}
+
+void DataChannelController::OnDataChannelOpenMessage(
+ rtc::scoped_refptr<SctpDataChannel> channel,
+ bool ready_to_send) {
+ channel_usage_ = DataChannelUsage::kInUse;
+ auto proxy = SctpDataChannel::CreateProxy(channel, signaling_safety_.flag());
+
+ pc_->Observer()->OnDataChannel(proxy);
+ pc_->NoteDataAddedEvent();
+
+ if (ready_to_send) {
+ network_thread()->PostTask([channel = std::move(channel)] {
+ if (channel->state() != DataChannelInterface::DataState::kClosed)
+ channel->OnTransportReady();
+ });
+ }
+}
+
+// RTC_RUN_ON(network_thread())
+RTCError DataChannelController::ReserveOrAllocateSid(
+ StreamId& sid,
+ absl::optional<rtc::SSLRole> fallback_ssl_role) {
+ if (sid.HasValue()) {
+ return sid_allocator_.ReserveSid(sid)
+ ? RTCError::OK()
+ : RTCError(RTCErrorType::INVALID_RANGE,
+ "StreamId out of range or reserved.");
+ }
+
+ // Attempt to allocate an ID based on the negotiated role.
+ absl::optional<rtc::SSLRole> role = pc_->GetSctpSslRole_n();
+ if (!role)
+ role = fallback_ssl_role;
+ if (role) {
+ sid = sid_allocator_.AllocateSid(*role);
+ if (!sid.HasValue())
+ return RTCError(RTCErrorType::RESOURCE_EXHAUSTED);
+ }
+ // When we get here, we may still not have an ID, but that's a supported case
+ // whereby an id will be assigned later.
+ RTC_DCHECK(sid.HasValue() || !role);
+ return RTCError::OK();
+}
+
+// RTC_RUN_ON(network_thread())
+RTCErrorOr<rtc::scoped_refptr<SctpDataChannel>>
+DataChannelController::CreateDataChannel(const std::string& label,
+ InternalDataChannelInit& config) {
+ StreamId sid(config.id);
+ RTCError err = ReserveOrAllocateSid(sid, config.fallback_ssl_role);
+ if (!err.ok())
+ return err;
+
+ // In case `sid` has changed. Update `config` accordingly.
+ config.id = sid.stream_id_int();
+
+ rtc::scoped_refptr<SctpDataChannel> channel = SctpDataChannel::Create(
+ weak_factory_.GetWeakPtr(), label, data_channel_transport_ != nullptr,
+ config, signaling_thread(), network_thread());
+ RTC_DCHECK(channel);
+ sctp_data_channels_n_.push_back(channel);
+
+ // If we have an id already, notify the transport.
+ if (sid.HasValue())
+ AddSctpDataStream(sid);
+
+ return channel;
+}
+
+RTCErrorOr<rtc::scoped_refptr<DataChannelInterface>>
+DataChannelController::InternalCreateDataChannelWithProxy(
+ const std::string& label,
+ const InternalDataChannelInit& config) {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ RTC_DCHECK(!pc_->IsClosed());
+ if (!config.IsValid()) {
+ LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
+ "Invalid DataChannelInit");
+ }
+
+ bool ready_to_send = false;
+ InternalDataChannelInit new_config = config;
+ StreamId sid(new_config.id);
+ auto ret = network_thread()->BlockingCall(
+ [&]() -> RTCErrorOr<rtc::scoped_refptr<SctpDataChannel>> {
+ RTC_DCHECK_RUN_ON(network_thread());
+ auto channel = CreateDataChannel(label, new_config);
+ if (!channel.ok())
+ return channel;
+ ready_to_send =
+ data_channel_transport_ && data_channel_transport_->IsReadyToSend();
+ if (ready_to_send) {
+          // The transport may already be ready to send because the initial
+          // channel-ready signal was sent before the DataChannel creation.
+ // This has to be done async because the upper layer objects (e.g.
+ // Chrome glue and WebKit) are not wired up properly until after
+ // `InternalCreateDataChannelWithProxy` returns.
+ network_thread()->PostTask([channel = channel.value()] {
+ if (channel->state() != DataChannelInterface::DataState::kClosed)
+ channel->OnTransportReady();
+ });
+ }
+
+ return channel;
+ });
+
+ if (!ret.ok())
+ return ret.MoveError();
+
+ channel_usage_ = DataChannelUsage::kInUse;
+ return SctpDataChannel::CreateProxy(ret.MoveValue(),
+ signaling_safety_.flag());
+}
+
+void DataChannelController::AllocateSctpSids(rtc::SSLRole role) {
+ RTC_DCHECK_RUN_ON(network_thread());
+
+ const bool ready_to_send =
+ data_channel_transport_ && data_channel_transport_->IsReadyToSend();
+
+ std::vector<std::pair<SctpDataChannel*, StreamId>> channels_to_update;
+ std::vector<rtc::scoped_refptr<SctpDataChannel>> channels_to_close;
+ for (auto it = sctp_data_channels_n_.begin();
+ it != sctp_data_channels_n_.end();) {
+ if (!(*it)->sid_n().HasValue()) {
+ StreamId sid = sid_allocator_.AllocateSid(role);
+ if (sid.HasValue()) {
+ (*it)->SetSctpSid_n(sid);
+ AddSctpDataStream(sid);
+ if (ready_to_send) {
+ RTC_LOG(LS_INFO) << "AllocateSctpSids: Id assigned, ready to send.";
+ (*it)->OnTransportReady();
+ }
+ channels_to_update.push_back(std::make_pair((*it).get(), sid));
+ } else {
+ channels_to_close.push_back(std::move(*it));
+ it = sctp_data_channels_n_.erase(it);
+ continue;
+ }
+ }
+ ++it;
+ }
+
+ // Since closing modifies the list of channels, we have to do the actual
+ // closing outside the loop.
+ for (const auto& channel : channels_to_close) {
+ channel->CloseAbruptlyWithDataChannelFailure("Failed to allocate SCTP SID");
+ }
+}
+
+void DataChannelController::OnSctpDataChannelClosed(SctpDataChannel* channel) {
+ RTC_DCHECK_RUN_ON(network_thread());
+ // After the closing procedure is done, it's safe to use this ID for
+ // another data channel.
+ if (channel->sid_n().HasValue()) {
+ sid_allocator_.ReleaseSid(channel->sid_n());
+ }
+ auto it = absl::c_find_if(sctp_data_channels_n_,
+ [&](const auto& c) { return c.get() == channel; });
+ if (it != sctp_data_channels_n_.end())
+ sctp_data_channels_n_.erase(it);
+}
+
+void DataChannelController::set_data_channel_transport(
+ DataChannelTransportInterface* transport) {
+ RTC_DCHECK_RUN_ON(network_thread());
+
+ if (data_channel_transport_)
+ data_channel_transport_->SetDataSink(nullptr);
+
+ data_channel_transport_ = transport;
+
+ if (data_channel_transport_) {
+ // There's a new data channel transport. This needs to be signaled to the
+ // `sctp_data_channels_n_` so that they can reopen and reconnect. This is
+ // necessary when bundling is applied.
+ NotifyDataChannelsOfTransportCreated();
+ data_channel_transport_->SetDataSink(this);
+ }
+}
+
+void DataChannelController::NotifyDataChannelsOfTransportCreated() {
+ RTC_DCHECK_RUN_ON(network_thread());
+ RTC_DCHECK(data_channel_transport_);
+
+ for (const auto& channel : sctp_data_channels_n_) {
+ if (channel->sid_n().HasValue())
+ AddSctpDataStream(channel->sid_n());
+ channel->OnTransportChannelCreated();
+ }
+}
+
+rtc::Thread* DataChannelController::network_thread() const {
+ return pc_->network_thread();
+}
+
+rtc::Thread* DataChannelController::signaling_thread() const {
+ return pc_->signaling_thread();
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/data_channel_controller.h b/third_party/libwebrtc/pc/data_channel_controller.h
new file mode 100644
index 0000000000..bf3ac03437
--- /dev/null
+++ b/third_party/libwebrtc/pc/data_channel_controller.h
@@ -0,0 +1,165 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_DATA_CHANNEL_CONTROLLER_H_
+#define PC_DATA_CHANNEL_CONTROLLER_H_
+
+#include <string>
+#include <vector>
+
+#include "api/data_channel_interface.h"
+#include "api/rtc_error.h"
+#include "api/scoped_refptr.h"
+#include "api/sequence_checker.h"
+#include "api/task_queue/pending_task_safety_flag.h"
+#include "api/transport/data_channel_transport_interface.h"
+#include "pc/data_channel_utils.h"
+#include "pc/sctp_data_channel.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/copy_on_write_buffer.h"
+#include "rtc_base/ssl_stream_adapter.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/thread_annotations.h"
+#include "rtc_base/weak_ptr.h"
+
+namespace webrtc {
+
+class PeerConnectionInternal;
+
+class DataChannelController : public SctpDataChannelControllerInterface,
+ public DataChannelSink {
+ public:
+ explicit DataChannelController(PeerConnectionInternal* pc) : pc_(pc) {}
+ ~DataChannelController();
+
+ // Not copyable or movable.
+ DataChannelController(DataChannelController&) = delete;
+ DataChannelController& operator=(const DataChannelController& other) = delete;
+ DataChannelController(DataChannelController&&) = delete;
+ DataChannelController& operator=(DataChannelController&& other) = delete;
+
+  // Implements
+  // SctpDataChannelControllerInterface.
+ RTCError SendData(StreamId sid,
+ const SendDataParams& params,
+ const rtc::CopyOnWriteBuffer& payload) override;
+ void AddSctpDataStream(StreamId sid) override;
+ void RemoveSctpDataStream(StreamId sid) override;
+ void OnChannelStateChanged(SctpDataChannel* channel,
+ DataChannelInterface::DataState state) override;
+
+ // Implements DataChannelSink.
+ void OnDataReceived(int channel_id,
+ DataMessageType type,
+ const rtc::CopyOnWriteBuffer& buffer) override;
+ void OnChannelClosing(int channel_id) override;
+ void OnChannelClosed(int channel_id) override;
+ void OnReadyToSend() override;
+ void OnTransportClosed(RTCError error) override;
+
+ // Called as part of destroying the owning PeerConnection.
+ void PrepareForShutdown();
+
+ // Called from PeerConnection::SetupDataChannelTransport_n
+ void SetupDataChannelTransport_n(DataChannelTransportInterface* transport);
+ // Called from PeerConnection::TeardownDataChannelTransport_n
+ void TeardownDataChannelTransport_n(RTCError error);
+
+ // Called from PeerConnection::OnTransportChanged
+ // to make required changes to datachannels' transports.
+ void OnTransportChanged(
+ DataChannelTransportInterface* data_channel_transport);
+
+ // Called from PeerConnection::GetDataChannelStats on the signaling thread.
+ std::vector<DataChannelStats> GetDataChannelStats() const;
+
+ // Creates channel and adds it to the collection of DataChannels that will
+ // be offered in a SessionDescription, and wraps it in a proxy object.
+ RTCErrorOr<rtc::scoped_refptr<DataChannelInterface>>
+ InternalCreateDataChannelWithProxy(const std::string& label,
+ const InternalDataChannelInit& config);
+ void AllocateSctpSids(rtc::SSLRole role);
+
+ // Check if data channels are currently tracked. Used to decide whether a
+ // rejected m=application section should be reoffered.
+ bool HasDataChannels() const;
+
+ // At some point in time, a data channel has existed.
+ bool HasUsedDataChannels() const;
+
+ protected:
+ rtc::Thread* network_thread() const;
+ rtc::Thread* signaling_thread() const;
+
+ private:
+ void OnSctpDataChannelClosed(SctpDataChannel* channel);
+
+ // Creates a new SctpDataChannel object on the network thread.
+ RTCErrorOr<rtc::scoped_refptr<SctpDataChannel>> CreateDataChannel(
+ const std::string& label,
+ InternalDataChannelInit& config) RTC_RUN_ON(network_thread());
+
+ // Parses and handles open messages. Returns true if the message is an open
+ // message and should be considered to be handled, false otherwise.
+ bool HandleOpenMessage_n(int channel_id,
+ DataMessageType type,
+ const rtc::CopyOnWriteBuffer& buffer)
+ RTC_RUN_ON(network_thread());
+ // Called when a valid data channel OPEN message is received.
+ void OnDataChannelOpenMessage(rtc::scoped_refptr<SctpDataChannel> channel,
+ bool ready_to_send)
+ RTC_RUN_ON(signaling_thread());
+
+ // Accepts a `StreamId` which may be pre-negotiated or unassigned. For
+ // pre-negotiated sids, attempts to reserve the sid in the allocation pool,
+ // for unassigned sids attempts to generate a new sid if possible. Returns
+ // RTCError::OK() if the sid is reserved (and may have been generated) or
+ // if not enough information exists to generate a sid, in which case the sid
+ // will still be unassigned upon return, but will be assigned later.
+ // If the pool has been exhausted or a sid has already been reserved, an
+ // error will be returned.
+ RTCError ReserveOrAllocateSid(StreamId& sid,
+ absl::optional<rtc::SSLRole> fallback_ssl_role)
+ RTC_RUN_ON(network_thread());
+
+  // Called when all data channels need to be notified of a transport channel
+  // (calls OnTransportChannelCreated on the network thread).
+ void NotifyDataChannelsOfTransportCreated();
+
+ void set_data_channel_transport(DataChannelTransportInterface* transport);
+
+ // Plugin transport used for data channels. Pointer may be accessed and
+ // checked from any thread, but the object may only be touched on the
+ // network thread.
+ DataChannelTransportInterface* data_channel_transport_
+ RTC_GUARDED_BY(network_thread()) = nullptr;
+ SctpSidAllocator sid_allocator_ RTC_GUARDED_BY(network_thread());
+ std::vector<rtc::scoped_refptr<SctpDataChannel>> sctp_data_channels_n_
+ RTC_GUARDED_BY(network_thread());
+ enum class DataChannelUsage : uint8_t {
+ kNeverUsed = 0,
+ kHaveBeenUsed,
+ kInUse
+ };
+ DataChannelUsage channel_usage_ RTC_GUARDED_BY(signaling_thread()) =
+ DataChannelUsage::kNeverUsed;
+
+ // Owning PeerConnection.
+ PeerConnectionInternal* const pc_;
+ // The weak pointers must be dereferenced and invalidated on the network
+ // thread only.
+ rtc::WeakPtrFactory<DataChannelController> weak_factory_
+ RTC_GUARDED_BY(network_thread()){this};
+ ScopedTaskSafety signaling_safety_;
+};
+
+} // namespace webrtc
+
+#endif // PC_DATA_CHANNEL_CONTROLLER_H_
diff --git a/third_party/libwebrtc/pc/data_channel_controller_unittest.cc b/third_party/libwebrtc/pc/data_channel_controller_unittest.cc
new file mode 100644
index 0000000000..3b8adb6819
--- /dev/null
+++ b/third_party/libwebrtc/pc/data_channel_controller_unittest.cc
@@ -0,0 +1,214 @@
+/*
+ * Copyright 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/data_channel_controller.h"
+
+#include <memory>
+
+#include "pc/peer_connection_internal.h"
+#include "pc/sctp_data_channel.h"
+#include "pc/test/mock_peer_connection_internal.h"
+#include "rtc_base/null_socket_server.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+#include "test/run_loop.h"
+
+namespace webrtc {
+
+namespace {
+
+using ::testing::NiceMock;
+using ::testing::Return;
+
+class MockDataChannelTransport : public webrtc::DataChannelTransportInterface {
+ public:
+ ~MockDataChannelTransport() override {}
+
+ MOCK_METHOD(RTCError, OpenChannel, (int channel_id), (override));
+ MOCK_METHOD(RTCError,
+ SendData,
+ (int channel_id,
+ const SendDataParams& params,
+ const rtc::CopyOnWriteBuffer& buffer),
+ (override));
+ MOCK_METHOD(RTCError, CloseChannel, (int channel_id), (override));
+ MOCK_METHOD(void, SetDataSink, (DataChannelSink * sink), (override));
+ MOCK_METHOD(bool, IsReadyToSend, (), (const, override));
+};
+
+// Convenience class for tests to ensure that shutdown methods for DCC
+// are consistently called. In practice SdpOfferAnswerHandler will call
+// TeardownDataChannelTransport_n on the network thread when destroying the
+// data channel transport and PeerConnection calls PrepareForShutdown() from
+// within PeerConnection::Close(). The DataChannelControllerForTest class mimics
+// behavior by calling those methods from within its destructor.
+class DataChannelControllerForTest : public DataChannelController {
+ public:
+ explicit DataChannelControllerForTest(
+ PeerConnectionInternal* pc,
+ DataChannelTransportInterface* transport = nullptr)
+ : DataChannelController(pc) {
+ if (transport) {
+ network_thread()->BlockingCall(
+ [&] { SetupDataChannelTransport_n(transport); });
+ }
+ }
+
+ ~DataChannelControllerForTest() override {
+ network_thread()->BlockingCall(
+ [&] { TeardownDataChannelTransport_n(RTCError::OK()); });
+ PrepareForShutdown();
+ }
+};
+
+class DataChannelControllerTest : public ::testing::Test {
+ protected:
+ DataChannelControllerTest()
+ : network_thread_(std::make_unique<rtc::NullSocketServer>()) {
+ network_thread_.Start();
+ pc_ = rtc::make_ref_counted<NiceMock<MockPeerConnectionInternal>>();
+ ON_CALL(*pc_, signaling_thread)
+ .WillByDefault(Return(rtc::Thread::Current()));
+ ON_CALL(*pc_, network_thread).WillByDefault(Return(&network_thread_));
+ }
+
+ ~DataChannelControllerTest() override {
+ run_loop_.Flush();
+ network_thread_.Stop();
+ }
+
+ test::RunLoop run_loop_;
+ rtc::Thread network_thread_;
+ rtc::scoped_refptr<NiceMock<MockPeerConnectionInternal>> pc_;
+};
+
+TEST_F(DataChannelControllerTest, CreateAndDestroy) {
+ DataChannelControllerForTest dcc(pc_.get());
+}
+
+TEST_F(DataChannelControllerTest, CreateDataChannelEarlyRelease) {
+ DataChannelControllerForTest dcc(pc_.get());
+ auto ret = dcc.InternalCreateDataChannelWithProxy(
+ "label", InternalDataChannelInit(DataChannelInit()));
+ ASSERT_TRUE(ret.ok());
+ auto channel = ret.MoveValue();
+ // DCC still holds a reference to the channel. Release this reference early.
+ channel = nullptr;
+}
+
+// Closing a channel removes it from the controller's live set
+// (HasDataChannels), while the "has ever been used" flag
+// (HasUsedDataChannels) stays latched.
+TEST_F(DataChannelControllerTest, CreateDataChannelEarlyClose) {
+  DataChannelControllerForTest dcc(pc_.get());
+  EXPECT_FALSE(dcc.HasDataChannels());
+  EXPECT_FALSE(dcc.HasUsedDataChannels());
+  auto ret = dcc.InternalCreateDataChannelWithProxy(
+      "label", InternalDataChannelInit(DataChannelInit()));
+  ASSERT_TRUE(ret.ok());
+  auto channel = ret.MoveValue();
+  EXPECT_TRUE(dcc.HasDataChannels());
+  EXPECT_TRUE(dcc.HasUsedDataChannels());
+  channel->Close();
+  // Flush so the asynchronous close bookkeeping completes before checking.
+  run_loop_.Flush();
+  EXPECT_FALSE(dcc.HasDataChannels());
+  EXPECT_TRUE(dcc.HasUsedDataChannels());
+}
+
+// Destroy the controller while a channel reference is still alive, then
+// release the channel. The reverse teardown order of EarlyRelease; must be
+// equally safe.
+TEST_F(DataChannelControllerTest, CreateDataChannelLateRelease) {
+  auto dcc = std::make_unique<DataChannelControllerForTest>(pc_.get());
+  auto ret = dcc->InternalCreateDataChannelWithProxy(
+      "label", InternalDataChannelInit(DataChannelInit()));
+  ASSERT_TRUE(ret.ok());
+  auto channel = ret.MoveValue();
+  dcc.reset();
+  channel = nullptr;
+}
+
+// Calling Close() on a channel after its controller has been destroyed must
+// not crash (the channel can no longer reach the controller).
+TEST_F(DataChannelControllerTest, CloseAfterControllerDestroyed) {
+  auto dcc = std::make_unique<DataChannelControllerForTest>(pc_.get());
+  auto ret = dcc->InternalCreateDataChannelWithProxy(
+      "label", InternalDataChannelInit(DataChannelInit()));
+  ASSERT_TRUE(ret.ok());
+  auto channel = ret.MoveValue();
+  dcc.reset();
+  channel->Close();
+}
+
+// Allocate the maximum number of data channels and then one more.
+// The last allocation should fail.
+TEST_F(DataChannelControllerTest, MaxChannels) {
+  NiceMock<MockDataChannelTransport> transport;
+  int channel_id = 0;
+
+  // Alternate the reported SSL role per iteration so stream id allocation is
+  // exercised for both the client and server role.
+  ON_CALL(*pc_, GetSctpSslRole_n).WillByDefault([&]() {
+    return absl::optional<rtc::SSLRole>((channel_id & 1) ? rtc::SSL_SERVER
+                                                         : rtc::SSL_CLIENT);
+  });
+
+  DataChannelControllerForTest dcc(pc_.get(), &transport);
+
+  // Allocate the maximum number of channels + 1. Inside the loop, the creation
+  // process will allocate a stream id for each channel.
+  for (channel_id = 0; channel_id <= cricket::kMaxSctpStreams; ++channel_id) {
+    auto ret = dcc.InternalCreateDataChannelWithProxy(
+        "label", InternalDataChannelInit(DataChannelInit()));
+    if (channel_id == cricket::kMaxSctpStreams) {
+      // We've reached the maximum and the previous call should have failed.
+      EXPECT_FALSE(ret.ok());
+    } else {
+      // We're still working on saturating the pool. Things should be working.
+      EXPECT_TRUE(ret.ok());
+    }
+  }
+}
+
+// Test that while a data channel is in the `kClosing` state, its StreamId does
+// not get re-used for new channels. Only once the state reaches `kClosed`
+// should a StreamId be available again for allocation.
+TEST_F(DataChannelControllerTest, NoStreamIdReuseWhileClosing) {
+  // Fix the role as client for deterministic stream id allocation.
+  ON_CALL(*pc_, GetSctpSslRole_n).WillByDefault([&]() {
+    return rtc::SSL_CLIENT;
+  });
+
+  NiceMock<MockDataChannelTransport> transport;  // Wider scope than `dcc`.
+  DataChannelControllerForTest dcc(pc_.get(), &transport);
+
+  // Create the first channel and check that we got the expected, first sid.
+  auto channel1 = dcc.InternalCreateDataChannelWithProxy(
+                         "label", InternalDataChannelInit(DataChannelInit()))
+                      .MoveValue();
+  ASSERT_EQ(channel1->id(), 0);
+
+  // Start closing the channel and make sure its state is `kClosing`
+  channel1->Close();
+  ASSERT_EQ(channel1->state(), DataChannelInterface::DataState::kClosing);
+
+  // Create a second channel and make sure we get a new StreamId, not the same
+  // as that of channel1.
+  auto channel2 = dcc.InternalCreateDataChannelWithProxy(
+                         "label2", InternalDataChannelInit(DataChannelInit()))
+                      .MoveValue();
+  ASSERT_NE(channel2->id(), channel1->id());  // In practice the id will be 2.
+
+  // Simulate the acknowledgement of the channel closing from the transport.
+  // This completes the closing operation of channel1.
+  pc_->network_thread()->BlockingCall([&] { dcc.OnChannelClosed(0); });
+  run_loop_.Flush();
+  ASSERT_EQ(channel1->state(), DataChannelInterface::DataState::kClosed);
+
+  // Now create a third channel. This time, the id of the first channel should
+  // be available again and therefore the ids of the first and third channels
+  // should be the same.
+  auto channel3 = dcc.InternalCreateDataChannelWithProxy(
+                         "label3", InternalDataChannelInit(DataChannelInit()))
+                      .MoveValue();
+  EXPECT_EQ(channel3->id(), channel1->id());
+}
+
+} // namespace
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/data_channel_integrationtest.cc b/third_party/libwebrtc/pc/data_channel_integrationtest.cc
new file mode 100644
index 0000000000..faec76d03e
--- /dev/null
+++ b/third_party/libwebrtc/pc/data_channel_integrationtest.cc
@@ -0,0 +1,1167 @@
+/*
+ * Copyright 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdint.h>
+
+#include <cstdlib>
+#include <iterator>
+#include <string>
+#include <tuple>
+#include <vector>
+
+#include "absl/algorithm/container.h"
+#include "absl/types/optional.h"
+#include "api/data_channel_interface.h"
+#include "api/dtls_transport_interface.h"
+#include "api/peer_connection_interface.h"
+#include "api/scoped_refptr.h"
+#include "api/sctp_transport_interface.h"
+#include "api/stats/rtc_stats_report.h"
+#include "api/stats/rtcstats_objects.h"
+#include "api/units/time_delta.h"
+#include "p2p/base/transport_description.h"
+#include "p2p/base/transport_info.h"
+#include "pc/media_session.h"
+#include "pc/session_description.h"
+#include "pc/test/integration_test_helpers.h"
+#include "pc/test/mock_peer_connection_observers.h"
+#include "rtc_base/copy_on_write_buffer.h"
+#include "rtc_base/fake_clock.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/helpers.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/numerics/safe_conversions.h"
+#include "rtc_base/virtual_socket_server.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+
+namespace {
+
+// All tests in this file require SCTP support.
+#ifdef WEBRTC_HAVE_SCTP
+
+#if defined(WEBRTC_ANDROID)
+// Disable heavy tests running on low-end Android devices.
+#define DISABLED_ON_ANDROID(t) DISABLED_##t
+#else
+#define DISABLED_ON_ANDROID(t) t
+#endif
+
+// Parameterized integration fixture. The first tuple element selects the SDP
+// semantics; the second controls whether a media engine is created, so each
+// test runs both with media-capable and data-only peer connections.
+class DataChannelIntegrationTest
+    : public PeerConnectionIntegrationBaseTest,
+      public ::testing::WithParamInterface<std::tuple<SdpSemantics, bool>> {
+ protected:
+  DataChannelIntegrationTest()
+      : PeerConnectionIntegrationBaseTest(std::get<0>(GetParam())),
+        allow_media_(std::get<1>(GetParam())) {}
+  bool allow_media() { return allow_media_; }
+
+  // Shadows the base helper: creates wrappers without a media engine when the
+  // test parameter disallows media.
+  bool CreatePeerConnectionWrappers() {
+    if (allow_media_) {
+      return PeerConnectionIntegrationBaseTest::CreatePeerConnectionWrappers();
+    }
+    return PeerConnectionIntegrationBaseTest::
+        CreatePeerConnectionWrappersWithoutMediaEngine();
+  }
+
+ private:
+  // True if media is allowed to be added
+  const bool allow_media_;
+};
+
+// Fake clock must be set before threads are started to prevent race on
+// Set/GetClockForTesting().
+// To achieve that, multiple inheritance is used as a mixin pattern
+// where order of construction is finely controlled.
+// This also ensures peerconnection is closed before switching back to non-fake
+// clock, avoiding other races and DCHECK failures such as in rtp_sender.cc.
+class FakeClockForTest : public rtc::ScopedFakeClock {
+ protected:
+  FakeClockForTest() {
+    // Some things use a time of "0" as a special value, so we need to start out
+    // the fake clock at a nonzero time.
+    // TODO(deadbeef): Fix this.
+    AdvanceTime(webrtc::TimeDelta::Seconds(1));
+  }
+
+  // Explicit accessor for the fake-clock mixin, for tests that need to
+  // advance time manually.
+  ScopedFakeClock& FakeClock() { return *this; }
+};
+
+// Non-parameterized fixture pinned to the deprecated Plan B SDP semantics.
+class DataChannelIntegrationTestPlanB
+    : public PeerConnectionIntegrationBaseTest {
+ protected:
+  DataChannelIntegrationTestPlanB()
+      : PeerConnectionIntegrationBaseTest(SdpSemantics::kPlanB_DEPRECATED) {}
+};
+
+// Non-parameterized fixture pinned to Unified Plan SDP semantics.
+class DataChannelIntegrationTestUnifiedPlan
+    : public PeerConnectionIntegrationBaseTest {
+ protected:
+  DataChannelIntegrationTestUnifiedPlan()
+      : PeerConnectionIntegrationBaseTest(SdpSemantics::kUnifiedPlan) {}
+};
+
+// Rewrites every transport in `desc` to take the "active" connection role,
+// i.e. the offering side initiates the DTLS connection.
+void MakeActiveSctpOffer(cricket::SessionDescription* desc) {
+  auto& transport_infos = desc->transport_infos();
+  for (auto& transport_info : transport_infos) {
+    transport_info.description.connection_role = cricket::CONNECTIONROLE_ACTIVE;
+  }
+}
+
+// This test causes a PeerConnection to enter Disconnected state, and
+// sends data on a DataChannel while disconnected.
+// The data should be surfaced when the connection reestablishes.
+TEST_P(DataChannelIntegrationTest, DataChannelWhileDisconnected) {
+  CreatePeerConnectionWrappers();
+  ConnectFakeSignaling();
+  caller()->CreateDataChannel();
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  ASSERT_TRUE_WAIT(callee()->data_observer(), kDefaultTimeout);
+  // Verify the channel works while connected.
+  std::string data1 = "hello first";
+  caller()->data_channel()->Send(DataBuffer(data1));
+  EXPECT_EQ_WAIT(data1, callee()->data_observer()->last_message(),
+                 kDefaultTimeout);
+  // Cause a network outage
+  virtual_socket_server()->set_drop_probability(1.0);
+  EXPECT_EQ_WAIT(PeerConnectionInterface::kIceConnectionDisconnected,
+                 caller()->standardized_ice_connection_state(),
+                 kDefaultTimeout);
+  // Send while disconnected; the message should be queued/retransmitted.
+  std::string data2 = "hello second";
+  caller()->data_channel()->Send(DataBuffer(data2));
+  // Remove the network outage. The connection should reestablish.
+  virtual_socket_server()->set_drop_probability(0.0);
+  EXPECT_EQ_WAIT(data2, callee()->data_observer()->last_message(),
+                 kDefaultTimeout);
+}
+
+// This test causes a PeerConnection to enter Disconnected state,
+// sends data on a DataChannel while disconnected, and then triggers
+// an ICE restart.
+// The data should be surfaced when the connection reestablishes.
+TEST_P(DataChannelIntegrationTest, DataChannelWhileDisconnectedIceRestart) {
+  CreatePeerConnectionWrappers();
+  ConnectFakeSignaling();
+  caller()->CreateDataChannel();
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  ASSERT_TRUE_WAIT(callee()->data_observer(), kDefaultTimeout);
+  // Verify the channel works while connected.
+  std::string data1 = "hello first";
+  caller()->data_channel()->Send(DataBuffer(data1));
+  EXPECT_EQ_WAIT(data1, callee()->data_observer()->last_message(),
+                 kDefaultTimeout);
+  // Cause a network outage
+  virtual_socket_server()->set_drop_probability(1.0);
+  ASSERT_EQ_WAIT(PeerConnectionInterface::kIceConnectionDisconnected,
+                 caller()->standardized_ice_connection_state(),
+                 kDefaultTimeout);
+  std::string data2 = "hello second";
+  caller()->data_channel()->Send(DataBuffer(data2));
+
+  // Trigger an ICE restart. The signaling channel is not affected by
+  // the network outage.
+  caller()->SetOfferAnswerOptions(IceRestartOfferAnswerOptions());
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  // Remove the network outage. The connection should reestablish.
+  virtual_socket_server()->set_drop_probability(0.0);
+  EXPECT_EQ_WAIT(data2, callee()->data_observer()->last_message(),
+                 kDefaultTimeout);
+}
+
+// This test sets up a call between two parties with audio, video and an SCTP
+// data channel.
+TEST_P(DataChannelIntegrationTest, EndToEndCallWithSctpDataChannel) {
+  ASSERT_TRUE(CreatePeerConnectionWrappers());
+  ConnectFakeSignaling();
+  // Expect that data channel created on caller side will show up for callee as
+  // well.
+  caller()->CreateDataChannel();
+  if (allow_media()) {
+    caller()->AddAudioVideoTracks();
+    callee()->AddAudioVideoTracks();
+  }
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  if (allow_media()) {
+    // Ensure the existence of the SCTP data channel didn't impede audio/video.
+    MediaExpectations media_expectations;
+    media_expectations.ExpectBidirectionalAudioAndVideo();
+    ASSERT_TRUE(ExpectNewFrames(media_expectations));
+  }
+  // Caller data channel should already exist (it created one). Callee data
+  // channel may not exist yet, since negotiation happens in-band, not in SDP.
+  ASSERT_NE(nullptr, caller()->data_channel());
+  ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout);
+  EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout);
+  EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout);
+
+  // Ensure data can be sent in both directions.
+  std::string data = "hello world";
+  caller()->data_channel()->Send(DataBuffer(data));
+  EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(),
+                 kDefaultTimeout);
+  callee()->data_channel()->Send(DataBuffer(data));
+  EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(),
+                 kDefaultTimeout);
+}
+
+// This test sets up a call between two parties with an SCTP
+// data channel only, and sends messages of various sizes.
+TEST_P(DataChannelIntegrationTest,
+       EndToEndCallWithSctpDataChannelVariousSizes) {
+  ASSERT_TRUE(CreatePeerConnectionWrappers());
+  ConnectFakeSignaling();
+  // Expect that data channel created on caller side will show up for callee as
+  // well.
+  caller()->CreateDataChannel();
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  // Caller data channel should already exist (it created one). Callee data
+  // channel may not exist yet, since negotiation happens in-band, not in SDP.
+  ASSERT_NE(nullptr, caller()->data_channel());
+  ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout);
+  EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout);
+  EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout);
+
+  // Exponentially growing sizes, 1 byte up to ~64 KB, in both directions.
+  for (int message_size = 1; message_size < 100000; message_size *= 2) {
+    std::string data(message_size, 'a');
+    caller()->data_channel()->Send(DataBuffer(data));
+    EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(),
+                   kDefaultTimeout);
+    callee()->data_channel()->Send(DataBuffer(data));
+    EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(),
+                   kDefaultTimeout);
+  }
+  // Specifically probe the area around the MTU size.
+  for (int message_size = 1100; message_size < 1300; message_size += 1) {
+    std::string data(message_size, 'a');
+    caller()->data_channel()->Send(DataBuffer(data));
+    EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(),
+                   kDefaultTimeout);
+    callee()->data_channel()->Send(DataBuffer(data));
+    EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(),
+                   kDefaultTimeout);
+  }
+}
+
+// This test sets up a call between two parties with an SCTP
+// data channel only, and sends empty messages
+TEST_P(DataChannelIntegrationTest,
+       EndToEndCallWithSctpDataChannelEmptyMessages) {
+  ASSERT_TRUE(CreatePeerConnectionWrappers());
+  ConnectFakeSignaling();
+  // Expect that data channel created on caller side will show up for callee as
+  // well.
+  caller()->CreateDataChannel();
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  // Caller data channel should already exist (it created one). Callee data
+  // channel may not exist yet, since negotiation happens in-band, not in SDP.
+  ASSERT_NE(nullptr, caller()->data_channel());
+  ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout);
+  EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout);
+  EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout);
+
+  // Ensure data can be sent in both directions.
+  // Sending empty string data
+  std::string data = "";
+  caller()->data_channel()->Send(DataBuffer(data));
+  EXPECT_EQ_WAIT(1u, callee()->data_observer()->received_message_count(),
+                 kDefaultTimeout);
+  // An empty text message must arrive as empty and non-binary.
+  EXPECT_TRUE(callee()->data_observer()->last_message().empty());
+  EXPECT_FALSE(callee()->data_observer()->messages().back().binary);
+  callee()->data_channel()->Send(DataBuffer(data));
+  EXPECT_EQ_WAIT(1u, caller()->data_observer()->received_message_count(),
+                 kDefaultTimeout);
+  EXPECT_TRUE(caller()->data_observer()->last_message().empty());
+  EXPECT_FALSE(caller()->data_observer()->messages().back().binary);
+
+  // Sending empty binary data
+  rtc::CopyOnWriteBuffer empty_buffer;
+  caller()->data_channel()->Send(DataBuffer(empty_buffer, true));
+  EXPECT_EQ_WAIT(2u, callee()->data_observer()->received_message_count(),
+                 kDefaultTimeout);
+  // An empty binary message must arrive as empty and flagged binary.
+  EXPECT_TRUE(callee()->data_observer()->last_message().empty());
+  EXPECT_TRUE(callee()->data_observer()->messages().back().binary);
+  callee()->data_channel()->Send(DataBuffer(empty_buffer, true));
+  EXPECT_EQ_WAIT(2u, caller()->data_observer()->received_message_count(),
+                 kDefaultTimeout);
+  EXPECT_TRUE(caller()->data_observer()->last_message().empty());
+  EXPECT_TRUE(caller()->data_observer()->messages().back().binary);
+}
+
+// Verifies the data channel still delivers reliably when the UDP payload
+// size is clamped to the smallest limit known to be safe for this setup.
+TEST_P(DataChannelIntegrationTest,
+       EndToEndCallWithSctpDataChannelLowestSafeMtu) {
+  // The lowest payload size limit that's tested and found safe for this
+  // application. Note that this is not the safe limit under all conditions;
+  // in particular, the default is not the largest DTLS signature, and
+  // this test does not use TURN.
+  const size_t kLowestSafePayloadSizeLimit = 1225;
+
+  ASSERT_TRUE(CreatePeerConnectionWrappers());
+  ConnectFakeSignaling();
+  // Expect that data channel created on caller side will show up for callee as
+  // well.
+  caller()->CreateDataChannel();
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  // Caller data channel should already exist (it created one). Callee data
+  // channel may not exist yet, since negotiation happens in-band, not in SDP.
+  ASSERT_NE(nullptr, caller()->data_channel());
+  ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout);
+  EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout);
+  EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout);
+
+  // Clamp the payload size only after the channel is open, then probe
+  // message sizes around the limit in both directions.
+  virtual_socket_server()->set_max_udp_payload(kLowestSafePayloadSizeLimit);
+  for (int message_size = 1140; message_size < 1240; message_size += 1) {
+    std::string data(message_size, 'a');
+    caller()->data_channel()->Send(DataBuffer(data));
+    ASSERT_EQ_WAIT(data, callee()->data_observer()->last_message(),
+                   kDefaultTimeout);
+    callee()->data_channel()->Send(DataBuffer(data));
+    ASSERT_EQ_WAIT(data, caller()->data_observer()->last_message(),
+                   kDefaultTimeout);
+  }
+}
+
+// This test verifies that lowering the MTU of the connection will cause
+// the datachannel to not transmit reliably.
+// The purpose of this test is to ensure that we know how a too-small MTU
+// error manifests itself.
+TEST_P(DataChannelIntegrationTest, EndToEndCallWithSctpDataChannelHarmfulMtu) {
+  // The lowest payload size limit that's tested and found safe for this
+  // application in this configuration (see test above).
+  const size_t kLowestSafePayloadSizeLimit = 1225;
+  // The size of the smallest message that fails to be delivered.
+  const size_t kMessageSizeThatIsNotDelivered = 1157;
+
+  ASSERT_TRUE(CreatePeerConnectionWrappers());
+  ConnectFakeSignaling();
+  caller()->CreateDataChannel();
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  ASSERT_NE(nullptr, caller()->data_channel());
+  ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout);
+  EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout);
+  EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout);
+
+  // One byte below the safe limit: delivery is expected to break.
+  virtual_socket_server()->set_max_udp_payload(kLowestSafePayloadSizeLimit - 1);
+  // Probe for an undelivered or slowly delivered message. The exact
+  // size limit seems to be dependent on the message history, so make the
+  // code easily able to find the current value.
+  bool failure_seen = false;
+  for (size_t message_size = 1110; message_size < 1400; message_size++) {
+    const size_t message_count =
+        callee()->data_observer()->received_message_count();
+    const std::string data(message_size, 'a');
+    caller()->data_channel()->Send(DataBuffer(data));
+    // Wait a very short time for the message to be delivered.
+    // Note: Waiting only 10 ms is too short for Windows bots; they will
+    // flakily fail at a random frame.
+    WAIT(callee()->data_observer()->received_message_count() > message_count,
+         100);
+    if (callee()->data_observer()->received_message_count() == message_count) {
+      // Pin the exact size at which delivery first fails, so a change in
+      // behavior is noticed.
+      ASSERT_EQ(kMessageSizeThatIsNotDelivered, message_size);
+      failure_seen = true;
+      break;
+    }
+  }
+  ASSERT_TRUE(failure_seen);
+}
+
+// Ensure that when the callee closes an SCTP data channel, the closing
+// procedure results in the data channel being closed for the caller as well.
+TEST_P(DataChannelIntegrationTest, CalleeClosesSctpDataChannel) {
+  // Same procedure as above test.
+  ASSERT_TRUE(CreatePeerConnectionWrappers());
+  ConnectFakeSignaling();
+  caller()->CreateDataChannel();
+  if (allow_media()) {
+    caller()->AddAudioVideoTracks();
+    callee()->AddAudioVideoTracks();
+  }
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  ASSERT_NE(nullptr, caller()->data_channel());
+  ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout);
+  ASSERT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout);
+  ASSERT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout);
+
+  // Close the data channel on the callee side, and wait for it to reach the
+  // "closed" state on both sides.
+  callee()->data_channel()->Close();
+
+  // Both sides must observe the full state progression, in order.
+  DataChannelInterface::DataState expected_states[] = {
+      DataChannelInterface::DataState::kConnecting,
+      DataChannelInterface::DataState::kOpen,
+      DataChannelInterface::DataState::kClosing,
+      DataChannelInterface::DataState::kClosed};
+
+  EXPECT_EQ_WAIT(DataChannelInterface::DataState::kClosed,
+                 caller()->data_observer()->state(), kDefaultTimeout);
+  EXPECT_THAT(caller()->data_observer()->states(),
+              ::testing::ElementsAreArray(expected_states));
+
+  EXPECT_EQ_WAIT(DataChannelInterface::DataState::kClosed,
+                 callee()->data_observer()->state(), kDefaultTimeout);
+  EXPECT_THAT(callee()->data_observer()->states(),
+              ::testing::ElementsAreArray(expected_states));
+}
+
+// Verifies that the channel configuration (label, maxRetransmits) is
+// propagated in-band to the remote side, while a non-negotiated "id" is
+// ignored and allocated by the stack instead.
+TEST_P(DataChannelIntegrationTest, SctpDataChannelConfigSentToOtherSide) {
+  ASSERT_TRUE(CreatePeerConnectionWrappers());
+  ConnectFakeSignaling();
+  webrtc::DataChannelInit init;
+  init.id = 53;
+  init.maxRetransmits = 52;
+  caller()->CreateDataChannel("data-channel", &init);
+  if (allow_media()) {
+    caller()->AddAudioVideoTracks();
+    callee()->AddAudioVideoTracks();
+  }
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout);
+  ASSERT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout);
+  // Since "negotiated" is false, the "id" parameter should be ignored.
+  EXPECT_NE(init.id, callee()->data_channel()->id());
+  EXPECT_EQ("data-channel", callee()->data_channel()->label());
+  EXPECT_EQ(init.maxRetransmits, callee()->data_channel()->maxRetransmits());
+  EXPECT_FALSE(callee()->data_channel()->negotiated());
+}
+
+// Test sctp's ability to process unordered data stream, where data actually
+// arrives out of order using simulated delays. Previously there have been some
+// bugs in this area.
+TEST_P(DataChannelIntegrationTest, StressTestUnorderedSctpDataChannel) {
+  // Introduce random network delays.
+  // Otherwise it's not a true "unordered" test.
+  virtual_socket_server()->set_delay_mean(20);
+  virtual_socket_server()->set_delay_stddev(5);
+  virtual_socket_server()->UpdateDelayDistribution();
+  // Normal procedure, but with unordered data channel config.
+  ASSERT_TRUE(CreatePeerConnectionWrappers());
+  ConnectFakeSignaling();
+  webrtc::DataChannelInit init;
+  init.ordered = false;
+  caller()->CreateDataChannel(&init);
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  ASSERT_NE(nullptr, caller()->data_channel());
+  ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout);
+  ASSERT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout);
+  ASSERT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout);
+
+  static constexpr int kNumMessages = 100;
+  // Deliberately chosen to be larger than the MTU so messages get fragmented.
+  static constexpr size_t kMaxMessageSize = 4096;
+  // Create and send random messages.
+  std::vector<std::string> sent_messages;
+  for (int i = 0; i < kNumMessages; ++i) {
+    size_t length =
+        (rand() % kMaxMessageSize) + 1;  // NOLINT (rand_r instead of rand)
+    std::string message;
+    ASSERT_TRUE(rtc::CreateRandomString(length, &message));
+    caller()->data_channel()->Send(DataBuffer(message));
+    callee()->data_channel()->Send(DataBuffer(message));
+    sent_messages.push_back(message);
+  }
+
+  // Wait for all messages to be received.
+  EXPECT_EQ_WAIT(rtc::checked_cast<size_t>(kNumMessages),
+                 caller()->data_observer()->received_message_count(),
+                 kDefaultTimeout);
+  EXPECT_EQ_WAIT(rtc::checked_cast<size_t>(kNumMessages),
+                 callee()->data_observer()->received_message_count(),
+                 kDefaultTimeout);
+
+  // Sort and compare to make sure none of the messages were corrupted.
+  // (Arrival order is irrelevant for an unordered channel, hence the sort.)
+  std::vector<std::string> caller_received_messages;
+  absl::c_transform(caller()->data_observer()->messages(),
+                    std::back_inserter(caller_received_messages),
+                    [](const auto& a) { return a.data; });
+
+  std::vector<std::string> callee_received_messages;
+  absl::c_transform(callee()->data_observer()->messages(),
+                    std::back_inserter(callee_received_messages),
+                    [](const auto& a) { return a.data; });
+
+  absl::c_sort(sent_messages);
+  absl::c_sort(caller_received_messages);
+  absl::c_sort(callee_received_messages);
+  EXPECT_EQ(sent_messages, caller_received_messages);
+  EXPECT_EQ(sent_messages, callee_received_messages);
+}
+
+// Repeatedly open and close data channels on a peer connection to check that
+// the channels are properly negotiated and SCTP stream IDs properly recycled.
+TEST_P(DataChannelIntegrationTest, StressTestOpenCloseChannelNoDelay) {
+  ASSERT_TRUE(CreatePeerConnectionWrappers());
+  ConnectFakeSignaling();
+
+  int channel_id = 0;
+  const size_t kChannelCount = 8;
+  const size_t kIterations = 10;
+  bool has_negotiated = false;
+
+  webrtc::DataChannelInit init;
+  for (size_t repeats = 0; repeats < kIterations; ++repeats) {
+    RTC_LOG(LS_INFO) << "Iteration " << (repeats + 1) << "/" << kIterations;
+
+    // Open a batch of channels with unique labels.
+    for (size_t i = 0; i < kChannelCount; ++i) {
+      rtc::StringBuilder sb;
+      sb << "channel-" << channel_id++;
+      caller()->CreateDataChannel(sb.Release(), &init);
+    }
+    ASSERT_EQ(caller()->data_channels().size(), kChannelCount);
+
+    // SDP negotiation is only needed once; subsequent channels are
+    // negotiated in-band.
+    if (!has_negotiated) {
+      caller()->CreateAndSetAndSignalOffer();
+      ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+      has_negotiated = true;
+    }
+
+    for (size_t i = 0; i < kChannelCount; ++i) {
+      ASSERT_EQ_WAIT(caller()->data_channels()[i]->state(),
+                     DataChannelInterface::DataState::kOpen, kDefaultTimeout);
+      RTC_LOG(LS_INFO) << "Caller Channel "
+                       << caller()->data_channels()[i]->label() << " with id "
+                       << caller()->data_channels()[i]->id() << " is open.";
+    }
+    ASSERT_EQ_WAIT(callee()->data_channels().size(), kChannelCount,
+                   kDefaultTimeout);
+    for (size_t i = 0; i < kChannelCount; ++i) {
+      ASSERT_EQ_WAIT(callee()->data_channels()[i]->state(),
+                     DataChannelInterface::DataState::kOpen, kDefaultTimeout);
+      RTC_LOG(LS_INFO) << "Callee Channel "
+                       << callee()->data_channels()[i]->label() << " with id "
+                       << callee()->data_channels()[i]->id() << " is open.";
+    }
+
+    // Closing from both sides to attempt creating races.
+    // A real application would likely only close from one side.
+    for (size_t i = 0; i < kChannelCount; ++i) {
+      if (i % 3 == 0) {
+        callee()->data_channels()[i]->Close();
+        caller()->data_channels()[i]->Close();
+      } else {
+        caller()->data_channels()[i]->Close();
+        callee()->data_channels()[i]->Close();
+      }
+    }
+
+    for (size_t i = 0; i < kChannelCount; ++i) {
+      ASSERT_EQ_WAIT(caller()->data_channels()[i]->state(),
+                     DataChannelInterface::DataState::kClosed, kDefaultTimeout);
+      ASSERT_EQ_WAIT(callee()->data_channels()[i]->state(),
+                     DataChannelInterface::DataState::kClosed, kDefaultTimeout);
+    }
+
+    // Drop all references so the next iteration starts from a clean slate.
+    caller()->data_channels().clear();
+    caller()->data_observers().clear();
+    callee()->data_channels().clear();
+    callee()->data_observers().clear();
+  }
+}
+
+// Repeatedly open and close data channels on a peer connection to check that
+// the channels are properly negotiated and SCTP stream IDs properly recycled.
+// Some delay is added for better coverage.
+// (Same body as StressTestOpenCloseChannelNoDelay, plus simulated delay.)
+TEST_P(DataChannelIntegrationTest, StressTestOpenCloseChannelWithDelay) {
+  // Simulate some network delay
+  virtual_socket_server()->set_delay_mean(20);
+  virtual_socket_server()->set_delay_stddev(5);
+  virtual_socket_server()->UpdateDelayDistribution();
+
+  ASSERT_TRUE(CreatePeerConnectionWrappers());
+  ConnectFakeSignaling();
+
+  int channel_id = 0;
+  const size_t kChannelCount = 8;
+  const size_t kIterations = 10;
+  bool has_negotiated = false;
+
+  webrtc::DataChannelInit init;
+  for (size_t repeats = 0; repeats < kIterations; ++repeats) {
+    RTC_LOG(LS_INFO) << "Iteration " << (repeats + 1) << "/" << kIterations;
+
+    // Open a batch of channels with unique labels.
+    for (size_t i = 0; i < kChannelCount; ++i) {
+      rtc::StringBuilder sb;
+      sb << "channel-" << channel_id++;
+      caller()->CreateDataChannel(sb.Release(), &init);
+    }
+    ASSERT_EQ(caller()->data_channels().size(), kChannelCount);
+
+    // SDP negotiation is only needed once; subsequent channels are
+    // negotiated in-band.
+    if (!has_negotiated) {
+      caller()->CreateAndSetAndSignalOffer();
+      ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+      has_negotiated = true;
+    }
+
+    for (size_t i = 0; i < kChannelCount; ++i) {
+      ASSERT_EQ_WAIT(caller()->data_channels()[i]->state(),
+                     DataChannelInterface::DataState::kOpen, kDefaultTimeout);
+      RTC_LOG(LS_INFO) << "Caller Channel "
+                       << caller()->data_channels()[i]->label() << " with id "
+                       << caller()->data_channels()[i]->id() << " is open.";
+    }
+    ASSERT_EQ_WAIT(callee()->data_channels().size(), kChannelCount,
+                   kDefaultTimeout);
+    for (size_t i = 0; i < kChannelCount; ++i) {
+      ASSERT_EQ_WAIT(callee()->data_channels()[i]->state(),
+                     DataChannelInterface::DataState::kOpen, kDefaultTimeout);
+      RTC_LOG(LS_INFO) << "Callee Channel "
+                       << callee()->data_channels()[i]->label() << " with id "
+                       << callee()->data_channels()[i]->id() << " is open.";
+    }
+
+    // Closing from both sides to attempt creating races.
+    // A real application would likely only close from one side.
+    for (size_t i = 0; i < kChannelCount; ++i) {
+      if (i % 3 == 0) {
+        callee()->data_channels()[i]->Close();
+        caller()->data_channels()[i]->Close();
+      } else {
+        caller()->data_channels()[i]->Close();
+        callee()->data_channels()[i]->Close();
+      }
+    }
+
+    for (size_t i = 0; i < kChannelCount; ++i) {
+      ASSERT_EQ_WAIT(caller()->data_channels()[i]->state(),
+                     DataChannelInterface::DataState::kClosed, kDefaultTimeout);
+      ASSERT_EQ_WAIT(callee()->data_channels()[i]->state(),
+                     DataChannelInterface::DataState::kClosed, kDefaultTimeout);
+    }
+
+    // Drop all references so the next iteration starts from a clean slate.
+    caller()->data_channels().clear();
+    caller()->data_observers().clear();
+    callee()->data_channels().clear();
+    callee()->data_observers().clear();
+  }
+}
+
+// This test sets up a call between two parties with audio, and video. When
+// audio and video are setup and flowing, an SCTP data channel is negotiated.
+TEST_P(DataChannelIntegrationTest, AddSctpDataChannelInSubsequentOffer) {
+  // This test can't be performed without media.
+  if (!allow_media()) {
+    return;
+  }
+  ASSERT_TRUE(CreatePeerConnectionWrappers());
+  ConnectFakeSignaling();
+  // Do initial offer/answer with audio/video.
+  caller()->AddAudioVideoTracks();
+  callee()->AddAudioVideoTracks();
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  // Create data channel and do new offer and answer.
+  caller()->CreateDataChannel();
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  // Caller data channel should already exist (it created one). Callee data
+  // channel may not exist yet, since negotiation happens in-band, not in SDP.
+  ASSERT_NE(nullptr, caller()->data_channel());
+  ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout);
+  EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout);
+  EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout);
+  // Ensure data can be sent in both directions.
+  std::string data = "hello world";
+  caller()->data_channel()->Send(DataBuffer(data));
+  EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(),
+                 kDefaultTimeout);
+  callee()->data_channel()->Send(DataBuffer(data));
+  EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(),
+                 kDefaultTimeout);
+}
+
+// Set up a connection initially just using SCTP data channels, later
+// upgrading to audio/video, ensuring frames are received end-to-end.
+// Effectively the inverse of the test above. This was broken in M57; see
+// https://crbug.com/711243
+TEST_P(DataChannelIntegrationTest, SctpDataChannelToAudioVideoUpgrade) {
+  // This test can't be performed without media.
+  if (!allow_media()) {
+    return;
+  }
+  ASSERT_TRUE(CreatePeerConnectionWrappers());
+  ConnectFakeSignaling();
+  // Do initial offer/answer with just data channel.
+  caller()->CreateDataChannel();
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  // Wait until data can be sent over the data channel.
+  // The callee channel appears asynchronously via in-band negotiation.
+  ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout);
+  ASSERT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout);
+  ASSERT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout);
+
+  // Do subsequent offer/answer with two-way audio and video. Audio and video
+  // should end up bundled on the DTLS/ICE transport already used for data.
+  caller()->AddAudioVideoTracks();
+  callee()->AddAudioVideoTracks();
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  MediaExpectations media_expectations;
+  media_expectations.ExpectBidirectionalAudioAndVideo();
+  ASSERT_TRUE(ExpectNewFrames(media_expectations));
+}
+
+// SDP munger that rewrites the first SCTP m= section of `desc` into its
+// spec-compliant form: no "a=sctpmap" attribute and "UDP/DTLS/SCTP" as the
+// protocol.
+static void MakeSpecCompliantSctpOffer(cricket::SessionDescription* desc) {
+  cricket::SctpDataContentDescription* dcd_offer =
+      GetFirstSctpDataContentDescription(desc);
+  // See https://crbug.com/webrtc/11211 - this function is a no-op
+  // (presumably because the values set below are already the defaults;
+  // confirm against SctpDataContentDescription before relying on this).
+  ASSERT_TRUE(dcd_offer);
+  dcd_offer->set_use_sctpmap(false);
+  dcd_offer->set_protocol("UDP/DTLS/SCTP");
+}
+
+// Test that the data channel works when a spec-compliant SCTP m= section is
+// offered (using "a=sctp-port" instead of "a=sctpmap", and using
+// "UDP/DTLS/SCTP" as the protocol).
+TEST_P(DataChannelIntegrationTest,
+       DataChannelWorksWhenSpecCompliantSctpOfferReceived) {
+  ASSERT_TRUE(CreatePeerConnectionWrappers());
+  ConnectFakeSignaling();
+  caller()->CreateDataChannel();
+  // Rewrite the generated offer into the spec-compliant SCTP form before it
+  // is signaled to the callee.
+  caller()->SetGeneratedSdpMunger(MakeSpecCompliantSctpOffer);
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout);
+  EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout);
+  EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout);
+
+  // Ensure data can be sent in both directions.
+  std::string data = "hello world";
+  caller()->data_channel()->Send(DataBuffer(data));
+  EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(),
+                 kDefaultTimeout);
+  callee()->data_channel()->Send(DataBuffer(data));
+  EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(),
+                 kDefaultTimeout);
+}
+
+// Test that after closing PeerConnections, they stop sending any packets
+// (ICE, DTLS, RTP...).
+TEST_P(DataChannelIntegrationTest, ClosingConnectionStopsPacketFlow) {
+  // This test can't be performed without media.
+  if (!allow_media()) {
+    return;
+  }
+  // Set up audio/video/data, wait for some frames to be received.
+  ASSERT_TRUE(CreatePeerConnectionWrappers());
+  ConnectFakeSignaling();
+  caller()->AddAudioVideoTracks();
+  caller()->CreateDataChannel();
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  MediaExpectations media_expectations;
+  media_expectations.CalleeExpectsSomeAudioAndVideo();
+  ASSERT_TRUE(ExpectNewFrames(media_expectations));
+  // Close PeerConnections.
+  ClosePeerConnections();
+  // Pump messages for a second, and ensure no new packets end up sent.
+  // WAIT(false, 1000) never satisfies its condition; it is used purely to
+  // pump the message queues for the full 1000 ms.
+  uint32_t sent_packets_a = virtual_socket_server()->sent_packets();
+  WAIT(false, 1000);
+  uint32_t sent_packets_b = virtual_socket_server()->sent_packets();
+  EXPECT_EQ(sent_packets_a, sent_packets_b);
+}
+
+// Verifies that with default signaling the DTLS roles come out as
+// caller=server and callee=client, and that the SCTP stream id follows the
+// role-based odd/even rule.
+TEST_P(DataChannelIntegrationTest, DtlsRoleIsSetNormally) {
+  ASSERT_TRUE(CreatePeerConnectionWrappers());
+  ConnectFakeSignaling();
+  caller()->CreateDataChannel();
+  // No SCTP transport exists before negotiation.
+  ASSERT_FALSE(caller()->pc()->GetSctpTransport());
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  ASSERT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout);
+  ASSERT_TRUE(caller()->pc()->GetSctpTransport());
+  ASSERT_TRUE(
+      caller()->pc()->GetSctpTransport()->Information().dtls_transport());
+  // The role is optional-valued; check presence before comparing.
+  EXPECT_TRUE(caller()
+                  ->pc()
+                  ->GetSctpTransport()
+                  ->Information()
+                  .dtls_transport()
+                  ->Information()
+                  .role());
+  EXPECT_EQ(caller()
+                ->pc()
+                ->GetSctpTransport()
+                ->Information()
+                .dtls_transport()
+                ->Information()
+                .role(),
+            DtlsTransportTlsRole::kServer);
+  EXPECT_EQ(callee()
+                ->pc()
+                ->GetSctpTransport()
+                ->Information()
+                .dtls_transport()
+                ->Information()
+                .role(),
+            DtlsTransportTlsRole::kClient);
+  // ID should be assigned according to the odd/even rule based on role;
+  // client gets even numbers, server gets odd ones. RFC 8832 section 6.
+  // TODO(hta): Test multiple channels.
+  EXPECT_EQ(caller()->data_channel()->id(), 1);
+}
+
+// Same as DtlsRoleIsSetNormally, but the callee munges the received offer
+// (MakeActiveSctpOffer) so the DTLS roles end up reversed:
+// caller=client, callee=server.
+TEST_P(DataChannelIntegrationTest, DtlsRoleIsSetWhenReversed) {
+  ASSERT_TRUE(CreatePeerConnectionWrappers());
+  ConnectFakeSignaling();
+  caller()->CreateDataChannel();
+  callee()->SetReceivedSdpMunger(MakeActiveSctpOffer);
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  ASSERT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout);
+  // The role is optional-valued; check presence before comparing.
+  EXPECT_TRUE(caller()
+                  ->pc()
+                  ->GetSctpTransport()
+                  ->Information()
+                  .dtls_transport()
+                  ->Information()
+                  .role());
+  EXPECT_EQ(caller()
+                ->pc()
+                ->GetSctpTransport()
+                ->Information()
+                .dtls_transport()
+                ->Information()
+                .role(),
+            DtlsTransportTlsRole::kClient);
+  EXPECT_EQ(callee()
+                ->pc()
+                ->GetSctpTransport()
+                ->Information()
+                .dtls_transport()
+                ->Information()
+                .role(),
+            DtlsTransportTlsRole::kServer);
+  // ID should be assigned according to the odd/even rule based on role;
+  // client gets even numbers, server gets odd ones. RFC 8832 section 6.
+  // TODO(hta): Test multiple channels.
+  EXPECT_EQ(caller()->data_channel()->id(), 0);
+}
+
+// Reversed-role variant where both sides create a channel, so both ends must
+// resolve the stream-id collision via the odd/even rule.
+TEST_P(DataChannelIntegrationTest,
+       DtlsRoleIsSetWhenReversedWithChannelCollision) {
+  ASSERT_TRUE(CreatePeerConnectionWrappers());
+  ConnectFakeSignaling();
+  caller()->CreateDataChannel();
+
+  // The callee creates its own channel from inside the munger, i.e. while the
+  // received offer is being processed, to force a collision.
+  callee()->SetReceivedSdpMunger([this](cricket::SessionDescription* desc) {
+    MakeActiveSctpOffer(desc);
+    callee()->CreateDataChannel();
+  });
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  ASSERT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout);
+  // Each side ends up with two channels: its own plus the remote one.
+  ASSERT_EQ_WAIT(callee()->data_channels().size(), 2U, kDefaultTimeout);
+  ASSERT_EQ_WAIT(caller()->data_channels().size(), 2U, kDefaultTimeout);
+  EXPECT_TRUE(caller()
+                  ->pc()
+                  ->GetSctpTransport()
+                  ->Information()
+                  .dtls_transport()
+                  ->Information()
+                  .role());
+  EXPECT_EQ(caller()
+                ->pc()
+                ->GetSctpTransport()
+                ->Information()
+                .dtls_transport()
+                ->Information()
+                .role(),
+            DtlsTransportTlsRole::kClient);
+  EXPECT_EQ(callee()
+                ->pc()
+                ->GetSctpTransport()
+                ->Information()
+                .dtls_transport()
+                ->Information()
+                .role(),
+            DtlsTransportTlsRole::kServer);
+  // ID should be assigned according to the odd/even rule based on role;
+  // client gets even numbers, server gets odd ones. RFC 8832 section 6.
+  ASSERT_EQ(caller()->data_channels().size(), 2U);
+  ASSERT_EQ(callee()->data_channels().size(), 2U);
+  EXPECT_EQ(caller()->data_channels()[0]->id(), 0);
+  EXPECT_EQ(caller()->data_channels()[1]->id(), 1);
+  EXPECT_EQ(callee()->data_channels()[0]->id(), 1);
+  EXPECT_EQ(callee()->data_channels()[1]->id(), 0);
+}
+
+// Test that transport stats are generated by the RTCStatsCollector for a
+// connection that only involves data channels. This is a regression test for
+// crbug.com/826972.
+TEST_P(DataChannelIntegrationTest,
+       TransportStatsReportedForDataChannelOnlyConnection) {
+  ASSERT_TRUE(CreatePeerConnectionWrappers());
+  ConnectFakeSignaling();
+  caller()->CreateDataChannel();
+
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  ASSERT_TRUE_WAIT(callee()->data_channel(), kDefaultTimeout);
+
+  // Exactly one transport stats object is expected per side, since the
+  // connection carries only the single SCTP association.
+  auto caller_report = caller()->NewGetStats();
+  EXPECT_EQ(1u, caller_report->GetStatsOfType<RTCTransportStats>().size());
+  auto callee_report = callee()->NewGetStats();
+  EXPECT_EQ(1u, callee_report->GetStatsOfType<RTCTransportStats>().size());
+}
+
+// Verifies that messages queued while the transport drops all packets are
+// retransmitted and delivered once the outage ends: a reliable, ordered
+// channel may not lose data.
+TEST_P(DataChannelIntegrationTest, QueuedPacketsGetDeliveredInReliableMode) {
+  // Assert the setup result, consistent with the other tests in this file;
+  // previously the return value was ignored and a failure here would have
+  // continued into null-wrapper dereferences.
+  ASSERT_TRUE(CreatePeerConnectionWrappers());
+  ConnectFakeSignaling();
+  caller()->CreateDataChannel();
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  ASSERT_TRUE_WAIT(callee()->data_channel(), kDefaultTimeout);
+
+  caller()->data_channel()->Send(DataBuffer("hello first"));
+  ASSERT_EQ_WAIT(1u, callee()->data_observer()->received_message_count(),
+                 kDefaultTimeout);
+  // Cause a temporary network outage
+  virtual_socket_server()->set_drop_probability(1.0);
+  for (int i = 1; i <= 10; i++) {
+    caller()->data_channel()->Send(DataBuffer("Sent while blocked"));
+  }
+  // Nothing should be delivered during outage. Short wait.
+  EXPECT_EQ_WAIT(1u, callee()->data_observer()->received_message_count(), 10);
+  // Reverse outage
+  virtual_socket_server()->set_drop_probability(0.0);
+  // All packets should be delivered.
+  EXPECT_EQ_WAIT(11u, callee()->data_observer()->received_message_count(),
+                 kDefaultTimeout);
+}
+
+// Verifies that with maxRetransmits=0 (fully unreliable, unordered) the
+// messages queued during a total outage are dropped rather than delivered.
+TEST_P(DataChannelIntegrationTest, QueuedPacketsGetDroppedInUnreliableMode) {
+  // Assert the setup result, consistent with the other tests in this file;
+  // previously the return value was ignored.
+  ASSERT_TRUE(CreatePeerConnectionWrappers());
+  ConnectFakeSignaling();
+  DataChannelInit init;
+  init.maxRetransmits = 0;
+  init.ordered = false;
+  caller()->CreateDataChannel(&init);
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  ASSERT_TRUE_WAIT(callee()->data_channel(), kDefaultTimeout);
+  caller()->data_channel()->Send(DataBuffer("hello first"));
+  ASSERT_EQ_WAIT(1u, callee()->data_observer()->received_message_count(),
+                 kDefaultTimeout);
+  // Cause a temporary network outage
+  virtual_socket_server()->set_drop_probability(1.0);
+  // Send a few packets. Note that all get dropped only when all packets
+  // fit into the receiver receive window/congestion window, so that they
+  // actually get sent.
+  for (int i = 1; i <= 10; i++) {
+    caller()->data_channel()->Send(DataBuffer("Sent while blocked"));
+  }
+  // Nothing should be delivered during outage.
+  // We do a short wait to verify that delivery count is still 1.
+  WAIT(false, 10);
+  EXPECT_EQ(1u, callee()->data_observer()->received_message_count());
+  // Reverse the network outage.
+  virtual_socket_server()->set_drop_probability(0.0);
+  // Send a new packet, and wait for it to be delivered.
+  caller()->data_channel()->Send(DataBuffer("After block"));
+  EXPECT_EQ_WAIT("After block", callee()->data_observer()->last_message(),
+                 kDefaultTimeout);
+  // Some messages should be lost, but first and last message should have
+  // been delivered.
+  // First, check that the protocol guarantee is preserved.
+  EXPECT_GT(11u, callee()->data_observer()->received_message_count());
+  EXPECT_LE(2u, callee()->data_observer()->received_message_count());
+  // Then, check that observed behavior (lose all messages) has not changed
+  EXPECT_EQ(2u, callee()->data_observer()->received_message_count());
+}
+
+// Verifies that with a 1 ms maxPacketLifeTime the messages queued during a
+// total outage expire and are dropped rather than delivered.
+TEST_P(DataChannelIntegrationTest,
+       QueuedPacketsGetDroppedInLifetimeLimitedMode) {
+  // Assert the setup result, consistent with the other tests in this file;
+  // previously the return value was ignored.
+  ASSERT_TRUE(CreatePeerConnectionWrappers());
+  ConnectFakeSignaling();
+  DataChannelInit init;
+  init.maxRetransmitTime = 1;
+  init.ordered = false;
+  caller()->CreateDataChannel(&init);
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  ASSERT_TRUE_WAIT(callee()->data_channel(), kDefaultTimeout);
+  caller()->data_channel()->Send(DataBuffer("hello first"));
+  ASSERT_EQ_WAIT(1u, callee()->data_observer()->received_message_count(),
+                 kDefaultTimeout);
+  // Cause a temporary network outage
+  virtual_socket_server()->set_drop_probability(1.0);
+  for (int i = 1; i <= 200; i++) {
+    caller()->data_channel()->Send(DataBuffer("Sent while blocked"));
+  }
+  // Nothing should be delivered during outage.
+  // We do a short wait to verify that delivery count is still 1,
+  // and to make sure max packet lifetime (which is in ms) is exceeded.
+  WAIT(false, 10);
+  EXPECT_EQ(1u, callee()->data_observer()->received_message_count());
+  // Reverse the network outage.
+  virtual_socket_server()->set_drop_probability(0.0);
+  // Send a new packet, and wait for it to be delivered.
+  caller()->data_channel()->Send(DataBuffer("After block"));
+  EXPECT_EQ_WAIT("After block", callee()->data_observer()->last_message(),
+                 kDefaultTimeout);
+  // Some messages should be lost, but first and last message should have
+  // been delivered.
+  // First, check that the protocol guarantee is preserved.
+  EXPECT_GT(202u, callee()->data_observer()->received_message_count());
+  EXPECT_LE(2u, callee()->data_observer()->received_message_count());
+  // Then, check that observed behavior (lose some messages) has not changed
+  // DcSctp loses all messages. This is correct.
+  EXPECT_EQ(2u, callee()->data_observer()->received_message_count());
+}
+
+// Fills the send queue during a total outage on a maxRetransmits=0 channel
+// and checks that only the congestion-queued (never-sent) messages survive.
+TEST_P(DataChannelIntegrationTest,
+       DISABLED_ON_ANDROID(SomeQueuedPacketsGetDroppedInMaxRetransmitsMode)) {
+  // Assert the setup result, consistent with the other tests in this file;
+  // previously the return value was ignored.
+  ASSERT_TRUE(CreatePeerConnectionWrappers());
+  ConnectFakeSignaling();
+  DataChannelInit init;
+  init.maxRetransmits = 0;
+  init.ordered = false;
+  caller()->CreateDataChannel(&init);
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  ASSERT_TRUE_WAIT(callee()->data_channel(), kDefaultTimeout);
+  caller()->data_channel()->Send(DataBuffer("hello first"));
+  ASSERT_EQ_WAIT(1u, callee()->data_observer()->received_message_count(),
+                 kDefaultTimeout);
+  // Cause a temporary network outage
+  virtual_socket_server()->set_drop_probability(1.0);
+  // Fill the buffer until queued data starts to build
+  size_t packet_counter = 0;
+  while (caller()->data_channel()->buffered_amount() < 1 &&
+         packet_counter < 10000) {
+    packet_counter++;
+    caller()->data_channel()->Send(DataBuffer("Sent while blocked"));
+  }
+  if (caller()->data_channel()->buffered_amount()) {
+    RTC_LOG(LS_INFO) << "Buffered data after " << packet_counter << " packets";
+  } else {
+    RTC_LOG(LS_INFO) << "No buffered data after " << packet_counter
+                     << " packets";
+  }
+  // Nothing should be delivered during outage.
+  // We do a short wait to verify that delivery count is still 1.
+  WAIT(false, 10);
+  EXPECT_EQ(1u, callee()->data_observer()->received_message_count());
+  // Reverse the network outage.
+  virtual_socket_server()->set_drop_probability(0.0);
+  // Send a new packet, and wait for it to be delivered.
+  caller()->data_channel()->Send(DataBuffer("After block"));
+  EXPECT_EQ_WAIT("After block", callee()->data_observer()->last_message(),
+                 kDefaultTimeout);
+  // Some messages should be lost, but first and last message should have
+  // been delivered.
+  // Due to the fact that retransmissions are only counted when the packet
+  // goes on the wire, NOT when they are stalled in queue due to
+  // congestion, we expect some of the packets to be delivered, because
+  // congestion prevented them from being sent.
+  // Citation: https://tools.ietf.org/html/rfc7496#section-3.1
+
+  // First, check that the protocol guarantee is preserved.
+  EXPECT_GT(packet_counter,
+            callee()->data_observer()->received_message_count());
+  EXPECT_LE(2u, callee()->data_observer()->received_message_count());
+  // Then, check that observed behavior (lose between 100 and 200 messages)
+  // has not changed.
+  // Usrsctp behavior is different on Android (177) and other platforms (122).
+  // Dcsctp loses 432 packets.
+  EXPECT_GT(2 + packet_counter - 100,
+            callee()->data_observer()->received_message_count());
+  EXPECT_LT(2 + packet_counter - 500,
+            callee()->data_observer()->received_message_count());
+}
+
+// Instantiate the parameterized suite for both SDP semantics combined with a
+// boolean parameter (presumably the allow_media() toggle used by the tests
+// above — confirm against the fixture's parameter unpacking).
+INSTANTIATE_TEST_SUITE_P(DataChannelIntegrationTest,
+                         DataChannelIntegrationTest,
+                         Combine(Values(SdpSemantics::kPlanB_DEPRECATED,
+                                        SdpSemantics::kUnifiedPlan),
+                                 testing::Bool()));
+
+// Checks that an SCTP data channel reaches the connected/open state when
+// negotiated together with audio and video (i.e. bundled on one transport).
+TEST_F(DataChannelIntegrationTestUnifiedPlan,
+       EndToEndCallWithBundledSctpDataChannel) {
+  ASSERT_TRUE(CreatePeerConnectionWrappers());
+  ConnectFakeSignaling();
+  caller()->CreateDataChannel();
+  caller()->AddAudioVideoTracks();
+  callee()->AddAudioVideoTracks();
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  ASSERT_TRUE_WAIT(caller()->pc()->GetSctpTransport(), kDefaultTimeout);
+  ASSERT_EQ_WAIT(SctpTransportState::kConnected,
+                 caller()->pc()->GetSctpTransport()->Information().state(),
+                 kDefaultTimeout);
+  ASSERT_TRUE_WAIT(callee()->data_channel(), kDefaultTimeout);
+  ASSERT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout);
+}
+
+// Checks that a connection carrying only a data channel (no media) opens on
+// both ends.
+TEST_F(DataChannelIntegrationTestUnifiedPlan,
+       EndToEndCallWithDataChannelOnlyConnects) {
+  ASSERT_TRUE(CreatePeerConnectionWrappers());
+  ConnectFakeSignaling();
+  caller()->CreateDataChannel();
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  ASSERT_TRUE_WAIT(callee()->data_channel(), kDefaultTimeout);
+  ASSERT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout);
+  // Once the callee is open, the caller side must already be open too.
+  ASSERT_TRUE(caller()->data_observer()->IsOpen());
+}
+
+// Closing the caller's channel must propagate a close to the callee.
+TEST_F(DataChannelIntegrationTestUnifiedPlan, DataChannelClosesWhenClosed) {
+  ASSERT_TRUE(CreatePeerConnectionWrappers());
+  ConnectFakeSignaling();
+  caller()->CreateDataChannel();
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  ASSERT_TRUE_WAIT(callee()->data_observer(), kDefaultTimeout);
+  ASSERT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout);
+  caller()->data_channel()->Close();
+  ASSERT_TRUE_WAIT(!callee()->data_observer()->IsOpen(), kDefaultTimeout);
+}
+
+// Closing the callee's channel must propagate a close back to the caller.
+TEST_F(DataChannelIntegrationTestUnifiedPlan,
+       DataChannelClosesWhenClosedReverse) {
+  ASSERT_TRUE(CreatePeerConnectionWrappers());
+  ConnectFakeSignaling();
+  caller()->CreateDataChannel();
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  ASSERT_TRUE_WAIT(callee()->data_observer(), kDefaultTimeout);
+  ASSERT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout);
+  callee()->data_channel()->Close();
+  ASSERT_TRUE_WAIT(!caller()->data_observer()->IsOpen(), kDefaultTimeout);
+}
+
+// Closing the caller's entire PeerConnection must close the remote channel.
+TEST_F(DataChannelIntegrationTestUnifiedPlan,
+       DataChannelClosesWhenPeerConnectionClosed) {
+  ASSERT_TRUE(CreatePeerConnectionWrappers());
+  ConnectFakeSignaling();
+  caller()->CreateDataChannel();
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  ASSERT_TRUE_WAIT(callee()->data_observer(), kDefaultTimeout);
+  ASSERT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout);
+  caller()->pc()->Close();
+  ASSERT_TRUE_WAIT(!callee()->data_observer()->IsOpen(), kDefaultTimeout);
+}
+
+#endif // WEBRTC_HAVE_SCTP
+
+} // namespace
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/data_channel_unittest.cc b/third_party/libwebrtc/pc/data_channel_unittest.cc
new file mode 100644
index 0000000000..9b84a1be61
--- /dev/null
+++ b/third_party/libwebrtc/pc/data_channel_unittest.cc
@@ -0,0 +1,1152 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdint.h>
+#include <string.h>
+
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "api/data_channel_interface.h"
+#include "api/rtc_error.h"
+#include "api/scoped_refptr.h"
+#include "api/transport/data_channel_transport_interface.h"
+#include "media/base/media_channel.h"
+#include "media/sctp/sctp_transport_internal.h"
+#include "pc/sctp_data_channel.h"
+#include "pc/sctp_utils.h"
+#include "pc/test/fake_data_channel_controller.h"
+#include "rtc_base/copy_on_write_buffer.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/null_socket_server.h"
+#include "rtc_base/ssl_stream_adapter.h"
+#include "rtc_base/thread.h"
+#include "test/gtest.h"
+#include "test/run_loop.h"
+
+#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
+#include "test/testsupport/rtc_expect_death.h"
+#endif // RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
+
+namespace webrtc {
+
+namespace {
+
+// Default timeout in milliseconds for the *_WAIT gunit macros in this file.
+static constexpr int kDefaultTimeout = 10000;
+
+// Test observer that simply counts each DataChannelObserver callback it
+// receives; the counters are inspected and reset by the tests below.
+class FakeDataChannelObserver : public DataChannelObserver {
+ public:
+  // The observer is expected to live on the signaling side, never on the
+  // network thread.
+  FakeDataChannelObserver() { RTC_DCHECK(!IsOkToCallOnTheNetworkThread()); }
+
+  // DataChannelObserver implementation: each override bumps its counter.
+  void OnStateChange() override { ++state_change_events_; }
+
+  void OnBufferedAmountChange(uint64_t previous_amount) override {
+    ++buffered_amount_events_;
+  }
+
+  void OnMessage(const DataBuffer& buffer) override { ++message_count_; }
+
+  // Counter accessors.
+  size_t messages_received() const { return message_count_; }
+
+  size_t on_state_change_count() const { return state_change_events_; }
+
+  size_t on_buffered_amount_change_count() const {
+    return buffered_amount_events_;
+  }
+
+  // Counter reset helpers.
+  void ResetOnStateChangeCount() { state_change_events_ = 0u; }
+
+  void ResetOnBufferedAmountChangeCount() { buffered_amount_events_ = 0u; }
+
+ private:
+  size_t message_count_ = 0u;
+  size_t state_change_events_ = 0u;
+  size_t buffered_amount_events_ = 0u;
+};
+
+// Fixture for SctpDataChannel tests. Owns a dedicated network thread plus a
+// FakeDataChannelController, creates one channel ("test") and wraps it in the
+// thread-safe proxy that the tests exercise. The raw `inner_channel_` is only
+// touched from the network thread (via BlockingCall).
+class SctpDataChannelTest : public ::testing::Test {
+ protected:
+  SctpDataChannelTest()
+      : network_thread_(std::make_unique<rtc::NullSocketServer>()),
+        controller_(new FakeDataChannelController(&network_thread_)) {
+    network_thread_.Start();
+    inner_channel_ = controller_->CreateDataChannel("test", init_);
+    channel_ =
+        webrtc::SctpDataChannel::CreateProxy(inner_channel_, signaling_safety_);
+  }
+  ~SctpDataChannelTest() override {
+    // Drain pending signaling-thread tasks and invalidate the safety flag
+    // before tearing down, so no task runs against destroyed members.
+    run_loop_.Flush();
+    signaling_safety_->SetNotAlive();
+    inner_channel_ = nullptr;
+    channel_ = nullptr;
+    controller_.reset();
+    observer_.reset();
+    network_thread_.Stop();
+  }
+
+  // Simulates the transport becoming usable: assigns sid 0 (if unset) and
+  // notifies the channel on the network thread, then marks it ready to send.
+  void SetChannelReady() {
+    controller_->set_transport_available(true);
+    StreamId sid(0);
+    network_thread_.BlockingCall([&]() {
+      RTC_DCHECK_RUN_ON(&network_thread_);
+      if (!inner_channel_->sid_n().HasValue()) {
+        inner_channel_->SetSctpSid_n(sid);
+        controller_->AddSctpDataStream(sid);
+      }
+      inner_channel_->OnTransportChannelCreated();
+    });
+    controller_->set_ready_to_send(true);
+    run_loop_.Flush();
+  }
+
+  // TODO(bugs.webrtc.org/11547): This mirrors what the DataChannelController
+  // currently does when assigning stream ids to a channel. Right now the sid
+  // in the SctpDataChannel code is (still) tied to the signaling thread, but
+  // the `AddSctpDataStream` operation is a bridge to the transport and needs
+  // to run on the network thread.
+  void SetChannelSid(const rtc::scoped_refptr<SctpDataChannel>& channel,
+                     StreamId sid) {
+    RTC_DCHECK(sid.HasValue());
+    network_thread_.BlockingCall([&]() {
+      channel->SetSctpSid_n(sid);
+      controller_->AddSctpDataStream(sid);
+    });
+  }
+
+  // Installs a fresh FakeDataChannelObserver on the proxy channel.
+  void AddObserver() {
+    observer_.reset(new FakeDataChannelObserver());
+    channel_->RegisterObserver(observer_.get());
+  }
+
+  // Wait for queued up methods to run on the network thread.
+  void FlushNetworkThread() {
+    RTC_DCHECK_RUN_ON(run_loop_.task_queue());
+    network_thread_.BlockingCall([] {});
+  }
+
+  // Used to complete pending methods on the network thread
+  // that might queue up methods on the signaling (main) thread
+  // that are run too.
+  void FlushNetworkThreadAndPendingOperations() {
+    FlushNetworkThread();
+    run_loop_.Flush();
+  }
+
+  test::RunLoop run_loop_;      // Serves as the signaling (main) thread.
+  rtc::Thread network_thread_;  // Simulated network thread (no real sockets).
+  InternalDataChannelInit init_;
+  rtc::scoped_refptr<PendingTaskSafetyFlag> signaling_safety_ =
+      PendingTaskSafetyFlag::Create();
+  std::unique_ptr<FakeDataChannelController> controller_;
+  std::unique_ptr<FakeDataChannelObserver> observer_;
+  rtc::scoped_refptr<SctpDataChannel> inner_channel_;  // Network-thread view.
+  rtc::scoped_refptr<DataChannelInterface> channel_;   // Proxy used by tests.
+};
+
+// Verifies that the DataChannelInterface configuration getters reflect the
+// InternalDataChannelInit the channel was created with, before and after the
+// sid is assigned.
+TEST_F(SctpDataChannelTest, VerifyConfigurationGetters) {
+  EXPECT_EQ(channel_->label(), "test");
+  EXPECT_EQ(channel_->protocol(), init_.protocol);
+
+  // Note that the `init_.reliable` field is deprecated, so we directly set
+  // it here to match spec behavior for purposes of checking the `reliable()`
+  // getter.
+  init_.reliable = (!init_.maxRetransmits && !init_.maxRetransmitTime);
+  EXPECT_EQ(channel_->reliable(), init_.reliable);
+  EXPECT_EQ(channel_->ordered(), init_.ordered);
+  EXPECT_EQ(channel_->negotiated(), init_.negotiated);
+  EXPECT_EQ(channel_->priority(), Priority::kLow);
+  // The deprecated getters report "unset" as uint16_t(-1).
+  EXPECT_EQ(channel_->maxRetransmitTime(), static_cast<uint16_t>(-1));
+  EXPECT_EQ(channel_->maxPacketLifeTime(), init_.maxRetransmitTime);
+  EXPECT_EQ(channel_->maxRetransmits(), static_cast<uint16_t>(-1));
+  EXPECT_EQ(channel_->maxRetransmitsOpt(), init_.maxRetransmits);
+
+  // Check the non-const part of the configuration.
+  EXPECT_EQ(channel_->id(), init_.id);
+  network_thread_.BlockingCall(
+      [&]() { EXPECT_EQ(inner_channel_->sid_n(), StreamId()); });
+
+  // After the transport is ready the fixture assigns sid 0.
+  SetChannelReady();
+  EXPECT_EQ(channel_->id(), 0);
+  network_thread_.BlockingCall(
+      [&]() { EXPECT_EQ(inner_channel_->sid_n(), StreamId(0)); });
+}
+
+// Verifies that the data channel is connected to the transport after creation.
+// Verifies that the data channel is connected to the transport after creation.
+TEST_F(SctpDataChannelTest, ConnectedToTransportOnCreated) {
+  controller_->set_transport_available(true);
+  rtc::scoped_refptr<SctpDataChannel> dc =
+      controller_->CreateDataChannel("test1", init_);
+  EXPECT_TRUE(controller_->IsConnected(dc.get()));
+
+  // The sid is not set yet, so it should not have added the streams.
+  StreamId sid = network_thread_.BlockingCall([&]() { return dc->sid_n(); });
+  EXPECT_FALSE(controller_->IsStreamAdded(sid));
+
+  // Assigning a sid registers the stream with the controller.
+  SetChannelSid(dc, StreamId(0));
+  sid = network_thread_.BlockingCall([&]() { return dc->sid_n(); });
+  EXPECT_TRUE(controller_->IsStreamAdded(sid));
+}
+
+// Tests the state of the data channel.
+// Tests the state of the data channel.
+TEST_F(SctpDataChannelTest, StateTransition) {
+  AddObserver();
+
+  EXPECT_EQ(DataChannelInterface::kConnecting, channel_->state());
+  EXPECT_EQ(observer_->on_state_change_count(), 0u);
+  SetChannelReady();
+
+  EXPECT_EQ(DataChannelInterface::kOpen, channel_->state());
+  EXPECT_EQ(observer_->on_state_change_count(), 1u);
+
+  // `Close()` should trigger two state changes, first `kClosing`, then
+  // `kClosed` (bringing the observed total to 3).
+  channel_->Close();
+  // The (simulated) transport close notifications runs on the network thread
+  // and posts a completion notification to the signaling (current) thread.
+  // Allow that operation to complete before checking the state.
+  run_loop_.Flush();
+  EXPECT_EQ(DataChannelInterface::kClosed, channel_->state());
+  EXPECT_EQ(observer_->on_state_change_count(), 3u);
+  EXPECT_TRUE(channel_->error().ok());
+  // Verifies that it's disconnected from the transport.
+  EXPECT_FALSE(controller_->IsConnected(inner_channel_.get()));
+}
+
+// Tests that DataChannel::buffered_amount() is correct after the channel is
+// blocked.
+TEST_F(SctpDataChannelTest, BufferedAmountWhenBlocked) {
+  AddObserver();
+  SetChannelReady();
+  DataBuffer buffer("abcd");
+  size_t successful_sends = 0;
+  auto send_complete = [&](RTCError err) {
+    EXPECT_TRUE(err.ok());
+    ++successful_sends;
+  };
+  // Unblocked send: completes immediately with nothing left buffered.
+  channel_->SendAsync(buffer, send_complete);
+  FlushNetworkThreadAndPendingOperations();
+  EXPECT_EQ(channel_->buffered_amount(), 0u);
+  size_t successful_send_count = 1;
+  EXPECT_EQ(successful_send_count, successful_sends);
+  EXPECT_EQ(successful_send_count,
+            observer_->on_buffered_amount_change_count());
+
+  // Blocked sends still "succeed" (they are accepted and buffered), so the
+  // buffered amount grows by the full payload size per send.
+  controller_->set_send_blocked(true);
+  const int number_of_packets = 3;
+  for (int i = 0; i < number_of_packets; ++i) {
+    channel_->SendAsync(buffer, send_complete);
+    ++successful_send_count;
+  }
+  FlushNetworkThreadAndPendingOperations();
+  EXPECT_EQ(buffer.data.size() * number_of_packets,
+            channel_->buffered_amount());
+  EXPECT_EQ(successful_send_count, successful_sends);
+
+  // An event should not have been fired for buffered amount.
+  EXPECT_EQ(1u, observer_->on_buffered_amount_change_count());
+
+  // Now buffered amount events should get fired and the value
+  // get down to 0u.
+  controller_->set_send_blocked(false);
+  run_loop_.Flush();
+  EXPECT_EQ(channel_->buffered_amount(), 0u);
+  EXPECT_EQ(successful_send_count, successful_sends);
+  EXPECT_EQ(successful_send_count,
+            observer_->on_buffered_amount_change_count());
+}
+
+// TODO(tommi): This test uses `Send()`. Remove once fully deprecated.
+// TODO(tommi): This test uses `Send()`. Remove once fully deprecated.
+// Same scenario as BufferedAmountWhenBlocked, via the synchronous Send() API.
+TEST_F(SctpDataChannelTest, DeprecatedBufferedAmountWhenBlocked) {
+  AddObserver();
+  SetChannelReady();
+  DataBuffer buffer("abcd");
+  EXPECT_TRUE(channel_->Send(buffer));
+  size_t successful_send_count = 1;
+
+  run_loop_.Flush();
+  EXPECT_EQ(0U, channel_->buffered_amount());
+  EXPECT_EQ(successful_send_count,
+            observer_->on_buffered_amount_change_count());
+
+  controller_->set_send_blocked(true);
+
+  // Blocked sends are accepted and buffered; no buffered-amount events fire
+  // until the channel is unblocked.
+  const int number_of_packets = 3;
+  for (int i = 0; i < number_of_packets; ++i) {
+    EXPECT_TRUE(channel_->Send(buffer));
+  }
+  EXPECT_EQ(buffer.data.size() * number_of_packets,
+            channel_->buffered_amount());
+  EXPECT_EQ(successful_send_count,
+            observer_->on_buffered_amount_change_count());
+
+  controller_->set_send_blocked(false);
+  run_loop_.Flush();
+  successful_send_count += number_of_packets;
+  EXPECT_EQ(channel_->buffered_amount(), 0u);
+  EXPECT_EQ(successful_send_count,
+            observer_->on_buffered_amount_change_count());
+}
+
+// Tests that the queued data are sent when the channel transitions from blocked
+// to unblocked.
+// Tests that the queued data are sent when the channel transitions from blocked
+// to unblocked.
+TEST_F(SctpDataChannelTest, QueuedDataSentWhenUnblocked) {
+  AddObserver();
+  SetChannelReady();
+  DataBuffer buffer("abcd");
+  controller_->set_send_blocked(true);
+  size_t successful_send = 0u;
+  auto send_complete = [&](RTCError err) {
+    EXPECT_TRUE(err.ok());
+    ++successful_send;
+  };
+  // The send completes (is accepted) even though transmission is blocked.
+  channel_->SendAsync(buffer, send_complete);
+  FlushNetworkThreadAndPendingOperations();
+  EXPECT_EQ(1U, successful_send);
+  EXPECT_EQ(0U, observer_->on_buffered_amount_change_count());
+
+  // Unblocking drains the queue: buffered amount drops to zero and exactly
+  // one buffered-amount-change event fires.
+  controller_->set_send_blocked(false);
+  SetChannelReady();
+  EXPECT_EQ(channel_->buffered_amount(), 0u);
+  EXPECT_EQ(observer_->on_buffered_amount_change_count(), 1u);
+}
+
+// TODO(tommi): This test uses `Send()`. Remove once fully deprecated.
+// TODO(tommi): This test uses `Send()`. Remove once fully deprecated.
+// Same scenario as QueuedDataSentWhenUnblocked, via the synchronous Send().
+TEST_F(SctpDataChannelTest, DeprecatedQueuedDataSentWhenUnblocked) {
+  AddObserver();
+  SetChannelReady();
+  DataBuffer buffer("abcd");
+  controller_->set_send_blocked(true);
+  EXPECT_TRUE(channel_->Send(buffer));
+
+  EXPECT_EQ(0U, observer_->on_buffered_amount_change_count());
+
+  controller_->set_send_blocked(false);
+  SetChannelReady();
+  EXPECT_EQ(0U, channel_->buffered_amount());
+  EXPECT_EQ(1U, observer_->on_buffered_amount_change_count());
+}
+
+// Tests that no crash when the channel is blocked right away while trying to
+// send queued data.
+// Tests that no crash when the channel is blocked right away while trying to
+// send queued data.
+TEST_F(SctpDataChannelTest, BlockedWhenSendQueuedDataNoCrash) {
+  AddObserver();
+  SetChannelReady();
+  DataBuffer buffer("abcd");
+  controller_->set_send_blocked(true);
+  size_t successful_send = 0u;
+  auto send_complete = [&](RTCError err) {
+    EXPECT_TRUE(err.ok());
+    ++successful_send;
+  };
+  channel_->SendAsync(buffer, send_complete);
+  FlushNetworkThreadAndPendingOperations();
+  EXPECT_EQ(1U, successful_send);
+  EXPECT_EQ(0U, observer_->on_buffered_amount_change_count());
+
+  // Set channel ready while it is still blocked.
+  // The data stays buffered and no event fires.
+  SetChannelReady();
+  EXPECT_EQ(buffer.size(), channel_->buffered_amount());
+  EXPECT_EQ(0U, observer_->on_buffered_amount_change_count());
+
+  // Unblock the channel to send queued data again, there should be no crash.
+  controller_->set_send_blocked(false);
+  SetChannelReady();
+  EXPECT_EQ(0U, channel_->buffered_amount());
+  EXPECT_EQ(1U, observer_->on_buffered_amount_change_count());
+}
+
+// TODO(tommi): This test uses `Send()`. Remove once fully deprecated.
+TEST_F(SctpDataChannelTest, DeprecatedBlockedWhenSendQueuedDataNoCrash) {
+ AddObserver();
+ SetChannelReady();
+ DataBuffer buffer("abcd");
+ controller_->set_send_blocked(true);
+ EXPECT_TRUE(channel_->Send(buffer));
+ EXPECT_EQ(0U, observer_->on_buffered_amount_change_count());
+
+ // Set channel ready while it is still blocked.
+ SetChannelReady();
+ EXPECT_EQ(buffer.size(), channel_->buffered_amount());
+ EXPECT_EQ(0U, observer_->on_buffered_amount_change_count());
+
+ // Unblock the channel to send queued data again, there should be no crash.
+ controller_->set_send_blocked(false);
+ SetChannelReady();
+ EXPECT_EQ(0U, channel_->buffered_amount());
+ EXPECT_EQ(1U, observer_->on_buffered_amount_change_count());
+}
+
+// Tests that DataChannel::messages_sent() and DataChannel::bytes_sent() are
+// correct, sending data both while unblocked and while blocked.
+TEST_F(SctpDataChannelTest, VerifyMessagesAndBytesSent) {
+ AddObserver();
+ SetChannelReady();
+ std::vector<DataBuffer> buffers({
+ DataBuffer("message 1"),
+ DataBuffer("msg 2"),
+ DataBuffer("message three"),
+ DataBuffer("quadra message"),
+ DataBuffer("fifthmsg"),
+ DataBuffer("message of the beast"),
+ });
+
+ // Default values.
+ EXPECT_EQ(0U, channel_->messages_sent());
+ EXPECT_EQ(0U, channel_->bytes_sent());
+
+ // Send three buffers while not blocked.
+ controller_->set_send_blocked(false);
+ for (int i : {0, 1, 2}) {
+ channel_->SendAsync(buffers[i], nullptr);
+ }
+ FlushNetworkThreadAndPendingOperations();
+
+ size_t bytes_sent = buffers[0].size() + buffers[1].size() + buffers[2].size();
+ EXPECT_EQ_WAIT(0U, channel_->buffered_amount(), kDefaultTimeout);
+ EXPECT_EQ(3U, channel_->messages_sent());
+ EXPECT_EQ(bytes_sent, channel_->bytes_sent());
+
+ // Send three buffers while blocked, queuing the buffers.
+ controller_->set_send_blocked(true);
+ for (int i : {3, 4, 5}) {
+ channel_->SendAsync(buffers[i], nullptr);
+ }
+ FlushNetworkThreadAndPendingOperations();
+ size_t bytes_queued =
+ buffers[3].size() + buffers[4].size() + buffers[5].size();
+ EXPECT_EQ(bytes_queued, channel_->buffered_amount());
+ EXPECT_EQ(3U, channel_->messages_sent());
+ EXPECT_EQ(bytes_sent, channel_->bytes_sent());
+
+ // Unblock and make sure everything was sent.
+ controller_->set_send_blocked(false);
+ EXPECT_EQ_WAIT(0U, channel_->buffered_amount(), kDefaultTimeout);
+ bytes_sent += bytes_queued;
+ EXPECT_EQ(6U, channel_->messages_sent());
+ EXPECT_EQ(bytes_sent, channel_->bytes_sent());
+}
+
+// TODO(tommi): This test uses `Send()`. Remove once fully deprecated.
+TEST_F(SctpDataChannelTest, DeprecatedVerifyMessagesAndBytesSent) {
+ AddObserver();
+ SetChannelReady();
+ std::vector<DataBuffer> buffers({
+ DataBuffer("message 1"),
+ DataBuffer("msg 2"),
+ DataBuffer("message three"),
+ DataBuffer("quadra message"),
+ DataBuffer("fifthmsg"),
+ DataBuffer("message of the beast"),
+ });
+
+ // Default values.
+ EXPECT_EQ(0U, channel_->messages_sent());
+ EXPECT_EQ(0U, channel_->bytes_sent());
+
+ // Send three buffers while not blocked.
+ controller_->set_send_blocked(false);
+ EXPECT_TRUE(channel_->Send(buffers[0]));
+ EXPECT_TRUE(channel_->Send(buffers[1]));
+ EXPECT_TRUE(channel_->Send(buffers[2]));
+ size_t bytes_sent = buffers[0].size() + buffers[1].size() + buffers[2].size();
+ EXPECT_EQ_WAIT(0U, channel_->buffered_amount(), kDefaultTimeout);
+ EXPECT_EQ(3U, channel_->messages_sent());
+ EXPECT_EQ(bytes_sent, channel_->bytes_sent());
+
+ // Send three buffers while blocked, queuing the buffers.
+ controller_->set_send_blocked(true);
+ EXPECT_TRUE(channel_->Send(buffers[3]));
+ EXPECT_TRUE(channel_->Send(buffers[4]));
+ EXPECT_TRUE(channel_->Send(buffers[5]));
+ size_t bytes_queued =
+ buffers[3].size() + buffers[4].size() + buffers[5].size();
+ EXPECT_EQ(bytes_queued, channel_->buffered_amount());
+ EXPECT_EQ(3U, channel_->messages_sent());
+ EXPECT_EQ(bytes_sent, channel_->bytes_sent());
+
+ // Unblock and make sure everything was sent.
+ controller_->set_send_blocked(false);
+ EXPECT_EQ_WAIT(0U, channel_->buffered_amount(), kDefaultTimeout);
+ bytes_sent += bytes_queued;
+ EXPECT_EQ(6U, channel_->messages_sent());
+ EXPECT_EQ(bytes_sent, channel_->bytes_sent());
+}
+
+// Tests that the queued control message is sent when channel is ready.
+TEST_F(SctpDataChannelTest, OpenMessageSent) {
+ // Initially the id is unassigned.
+ EXPECT_EQ(-1, channel_->id());
+
+ SetChannelReady();
+ EXPECT_GE(channel_->id(), 0);
+ EXPECT_EQ(DataMessageType::kControl,
+ controller_->last_send_data_params().type);
+ EXPECT_EQ(controller_->last_sid(), channel_->id());
+}
+
+TEST_F(SctpDataChannelTest, QueuedOpenMessageSent) {
+ controller_->set_send_blocked(true);
+ SetChannelReady();
+ controller_->set_send_blocked(false);
+
+ EXPECT_EQ(DataMessageType::kControl,
+ controller_->last_send_data_params().type);
+ EXPECT_EQ(controller_->last_sid(), channel_->id());
+}
+
+// Tests that the DataChannel created after transport gets ready can enter OPEN
+// state.
+TEST_F(SctpDataChannelTest, LateCreatedChannelTransitionToOpen) {
+ SetChannelReady();
+ InternalDataChannelInit init;
+ init.id = 1;
+ auto dc = webrtc::SctpDataChannel::CreateProxy(
+ controller_->CreateDataChannel("test1", init), signaling_safety_);
+ EXPECT_EQ(DataChannelInterface::kOpen, dc->state());
+}
+
+// Tests that an unordered DataChannel sends data as ordered until the OPEN_ACK
+// message is received.
+TEST_F(SctpDataChannelTest, SendUnorderedAfterReceivesOpenAck) {
+ SetChannelReady();
+ InternalDataChannelInit init;
+ init.id = 1;
+ init.ordered = false;
+ rtc::scoped_refptr<SctpDataChannel> dc =
+ controller_->CreateDataChannel("test1", init);
+ auto proxy = webrtc::SctpDataChannel::CreateProxy(dc, signaling_safety_);
+
+ EXPECT_EQ_WAIT(DataChannelInterface::kOpen, proxy->state(), 1000);
+
+ // Sends a message and verifies it's ordered.
+ DataBuffer buffer("some data");
+ proxy->SendAsync(buffer, nullptr);
+ EXPECT_TRUE(controller_->last_send_data_params().ordered);
+
+ // Emulates receiving an OPEN_ACK message.
+ rtc::CopyOnWriteBuffer payload;
+ WriteDataChannelOpenAckMessage(&payload);
+ network_thread_.BlockingCall(
+ [&] { dc->OnDataReceived(DataMessageType::kControl, payload); });
+
+ // Sends another message and verifies it's unordered.
+ proxy->SendAsync(buffer, nullptr);
+ FlushNetworkThreadAndPendingOperations();
+ EXPECT_FALSE(controller_->last_send_data_params().ordered);
+}
+
+// TODO(tommi): This test uses `Send()`. Remove once fully deprecated.
+TEST_F(SctpDataChannelTest, DeprecatedSendUnorderedAfterReceivesOpenAck) {
+ SetChannelReady();
+ InternalDataChannelInit init;
+ init.id = 1;
+ init.ordered = false;
+ rtc::scoped_refptr<SctpDataChannel> dc =
+ controller_->CreateDataChannel("test1", init);
+ auto proxy = webrtc::SctpDataChannel::CreateProxy(dc, signaling_safety_);
+
+ EXPECT_EQ_WAIT(DataChannelInterface::kOpen, proxy->state(), 1000);
+
+ // Sends a message and verifies it's ordered.
+ DataBuffer buffer("some data");
+ ASSERT_TRUE(proxy->Send(buffer));
+ EXPECT_TRUE(controller_->last_send_data_params().ordered);
+
+ // Emulates receiving an OPEN_ACK message.
+ rtc::CopyOnWriteBuffer payload;
+ WriteDataChannelOpenAckMessage(&payload);
+ network_thread_.BlockingCall(
+ [&] { dc->OnDataReceived(DataMessageType::kControl, payload); });
+
+ // Sends another message and verifies it's unordered.
+ ASSERT_TRUE(proxy->Send(buffer));
+ EXPECT_FALSE(controller_->last_send_data_params().ordered);
+}
+
+// Tests that an unordered DataChannel sends unordered data after any DATA
+// message is received.
+TEST_F(SctpDataChannelTest, SendUnorderedAfterReceiveData) {
+ SetChannelReady();
+ InternalDataChannelInit init;
+ init.id = 1;
+ init.ordered = false;
+ rtc::scoped_refptr<SctpDataChannel> dc =
+ controller_->CreateDataChannel("test1", init);
+ auto proxy = webrtc::SctpDataChannel::CreateProxy(dc, signaling_safety_);
+
+ EXPECT_EQ_WAIT(DataChannelInterface::kOpen, proxy->state(), 1000);
+
+ // Emulates receiving a DATA message.
+ DataBuffer buffer("data");
+ network_thread_.BlockingCall(
+ [&] { dc->OnDataReceived(DataMessageType::kText, buffer.data); });
+
+ // Sends a message and verifies it's unordered.
+ proxy->SendAsync(buffer, nullptr);
+ FlushNetworkThreadAndPendingOperations();
+ EXPECT_FALSE(controller_->last_send_data_params().ordered);
+}
+
+// TODO(tommi): This test uses `Send()`. Remove once fully deprecated.
+TEST_F(SctpDataChannelTest, DeprecatedSendUnorderedAfterReceiveData) {
+ SetChannelReady();
+ InternalDataChannelInit init;
+ init.id = 1;
+ init.ordered = false;
+ rtc::scoped_refptr<SctpDataChannel> dc =
+ controller_->CreateDataChannel("test1", init);
+ auto proxy = webrtc::SctpDataChannel::CreateProxy(dc, signaling_safety_);
+
+ EXPECT_EQ_WAIT(DataChannelInterface::kOpen, proxy->state(), 1000);
+
+ // Emulates receiving a DATA message.
+ DataBuffer buffer("data");
+ network_thread_.BlockingCall(
+ [&] { dc->OnDataReceived(DataMessageType::kText, buffer.data); });
+
+ // Sends a message and verifies it's unordered.
+ ASSERT_TRUE(proxy->Send(buffer));
+ EXPECT_FALSE(controller_->last_send_data_params().ordered);
+}
+
+// Tests that the channel can't open until it's successfully sent the OPEN
+// message.
+TEST_F(SctpDataChannelTest, OpenWaitsForOpenMesssage) {
+ DataBuffer buffer("foo");
+
+ controller_->set_send_blocked(true);
+ SetChannelReady();
+ EXPECT_EQ(DataChannelInterface::kConnecting, channel_->state());
+ controller_->set_send_blocked(false);
+ EXPECT_EQ_WAIT(DataChannelInterface::kOpen, channel_->state(), 1000);
+ EXPECT_EQ(DataMessageType::kControl,
+ controller_->last_send_data_params().type);
+}
+
+// Tests that close first makes sure all queued data gets sent.
+TEST_F(SctpDataChannelTest, QueuedCloseFlushes) {
+ DataBuffer buffer("foo");
+
+ controller_->set_send_blocked(true);
+ SetChannelReady();
+ EXPECT_EQ(DataChannelInterface::kConnecting, channel_->state());
+ controller_->set_send_blocked(false);
+ EXPECT_EQ_WAIT(DataChannelInterface::kOpen, channel_->state(), 1000);
+ controller_->set_send_blocked(true);
+ channel_->SendAsync(buffer, nullptr);
+ channel_->Close();
+ controller_->set_send_blocked(false);
+ EXPECT_EQ_WAIT(DataChannelInterface::kClosed, channel_->state(), 1000);
+ EXPECT_TRUE(channel_->error().ok());
+ EXPECT_EQ(DataMessageType::kText, controller_->last_send_data_params().type);
+}
+
+// TODO(tommi): This test uses `Send()`. Remove once fully deprecated.
+TEST_F(SctpDataChannelTest, DeprecatedQueuedCloseFlushes) {
+ DataBuffer buffer("foo");
+
+ controller_->set_send_blocked(true);
+ SetChannelReady();
+ EXPECT_EQ(DataChannelInterface::kConnecting, channel_->state());
+ controller_->set_send_blocked(false);
+ EXPECT_EQ_WAIT(DataChannelInterface::kOpen, channel_->state(), 1000);
+ controller_->set_send_blocked(true);
+ channel_->Send(buffer);
+ channel_->Close();
+ controller_->set_send_blocked(false);
+ EXPECT_EQ_WAIT(DataChannelInterface::kClosed, channel_->state(), 1000);
+ EXPECT_TRUE(channel_->error().ok());
+ EXPECT_EQ(DataMessageType::kText, controller_->last_send_data_params().type);
+}
+
+// Tests that messages are sent with the right id.
+TEST_F(SctpDataChannelTest, SendDataId) {
+ SetChannelSid(inner_channel_, StreamId(1));
+ SetChannelReady();
+ DataBuffer buffer("data");
+ channel_->SendAsync(buffer, nullptr);
+ FlushNetworkThreadAndPendingOperations();
+ EXPECT_EQ(1, controller_->last_sid());
+}
+
+// TODO(tommi): This test uses `Send()`. Remove once fully deprecated.
+TEST_F(SctpDataChannelTest, DeprecatedSendDataId) {
+ SetChannelSid(inner_channel_, StreamId(1));
+ SetChannelReady();
+ DataBuffer buffer("data");
+ EXPECT_TRUE(channel_->Send(buffer));
+ EXPECT_EQ(1, controller_->last_sid());
+}
+
+// Tests that the incoming messages with right ids are accepted.
+TEST_F(SctpDataChannelTest, ReceiveDataWithValidId) {
+ SetChannelSid(inner_channel_, StreamId(1));
+ SetChannelReady();
+
+ AddObserver();
+
+ DataBuffer buffer("abcd");
+ network_thread_.BlockingCall([&] {
+ inner_channel_->OnDataReceived(DataMessageType::kText, buffer.data);
+ });
+ run_loop_.Flush();
+ EXPECT_EQ(1U, observer_->messages_received());
+}
+
+// Tests that no CONTROL message is sent if the datachannel is negotiated and
+// not created from an OPEN message.
+TEST_F(SctpDataChannelTest, NoMsgSentIfNegotiatedAndNotFromOpenMsg) {
+ InternalDataChannelInit config;
+ config.id = 1;
+ config.negotiated = true;
+ config.open_handshake_role = InternalDataChannelInit::kNone;
+
+ SetChannelReady();
+ rtc::scoped_refptr<SctpDataChannel> dc =
+ controller_->CreateDataChannel("test1", config);
+ auto proxy = webrtc::SctpDataChannel::CreateProxy(dc, signaling_safety_);
+
+ EXPECT_EQ_WAIT(DataChannelInterface::kOpen, proxy->state(), 1000);
+ EXPECT_EQ(0, controller_->last_sid());
+}
+
+// Tests that DataChannel::messages_received() and DataChannel::bytes_received()
+// are correct, receiving data both while not open and while open.
+TEST_F(SctpDataChannelTest, VerifyMessagesAndBytesReceived) {
+ AddObserver();
+ std::vector<DataBuffer> buffers({
+ DataBuffer("message 1"),
+ DataBuffer("msg 2"),
+ DataBuffer("message three"),
+ DataBuffer("quadra message"),
+ DataBuffer("fifthmsg"),
+ DataBuffer("message of the beast"),
+ });
+
+ SetChannelSid(inner_channel_, StreamId(1));
+
+ // Default values.
+ EXPECT_EQ(0U, channel_->messages_received());
+ EXPECT_EQ(0U, channel_->bytes_received());
+
+ // Receive three buffers while data channel isn't open.
+ network_thread_.BlockingCall([&] {
+ for (int i : {0, 1, 2})
+ inner_channel_->OnDataReceived(DataMessageType::kText, buffers[i].data);
+ });
+ EXPECT_EQ(0U, observer_->messages_received());
+ EXPECT_EQ(0U, channel_->messages_received());
+ EXPECT_EQ(0U, channel_->bytes_received());
+
+ // Open channel and make sure everything was received.
+ SetChannelReady();
+ size_t bytes_received =
+ buffers[0].size() + buffers[1].size() + buffers[2].size();
+ EXPECT_EQ(3U, observer_->messages_received());
+ EXPECT_EQ(3U, channel_->messages_received());
+ EXPECT_EQ(bytes_received, channel_->bytes_received());
+
+ // Receive three buffers while open.
+ network_thread_.BlockingCall([&] {
+ for (int i : {3, 4, 5})
+ inner_channel_->OnDataReceived(DataMessageType::kText, buffers[i].data);
+ });
+ run_loop_.Flush();
+ bytes_received += buffers[3].size() + buffers[4].size() + buffers[5].size();
+ EXPECT_EQ(6U, observer_->messages_received());
+ EXPECT_EQ(6U, channel_->messages_received());
+ EXPECT_EQ(bytes_received, channel_->bytes_received());
+}
+
+// Tests that OPEN_ACK message is sent if the datachannel is created from an
+// OPEN message.
+TEST_F(SctpDataChannelTest, OpenAckSentIfCreatedFromOpenMessage) {
+ InternalDataChannelInit config;
+ config.id = 1;
+ config.negotiated = true;
+ config.open_handshake_role = InternalDataChannelInit::kAcker;
+
+ SetChannelReady();
+ rtc::scoped_refptr<SctpDataChannel> dc =
+ controller_->CreateDataChannel("test1", config);
+ auto proxy = webrtc::SctpDataChannel::CreateProxy(dc, signaling_safety_);
+
+ EXPECT_EQ_WAIT(DataChannelInterface::kOpen, proxy->state(), 1000);
+
+ EXPECT_EQ(config.id, controller_->last_sid());
+ EXPECT_EQ(DataMessageType::kControl,
+ controller_->last_send_data_params().type);
+}
+
+// Tests the OPEN_ACK role assigned by InternalDataChannelInit.
+TEST_F(SctpDataChannelTest, OpenAckRoleInitialization) {
+ InternalDataChannelInit init;
+ EXPECT_EQ(InternalDataChannelInit::kOpener, init.open_handshake_role);
+ EXPECT_FALSE(init.negotiated);
+
+ DataChannelInit base;
+ base.negotiated = true;
+ InternalDataChannelInit init2(base);
+ EXPECT_EQ(InternalDataChannelInit::kNone, init2.open_handshake_role);
+}
+
+// Tests that Send() returns false if the sending buffer is full
+// and the channel stays open.
+TEST_F(SctpDataChannelTest, OpenWhenSendBufferFull) {
+ AddObserver();
+ SetChannelReady();
+
+ const size_t packetSize = 1024;
+
+ rtc::CopyOnWriteBuffer buffer(packetSize);
+ memset(buffer.MutableData(), 0, buffer.size());
+
+ DataBuffer packet(buffer, true);
+ controller_->set_send_blocked(true);
+ size_t successful_send = 0u, failed_send = 0u;
+ auto send_complete = [&](RTCError err) {
+ err.ok() ? ++successful_send : ++failed_send;
+ };
+
+ size_t count = DataChannelInterface::MaxSendQueueSize() / packetSize;
+ for (size_t i = 0; i < count; ++i) {
+ channel_->SendAsync(packet, send_complete);
+ }
+
+  // The sending buffer should be full; the extra `SendAsync()` reports an error.
+ channel_->SendAsync(packet, std::move(send_complete));
+ FlushNetworkThreadAndPendingOperations();
+ EXPECT_TRUE(DataChannelInterface::kOpen == channel_->state());
+ EXPECT_EQ(successful_send, count);
+ EXPECT_EQ(failed_send, 1u);
+}
+
+// TODO(tommi): This test uses `Send()`. Remove once fully deprecated.
+TEST_F(SctpDataChannelTest, DeprecatedOpenWhenSendBufferFull) {
+ SetChannelReady();
+
+ const size_t packetSize = 1024;
+
+ rtc::CopyOnWriteBuffer buffer(packetSize);
+ memset(buffer.MutableData(), 0, buffer.size());
+
+ DataBuffer packet(buffer, true);
+ controller_->set_send_blocked(true);
+
+ for (size_t i = 0; i < DataChannelInterface::MaxSendQueueSize() / packetSize;
+ ++i) {
+ EXPECT_TRUE(channel_->Send(packet));
+ }
+
+ // The sending buffer should be full, `Send()` returns false.
+ EXPECT_FALSE(channel_->Send(packet));
+ EXPECT_TRUE(DataChannelInterface::kOpen == channel_->state());
+}
+
+// Tests that the DataChannel is closed on transport errors.
+TEST_F(SctpDataChannelTest, ClosedOnTransportError) {
+ SetChannelReady();
+ DataBuffer buffer("abcd");
+ controller_->set_transport_error();
+
+ channel_->SendAsync(buffer, nullptr);
+
+ EXPECT_EQ(DataChannelInterface::kClosed, channel_->state());
+ EXPECT_FALSE(channel_->error().ok());
+ EXPECT_EQ(RTCErrorType::NETWORK_ERROR, channel_->error().type());
+ EXPECT_EQ(RTCErrorDetailType::NONE, channel_->error().error_detail());
+}
+
+// TODO(tommi): This test uses `Send()`. Remove once fully deprecated.
+TEST_F(SctpDataChannelTest, DeprecatedClosedOnTransportError) {
+ SetChannelReady();
+ DataBuffer buffer("abcd");
+ controller_->set_transport_error();
+
+ EXPECT_TRUE(channel_->Send(buffer));
+
+ EXPECT_EQ(DataChannelInterface::kClosed, channel_->state());
+ EXPECT_FALSE(channel_->error().ok());
+ EXPECT_EQ(RTCErrorType::NETWORK_ERROR, channel_->error().type());
+ EXPECT_EQ(RTCErrorDetailType::NONE, channel_->error().error_detail());
+}
+
+// Tests that the DataChannel is closed if the received buffer is full.
+TEST_F(SctpDataChannelTest, ClosedWhenReceivedBufferFull) {
+ SetChannelReady();
+ rtc::CopyOnWriteBuffer buffer(1024);
+ memset(buffer.MutableData(), 0, buffer.size());
+
+ network_thread_.BlockingCall([&] {
+ // Receiving data without having an observer will overflow the buffer.
+ for (size_t i = 0; i < 16 * 1024 + 1; ++i) {
+ inner_channel_->OnDataReceived(DataMessageType::kText, buffer);
+ }
+ });
+ EXPECT_EQ(DataChannelInterface::kClosed, channel_->state());
+ EXPECT_FALSE(channel_->error().ok());
+ EXPECT_EQ(RTCErrorType::RESOURCE_EXHAUSTED, channel_->error().type());
+ EXPECT_EQ(RTCErrorDetailType::NONE, channel_->error().error_detail());
+}
+
+// Tests that sending empty data returns no error and keeps the channel open.
+TEST_F(SctpDataChannelTest, SendEmptyData) {
+ SetChannelSid(inner_channel_, StreamId(1));
+ SetChannelReady();
+ EXPECT_EQ(DataChannelInterface::kOpen, channel_->state());
+
+ DataBuffer buffer("");
+ channel_->SendAsync(buffer, nullptr);
+ EXPECT_EQ(DataChannelInterface::kOpen, channel_->state());
+}
+
+// TODO(tommi): This test uses `Send()`. Remove once fully deprecated.
+TEST_F(SctpDataChannelTest, DeprecatedSendEmptyData) {
+ SetChannelSid(inner_channel_, StreamId(1));
+ SetChannelReady();
+ EXPECT_EQ(DataChannelInterface::kOpen, channel_->state());
+
+ DataBuffer buffer("");
+ EXPECT_TRUE(channel_->Send(buffer));
+ EXPECT_EQ(DataChannelInterface::kOpen, channel_->state());
+}
+
+// Tests that a channel can be closed without being opened or assigned an sid.
+TEST_F(SctpDataChannelTest, NeverOpened) {
+ controller_->set_transport_available(true);
+ network_thread_.BlockingCall(
+ [&] { inner_channel_->OnTransportChannelCreated(); });
+ channel_->Close();
+}
+
+// Tests that a data channel that's not connected to a transport can transition
+// directly to the `kClosed` state when closed.
+// See also chromium:1421534.
+TEST_F(SctpDataChannelTest, UnusedTransitionsDirectlyToClosed) {
+ channel_->Close();
+ EXPECT_EQ(DataChannelInterface::kClosed, channel_->state());
+}
+
+// Test that the data channel goes to the "closed" state (and doesn't crash)
+// when its transport goes away, even while data is buffered.
+TEST_F(SctpDataChannelTest, TransportDestroyedWhileDataBuffered) {
+ AddObserver();
+ SetChannelReady();
+
+ rtc::CopyOnWriteBuffer buffer(1024);
+ memset(buffer.MutableData(), 0, buffer.size());
+ DataBuffer packet(buffer, true);
+
+ // Send a packet while sending is blocked so it ends up buffered.
+ controller_->set_send_blocked(true);
+ channel_->SendAsync(packet, nullptr);
+
+ // Tell the data channel that its transport is being destroyed.
+ // It should then stop using the transport (allowing us to delete it) and
+ // transition to the "closed" state.
+ RTCError error(RTCErrorType::OPERATION_ERROR_WITH_DATA, "");
+ error.set_error_detail(RTCErrorDetailType::SCTP_FAILURE);
+ network_thread_.BlockingCall(
+ [&] { inner_channel_->OnTransportChannelClosed(error); });
+ controller_.reset(nullptr);
+ EXPECT_EQ_WAIT(DataChannelInterface::kClosed, channel_->state(),
+ kDefaultTimeout);
+ EXPECT_FALSE(channel_->error().ok());
+ EXPECT_EQ(RTCErrorType::OPERATION_ERROR_WITH_DATA, channel_->error().type());
+ EXPECT_EQ(RTCErrorDetailType::SCTP_FAILURE, channel_->error().error_detail());
+}
+
+// TODO(tommi): This test uses `Send()`. Remove once fully deprecated.
+TEST_F(SctpDataChannelTest, DeprecatedTransportDestroyedWhileDataBuffered) {
+ SetChannelReady();
+
+ rtc::CopyOnWriteBuffer buffer(1024);
+ memset(buffer.MutableData(), 0, buffer.size());
+ DataBuffer packet(buffer, true);
+
+ // Send a packet while sending is blocked so it ends up buffered.
+ controller_->set_send_blocked(true);
+ EXPECT_TRUE(channel_->Send(packet));
+
+ // Tell the data channel that its transport is being destroyed.
+ // It should then stop using the transport (allowing us to delete it) and
+ // transition to the "closed" state.
+ RTCError error(RTCErrorType::OPERATION_ERROR_WITH_DATA, "");
+ error.set_error_detail(RTCErrorDetailType::SCTP_FAILURE);
+ network_thread_.BlockingCall(
+ [&] { inner_channel_->OnTransportChannelClosed(error); });
+ controller_.reset(nullptr);
+ EXPECT_EQ_WAIT(DataChannelInterface::kClosed, channel_->state(),
+ kDefaultTimeout);
+ EXPECT_FALSE(channel_->error().ok());
+ EXPECT_EQ(RTCErrorType::OPERATION_ERROR_WITH_DATA, channel_->error().type());
+ EXPECT_EQ(RTCErrorDetailType::SCTP_FAILURE, channel_->error().error_detail());
+}
+
+TEST_F(SctpDataChannelTest, TransportGotErrorCode) {
+ SetChannelReady();
+
+ // Tell the data channel that its transport is being destroyed with an
+ // error code.
+ // It should then report that error code.
+ RTCError error(RTCErrorType::OPERATION_ERROR_WITH_DATA,
+ "Transport channel closed");
+ error.set_error_detail(RTCErrorDetailType::SCTP_FAILURE);
+ error.set_sctp_cause_code(
+ static_cast<uint16_t>(cricket::SctpErrorCauseCode::kProtocolViolation));
+ network_thread_.BlockingCall(
+ [&] { inner_channel_->OnTransportChannelClosed(error); });
+ controller_.reset(nullptr);
+ EXPECT_EQ_WAIT(DataChannelInterface::kClosed, channel_->state(),
+ kDefaultTimeout);
+ EXPECT_FALSE(channel_->error().ok());
+ EXPECT_EQ(RTCErrorType::OPERATION_ERROR_WITH_DATA, channel_->error().type());
+ EXPECT_EQ(RTCErrorDetailType::SCTP_FAILURE, channel_->error().error_detail());
+ EXPECT_EQ(
+ static_cast<uint16_t>(cricket::SctpErrorCauseCode::kProtocolViolation),
+ channel_->error().sctp_cause_code());
+}
+
+class SctpSidAllocatorTest : public ::testing::Test {
+ protected:
+ SctpSidAllocator allocator_;
+};
+
+// Verifies that an even SCTP id is allocated for SSL_CLIENT and an odd id for
+// SSL_SERVER.
+TEST_F(SctpSidAllocatorTest, SctpIdAllocationBasedOnRole) {
+ EXPECT_EQ(allocator_.AllocateSid(rtc::SSL_SERVER), StreamId(1));
+ EXPECT_EQ(allocator_.AllocateSid(rtc::SSL_CLIENT), StreamId(0));
+ EXPECT_EQ(allocator_.AllocateSid(rtc::SSL_SERVER), StreamId(3));
+ EXPECT_EQ(allocator_.AllocateSid(rtc::SSL_CLIENT), StreamId(2));
+}
+
+// Verifies that SCTP ids of existing DataChannels are not reused.
+TEST_F(SctpSidAllocatorTest, SctpIdAllocationNoReuse) {
+ StreamId old_id(1);
+ EXPECT_TRUE(allocator_.ReserveSid(old_id));
+
+ StreamId new_id = allocator_.AllocateSid(rtc::SSL_SERVER);
+ EXPECT_TRUE(new_id.HasValue());
+ EXPECT_NE(old_id, new_id);
+
+ old_id = StreamId(0);
+ EXPECT_TRUE(allocator_.ReserveSid(old_id));
+ new_id = allocator_.AllocateSid(rtc::SSL_CLIENT);
+ EXPECT_TRUE(new_id.HasValue());
+ EXPECT_NE(old_id, new_id);
+}
+
+// Verifies that SCTP ids of removed DataChannels can be reused.
+TEST_F(SctpSidAllocatorTest, SctpIdReusedForRemovedDataChannel) {
+ StreamId odd_id(1);
+ StreamId even_id(0);
+ EXPECT_TRUE(allocator_.ReserveSid(odd_id));
+ EXPECT_TRUE(allocator_.ReserveSid(even_id));
+
+ StreamId allocated_id = allocator_.AllocateSid(rtc::SSL_SERVER);
+ EXPECT_EQ(odd_id.stream_id_int() + 2, allocated_id.stream_id_int());
+
+ allocated_id = allocator_.AllocateSid(rtc::SSL_CLIENT);
+ EXPECT_EQ(even_id.stream_id_int() + 2, allocated_id.stream_id_int());
+
+ allocated_id = allocator_.AllocateSid(rtc::SSL_SERVER);
+ EXPECT_EQ(odd_id.stream_id_int() + 4, allocated_id.stream_id_int());
+
+ allocated_id = allocator_.AllocateSid(rtc::SSL_CLIENT);
+ EXPECT_EQ(even_id.stream_id_int() + 4, allocated_id.stream_id_int());
+
+ allocator_.ReleaseSid(odd_id);
+ allocator_.ReleaseSid(even_id);
+
+ // Verifies that removed ids are reused.
+ allocated_id = allocator_.AllocateSid(rtc::SSL_SERVER);
+ EXPECT_EQ(odd_id, allocated_id);
+
+ allocated_id = allocator_.AllocateSid(rtc::SSL_CLIENT);
+ EXPECT_EQ(even_id, allocated_id);
+
+ // Verifies that used higher ids are not reused.
+ allocated_id = allocator_.AllocateSid(rtc::SSL_SERVER);
+ EXPECT_EQ(odd_id.stream_id_int() + 6, allocated_id.stream_id_int());
+
+ allocated_id = allocator_.AllocateSid(rtc::SSL_CLIENT);
+ EXPECT_EQ(even_id.stream_id_int() + 6, allocated_id.stream_id_int());
+}
+
+// Code coverage tests for default implementations in data_channel_interface.*.
+namespace {
+class NoImplDataChannel : public DataChannelInterface {
+ public:
+ NoImplDataChannel() = default;
+  // The Send and SendAsync implementations are public; their implementation
+  // lives in data_channel_interface.cc.
+
+ private:
+  // Stub implementations of the pure virtual methods, just for compilation's sake.
+ void RegisterObserver(DataChannelObserver* observer) override {}
+ void UnregisterObserver() override {}
+ std::string label() const override { return ""; }
+ bool reliable() const override { return false; }
+ int id() const override { return -1; }
+ DataState state() const override { return DataChannelInterface::kClosed; }
+ uint32_t messages_sent() const override { return 0u; }
+ uint64_t bytes_sent() const override { return 0u; }
+ uint32_t messages_received() const override { return 0u; }
+ uint64_t bytes_received() const override { return 0u; }
+ uint64_t buffered_amount() const override { return 0u; }
+ void Close() override {}
+};
+
+class NoImplObserver : public DataChannelObserver {
+ public:
+ NoImplObserver() = default;
+
+ private:
+ void OnStateChange() override {}
+ void OnMessage(const DataBuffer& buffer) override {}
+};
+} // namespace
+
+TEST(DataChannelInterfaceTest, Coverage) {
+ auto channel = rtc::make_ref_counted<NoImplDataChannel>();
+ EXPECT_FALSE(channel->ordered());
+ EXPECT_EQ(channel->maxRetransmitTime(), 0u);
+ EXPECT_EQ(channel->maxRetransmits(), 0u);
+ EXPECT_FALSE(channel->maxRetransmitsOpt());
+ EXPECT_FALSE(channel->maxPacketLifeTime());
+ EXPECT_TRUE(channel->protocol().empty());
+ EXPECT_FALSE(channel->negotiated());
+ EXPECT_EQ(channel->MaxSendQueueSize(), 16u * 1024u * 1024u);
+
+ NoImplObserver observer;
+ observer.OnBufferedAmountChange(0u);
+ EXPECT_FALSE(observer.IsOkToCallOnTheNetworkThread());
+}
+
+#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
+
+TEST(DataChannelInterfaceDeathTest, SendDefaultImplDchecks) {
+ auto channel = rtc::make_ref_counted<NoImplDataChannel>();
+ RTC_EXPECT_DEATH(channel->Send(DataBuffer("Foo")), "Check failed: false");
+}
+
+TEST(DataChannelInterfaceDeathTest, SendAsyncDefaultImplDchecks) {
+ auto channel = rtc::make_ref_counted<NoImplDataChannel>();
+ RTC_EXPECT_DEATH(channel->SendAsync(DataBuffer("Foo"), nullptr),
+ "Check failed: false");
+}
+#endif // RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
+
+} // namespace
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/data_channel_utils.cc b/third_party/libwebrtc/pc/data_channel_utils.cc
new file mode 100644
index 0000000000..a772241c3e
--- /dev/null
+++ b/third_party/libwebrtc/pc/data_channel_utils.cc
@@ -0,0 +1,54 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/data_channel_utils.h"
+
+#include <utility>
+
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+
+bool PacketQueue::Empty() const {
+ return packets_.empty();
+}
+
+std::unique_ptr<DataBuffer> PacketQueue::PopFront() {
+ RTC_DCHECK(!packets_.empty());
+ byte_count_ -= packets_.front()->size();
+ std::unique_ptr<DataBuffer> packet = std::move(packets_.front());
+ packets_.pop_front();
+ return packet;
+}
+
+void PacketQueue::PushFront(std::unique_ptr<DataBuffer> packet) {
+ byte_count_ += packet->size();
+ packets_.push_front(std::move(packet));
+}
+
+void PacketQueue::PushBack(std::unique_ptr<DataBuffer> packet) {
+ byte_count_ += packet->size();
+ packets_.push_back(std::move(packet));
+}
+
+void PacketQueue::Clear() {
+ packets_.clear();
+ byte_count_ = 0;
+}
+
+void PacketQueue::Swap(PacketQueue* other) {
+ size_t other_byte_count = other->byte_count_;
+ other->byte_count_ = byte_count_;
+ byte_count_ = other_byte_count;
+
+ other->packets_.swap(packets_);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/data_channel_utils.h b/third_party/libwebrtc/pc/data_channel_utils.h
new file mode 100644
index 0000000000..8681ba4657
--- /dev/null
+++ b/third_party/libwebrtc/pc/data_channel_utils.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_DATA_CHANNEL_UTILS_H_
+#define PC_DATA_CHANNEL_UTILS_H_
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <deque>
+#include <memory>
+#include <string>
+#include <utility>
+
+#include "api/data_channel_interface.h"
+#include "media/base/media_engine.h"
+
+namespace webrtc {
+
+// A packet queue which tracks the total queued bytes. Queued packets are
+// owned by this class.
+class PacketQueue final {
+ public:
+ size_t byte_count() const { return byte_count_; }
+
+ bool Empty() const;
+
+ std::unique_ptr<DataBuffer> PopFront();
+
+ void PushFront(std::unique_ptr<DataBuffer> packet);
+ void PushBack(std::unique_ptr<DataBuffer> packet);
+
+ void Clear();
+
+ void Swap(PacketQueue* other);
+
+ private:
+ std::deque<std::unique_ptr<DataBuffer>> packets_;
+ size_t byte_count_ = 0;
+};
+
+struct DataChannelStats {
+ int internal_id;
+ int id;
+ std::string label;
+ std::string protocol;
+ DataChannelInterface::DataState state;
+ uint32_t messages_sent;
+ uint32_t messages_received;
+ uint64_t bytes_sent;
+ uint64_t bytes_received;
+};
+
+} // namespace webrtc
+
+#endif // PC_DATA_CHANNEL_UTILS_H_
diff --git a/third_party/libwebrtc/pc/dtls_srtp_transport.cc b/third_party/libwebrtc/pc/dtls_srtp_transport.cc
new file mode 100644
index 0000000000..d28285dc8d
--- /dev/null
+++ b/third_party/libwebrtc/pc/dtls_srtp_transport.cc
@@ -0,0 +1,330 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/dtls_srtp_transport.h"
+
+#include <string.h>
+
+#include <string>
+#include <utility>
+
+#include "api/dtls_transport_interface.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/ssl_stream_adapter.h"
+
+namespace {
+// Value specified in RFC 5764.
+static const char kDtlsSrtpExporterLabel[] = "EXTRACTOR-dtls_srtp";
+} // namespace
+
+namespace webrtc {
+
+DtlsSrtpTransport::DtlsSrtpTransport(bool rtcp_mux_enabled,
+ const FieldTrialsView& field_trials)
+ : SrtpTransport(rtcp_mux_enabled, field_trials) {}
+
+void DtlsSrtpTransport::SetDtlsTransports(
+ cricket::DtlsTransportInternal* rtp_dtls_transport,
+ cricket::DtlsTransportInternal* rtcp_dtls_transport) {
+ // Transport names should be the same.
+ if (rtp_dtls_transport && rtcp_dtls_transport) {
+ RTC_DCHECK(rtp_dtls_transport->transport_name() ==
+ rtcp_dtls_transport->transport_name());
+ }
+
+ // When using DTLS-SRTP, we must reset the SrtpTransport every time the
+ // DtlsTransport changes and wait until the DTLS handshake is complete to set
+ // the newly negotiated parameters.
+ // If `active_reset_srtp_params_` is true, intentionally reset the SRTP
+ // parameter even though the DtlsTransport may not change.
+ if (IsSrtpActive() && (rtp_dtls_transport != rtp_dtls_transport_ ||
+ active_reset_srtp_params_)) {
+ ResetParams();
+ }
+
+ const std::string transport_name =
+ rtp_dtls_transport ? rtp_dtls_transport->transport_name() : "null";
+
+ if (rtcp_dtls_transport && rtcp_dtls_transport != rtcp_dtls_transport_) {
+ // This would only be possible if using BUNDLE but not rtcp-mux, which isn't
+ // allowed according to the BUNDLE spec.
+ RTC_CHECK(!(IsSrtpActive()))
+ << "Setting RTCP for DTLS/SRTP after the DTLS is active "
+ "should never happen.";
+ }
+
+ if (rtcp_dtls_transport) {
+ RTC_LOG(LS_INFO) << "Setting RTCP Transport on " << transport_name
+ << " transport " << rtcp_dtls_transport;
+ }
+ SetRtcpDtlsTransport(rtcp_dtls_transport);
+ SetRtcpPacketTransport(rtcp_dtls_transport);
+
+ RTC_LOG(LS_INFO) << "Setting RTP Transport on " << transport_name
+ << " transport " << rtp_dtls_transport;
+ SetRtpDtlsTransport(rtp_dtls_transport);
+ SetRtpPacketTransport(rtp_dtls_transport);
+
+ MaybeSetupDtlsSrtp();
+}
+
+void DtlsSrtpTransport::SetRtcpMuxEnabled(bool enable) {
+ SrtpTransport::SetRtcpMuxEnabled(enable);
+ if (enable) {
+ MaybeSetupDtlsSrtp();
+ }
+}
+
+void DtlsSrtpTransport::UpdateSendEncryptedHeaderExtensionIds(
+ const std::vector<int>& send_extension_ids) {
+ if (send_extension_ids_ == send_extension_ids) {
+ return;
+ }
+ send_extension_ids_.emplace(send_extension_ids);
+ if (DtlsHandshakeCompleted()) {
+ // Reset the crypto parameters to update the send extension IDs.
+ SetupRtpDtlsSrtp();
+ }
+}
+
+void DtlsSrtpTransport::UpdateRecvEncryptedHeaderExtensionIds(
+ const std::vector<int>& recv_extension_ids) {
+ if (recv_extension_ids_ == recv_extension_ids) {
+ return;
+ }
+ recv_extension_ids_.emplace(recv_extension_ids);
+ if (DtlsHandshakeCompleted()) {
+ // Reset the crypto parameters to update the receive extension IDs.
+ SetupRtpDtlsSrtp();
+ }
+}
+
+bool DtlsSrtpTransport::IsDtlsActive() {
+ auto rtcp_dtls_transport =
+ rtcp_mux_enabled() ? nullptr : rtcp_dtls_transport_;
+ return (rtp_dtls_transport_ && rtp_dtls_transport_->IsDtlsActive() &&
+ (!rtcp_dtls_transport || rtcp_dtls_transport->IsDtlsActive()));
+}
+
+bool DtlsSrtpTransport::IsDtlsConnected() {
+ auto rtcp_dtls_transport =
+ rtcp_mux_enabled() ? nullptr : rtcp_dtls_transport_;
+ return (rtp_dtls_transport_ &&
+ rtp_dtls_transport_->dtls_state() == DtlsTransportState::kConnected &&
+ (!rtcp_dtls_transport || rtcp_dtls_transport->dtls_state() ==
+ DtlsTransportState::kConnected));
+}
+
+bool DtlsSrtpTransport::IsDtlsWritable() {
+ auto rtcp_packet_transport =
+ rtcp_mux_enabled() ? nullptr : rtcp_dtls_transport_;
+ return rtp_dtls_transport_ && rtp_dtls_transport_->writable() &&
+ (!rtcp_packet_transport || rtcp_packet_transport->writable());
+}
+
+bool DtlsSrtpTransport::DtlsHandshakeCompleted() {
+ return IsDtlsActive() && IsDtlsConnected();
+}
+
+void DtlsSrtpTransport::MaybeSetupDtlsSrtp() {
+ if (IsSrtpActive() || !IsDtlsWritable()) {
+ return;
+ }
+
+ SetupRtpDtlsSrtp();
+
+ if (!rtcp_mux_enabled() && rtcp_dtls_transport_) {
+ SetupRtcpDtlsSrtp();
+ }
+}
+
+void DtlsSrtpTransport::SetupRtpDtlsSrtp() {
+ // Use an empty encrypted header extension ID vector if not set. This could
+ // happen when the DTLS handshake is completed before processing the
+ // Offer/Answer which contains the encrypted header extension IDs.
+ std::vector<int> send_extension_ids;
+ std::vector<int> recv_extension_ids;
+ if (send_extension_ids_) {
+ send_extension_ids = *send_extension_ids_;
+ }
+ if (recv_extension_ids_) {
+ recv_extension_ids = *recv_extension_ids_;
+ }
+
+ int selected_crypto_suite;
+ rtc::ZeroOnFreeBuffer<unsigned char> send_key;
+ rtc::ZeroOnFreeBuffer<unsigned char> recv_key;
+
+ if (!ExtractParams(rtp_dtls_transport_, &selected_crypto_suite, &send_key,
+ &recv_key) ||
+ !SetRtpParams(selected_crypto_suite, &send_key[0],
+ static_cast<int>(send_key.size()), send_extension_ids,
+ selected_crypto_suite, &recv_key[0],
+ static_cast<int>(recv_key.size()), recv_extension_ids)) {
+ RTC_LOG(LS_WARNING) << "DTLS-SRTP key installation for RTP failed";
+ }
+}
+
+void DtlsSrtpTransport::SetupRtcpDtlsSrtp() {
+ // Return if the DTLS-SRTP is active because the encrypted header extension
+ // IDs don't need to be updated for RTCP and the crypto params don't need to
+ // be reset.
+ if (IsSrtpActive()) {
+ return;
+ }
+
+ std::vector<int> send_extension_ids;
+ std::vector<int> recv_extension_ids;
+ if (send_extension_ids_) {
+ send_extension_ids = *send_extension_ids_;
+ }
+ if (recv_extension_ids_) {
+ recv_extension_ids = *recv_extension_ids_;
+ }
+
+ int selected_crypto_suite;
+ rtc::ZeroOnFreeBuffer<unsigned char> rtcp_send_key;
+ rtc::ZeroOnFreeBuffer<unsigned char> rtcp_recv_key;
+ if (!ExtractParams(rtcp_dtls_transport_, &selected_crypto_suite,
+ &rtcp_send_key, &rtcp_recv_key) ||
+ !SetRtcpParams(selected_crypto_suite, &rtcp_send_key[0],
+ static_cast<int>(rtcp_send_key.size()), send_extension_ids,
+ selected_crypto_suite, &rtcp_recv_key[0],
+ static_cast<int>(rtcp_recv_key.size()),
+ recv_extension_ids)) {
+ RTC_LOG(LS_WARNING) << "DTLS-SRTP key installation for RTCP failed";
+ }
+}
+
+bool DtlsSrtpTransport::ExtractParams(
+ cricket::DtlsTransportInternal* dtls_transport,
+ int* selected_crypto_suite,
+ rtc::ZeroOnFreeBuffer<unsigned char>* send_key,
+ rtc::ZeroOnFreeBuffer<unsigned char>* recv_key) {
+ if (!dtls_transport || !dtls_transport->IsDtlsActive()) {
+ return false;
+ }
+
+ if (!dtls_transport->GetSrtpCryptoSuite(selected_crypto_suite)) {
+ RTC_LOG(LS_ERROR) << "No DTLS-SRTP selected crypto suite";
+ return false;
+ }
+
+ RTC_LOG(LS_INFO) << "Extracting keys from transport: "
+ << dtls_transport->transport_name();
+
+ int key_len;
+ int salt_len;
+ if (!rtc::GetSrtpKeyAndSaltLengths((*selected_crypto_suite), &key_len,
+ &salt_len)) {
+ RTC_LOG(LS_ERROR) << "Unknown DTLS-SRTP crypto suite"
+ << selected_crypto_suite;
+ return false;
+ }
+
+ // OK, we're now doing DTLS (RFC 5764)
+ rtc::ZeroOnFreeBuffer<unsigned char> dtls_buffer(key_len * 2 + salt_len * 2);
+
+ // RFC 5705 exporter using the RFC 5764 parameters
+ if (!dtls_transport->ExportKeyingMaterial(kDtlsSrtpExporterLabel, NULL, 0,
+ false, &dtls_buffer[0],
+ dtls_buffer.size())) {
+ RTC_LOG(LS_WARNING) << "DTLS-SRTP key export failed";
+ RTC_DCHECK_NOTREACHED(); // This should never happen
+ return false;
+ }
+
+ // Sync up the keys with the DTLS-SRTP interface
+ rtc::ZeroOnFreeBuffer<unsigned char> client_write_key(key_len + salt_len);
+ rtc::ZeroOnFreeBuffer<unsigned char> server_write_key(key_len + salt_len);
+ size_t offset = 0;
+ memcpy(&client_write_key[0], &dtls_buffer[offset], key_len);
+ offset += key_len;
+ memcpy(&server_write_key[0], &dtls_buffer[offset], key_len);
+ offset += key_len;
+ memcpy(&client_write_key[key_len], &dtls_buffer[offset], salt_len);
+ offset += salt_len;
+ memcpy(&server_write_key[key_len], &dtls_buffer[offset], salt_len);
+
+ rtc::SSLRole role;
+ if (!dtls_transport->GetDtlsRole(&role)) {
+ RTC_LOG(LS_WARNING) << "Failed to get the DTLS role.";
+ return false;
+ }
+
+ if (role == rtc::SSL_SERVER) {
+ *send_key = std::move(server_write_key);
+ *recv_key = std::move(client_write_key);
+ } else {
+ *send_key = std::move(client_write_key);
+ *recv_key = std::move(server_write_key);
+ }
+ return true;
+}
+
+void DtlsSrtpTransport::SetDtlsTransport(
+ cricket::DtlsTransportInternal* new_dtls_transport,
+ cricket::DtlsTransportInternal** old_dtls_transport) {
+ if (*old_dtls_transport == new_dtls_transport) {
+ return;
+ }
+
+ if (*old_dtls_transport) {
+ (*old_dtls_transport)->UnsubscribeDtlsTransportState(this);
+ }
+
+ *old_dtls_transport = new_dtls_transport;
+
+ if (new_dtls_transport) {
+ new_dtls_transport->SubscribeDtlsTransportState(
+ this,
+ [this](cricket::DtlsTransportInternal* transport,
+ DtlsTransportState state) { OnDtlsState(transport, state); });
+ }
+}
+
+void DtlsSrtpTransport::SetRtpDtlsTransport(
+ cricket::DtlsTransportInternal* rtp_dtls_transport) {
+ SetDtlsTransport(rtp_dtls_transport, &rtp_dtls_transport_);
+}
+
+void DtlsSrtpTransport::SetRtcpDtlsTransport(
+ cricket::DtlsTransportInternal* rtcp_dtls_transport) {
+ SetDtlsTransport(rtcp_dtls_transport, &rtcp_dtls_transport_);
+}
+
+void DtlsSrtpTransport::OnDtlsState(cricket::DtlsTransportInternal* transport,
+ DtlsTransportState state) {
+ RTC_DCHECK(transport == rtp_dtls_transport_ ||
+ transport == rtcp_dtls_transport_);
+
+ if (on_dtls_state_change_) {
+ on_dtls_state_change_();
+ }
+
+ if (state != DtlsTransportState::kConnected) {
+ ResetParams();
+ return;
+ }
+
+ MaybeSetupDtlsSrtp();
+}
+
+void DtlsSrtpTransport::OnWritableState(
+ rtc::PacketTransportInternal* packet_transport) {
+ MaybeSetupDtlsSrtp();
+}
+
+void DtlsSrtpTransport::SetOnDtlsStateChange(
+ std::function<void(void)> callback) {
+ on_dtls_state_change_ = std::move(callback);
+}
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/dtls_srtp_transport.h b/third_party/libwebrtc/pc/dtls_srtp_transport.h
new file mode 100644
index 0000000000..0f8338ca0d
--- /dev/null
+++ b/third_party/libwebrtc/pc/dtls_srtp_transport.h
@@ -0,0 +1,105 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_DTLS_SRTP_TRANSPORT_H_
+#define PC_DTLS_SRTP_TRANSPORT_H_
+
+#include <functional>
+#include <string>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/crypto_params.h"
+#include "api/dtls_transport_interface.h"
+#include "api/rtc_error.h"
+#include "p2p/base/dtls_transport_internal.h"
+#include "p2p/base/packet_transport_internal.h"
+#include "pc/srtp_transport.h"
+#include "rtc_base/buffer.h"
+
+namespace webrtc {
+
+// The subclass of SrtpTransport is used for DTLS-SRTP. When the DTLS handshake
+// is finished, it extracts the keying materials from DtlsTransport and
+// configures the SrtpSessions in the base class.
+class DtlsSrtpTransport : public SrtpTransport {
+ public:
+ DtlsSrtpTransport(bool rtcp_mux_enabled, const FieldTrialsView& field_trials);
+
+ // Set P2P layer RTP/RTCP DtlsTransports. When using RTCP-muxing,
+ // `rtcp_dtls_transport` is null.
+ void SetDtlsTransports(cricket::DtlsTransportInternal* rtp_dtls_transport,
+ cricket::DtlsTransportInternal* rtcp_dtls_transport);
+
+ void SetRtcpMuxEnabled(bool enable) override;
+
+ // Set the header extension ids that should be encrypted.
+ void UpdateSendEncryptedHeaderExtensionIds(
+ const std::vector<int>& send_extension_ids);
+
+ void UpdateRecvEncryptedHeaderExtensionIds(
+ const std::vector<int>& recv_extension_ids);
+
+ void SetOnDtlsStateChange(std::function<void(void)> callback);
+
+ RTCError SetSrtpSendKey(const cricket::CryptoParams& params) override {
+ return RTCError(RTCErrorType::UNSUPPORTED_OPERATION,
+ "Set SRTP keys for DTLS-SRTP is not supported.");
+ }
+ RTCError SetSrtpReceiveKey(const cricket::CryptoParams& params) override {
+ return RTCError(RTCErrorType::UNSUPPORTED_OPERATION,
+ "Set SRTP keys for DTLS-SRTP is not supported.");
+ }
+
+ // If `active_reset_srtp_params_` is set to be true, the SRTP parameters will
+ // be reset whenever the DtlsTransports are reset.
+ void SetActiveResetSrtpParams(bool active_reset_srtp_params) {
+ active_reset_srtp_params_ = active_reset_srtp_params;
+ }
+
+ private:
+ bool IsDtlsActive();
+ bool IsDtlsConnected();
+ bool IsDtlsWritable();
+ bool DtlsHandshakeCompleted();
+ void MaybeSetupDtlsSrtp();
+ void SetupRtpDtlsSrtp();
+ void SetupRtcpDtlsSrtp();
+ bool ExtractParams(cricket::DtlsTransportInternal* dtls_transport,
+ int* selected_crypto_suite,
+ rtc::ZeroOnFreeBuffer<unsigned char>* send_key,
+ rtc::ZeroOnFreeBuffer<unsigned char>* recv_key);
+ void SetDtlsTransport(cricket::DtlsTransportInternal* new_dtls_transport,
+ cricket::DtlsTransportInternal** old_dtls_transport);
+ void SetRtpDtlsTransport(cricket::DtlsTransportInternal* rtp_dtls_transport);
+ void SetRtcpDtlsTransport(
+ cricket::DtlsTransportInternal* rtcp_dtls_transport);
+
+ void OnDtlsState(cricket::DtlsTransportInternal* dtls_transport,
+ DtlsTransportState state);
+
+ // Override the SrtpTransport::OnWritableState.
+ void OnWritableState(rtc::PacketTransportInternal* packet_transport) override;
+
+ // Owned by the TransportController.
+ cricket::DtlsTransportInternal* rtp_dtls_transport_ = nullptr;
+ cricket::DtlsTransportInternal* rtcp_dtls_transport_ = nullptr;
+
+ // The encrypted header extension IDs.
+ absl::optional<std::vector<int>> send_extension_ids_;
+ absl::optional<std::vector<int>> recv_extension_ids_;
+
+ bool active_reset_srtp_params_ = false;
+ std::function<void(void)> on_dtls_state_change_;
+};
+
+} // namespace webrtc
+
+#endif // PC_DTLS_SRTP_TRANSPORT_H_
diff --git a/third_party/libwebrtc/pc/dtls_srtp_transport_unittest.cc b/third_party/libwebrtc/pc/dtls_srtp_transport_unittest.cc
new file mode 100644
index 0000000000..bf0676c324
--- /dev/null
+++ b/third_party/libwebrtc/pc/dtls_srtp_transport_unittest.cc
@@ -0,0 +1,576 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/dtls_srtp_transport.h"
+
+#include <string.h>
+
+#include <cstdint>
+#include <memory>
+
+#include "call/rtp_demuxer.h"
+#include "media/base/fake_rtp.h"
+#include "p2p/base/dtls_transport_internal.h"
+#include "p2p/base/fake_dtls_transport.h"
+#include "p2p/base/fake_ice_transport.h"
+#include "p2p/base/p2p_constants.h"
+#include "pc/rtp_transport.h"
+#include "pc/test/rtp_transport_test_util.h"
+#include "rtc_base/async_packet_socket.h"
+#include "rtc_base/byte_order.h"
+#include "rtc_base/containers/flat_set.h"
+#include "rtc_base/copy_on_write_buffer.h"
+#include "rtc_base/rtc_certificate.h"
+#include "rtc_base/ssl_identity.h"
+#include "rtc_base/third_party/sigslot/sigslot.h"
+#include "test/gtest.h"
+#include "test/scoped_key_value_config.h"
+
+using cricket::FakeDtlsTransport;
+using cricket::FakeIceTransport;
+using webrtc::DtlsSrtpTransport;
+using webrtc::RtpTransport;
+using webrtc::SrtpTransport;
+
+const int kRtpAuthTagLen = 10;
+
+class DtlsSrtpTransportTest : public ::testing::Test,
+ public sigslot::has_slots<> {
+ protected:
+ DtlsSrtpTransportTest() {}
+
+ ~DtlsSrtpTransportTest() {
+ if (dtls_srtp_transport1_) {
+ dtls_srtp_transport1_->UnregisterRtpDemuxerSink(&transport_observer1_);
+ }
+ if (dtls_srtp_transport2_) {
+ dtls_srtp_transport2_->UnregisterRtpDemuxerSink(&transport_observer2_);
+ }
+ }
+
+ std::unique_ptr<DtlsSrtpTransport> MakeDtlsSrtpTransport(
+ FakeDtlsTransport* rtp_dtls,
+ FakeDtlsTransport* rtcp_dtls,
+ bool rtcp_mux_enabled) {
+ auto dtls_srtp_transport =
+ std::make_unique<DtlsSrtpTransport>(rtcp_mux_enabled, field_trials_);
+
+ dtls_srtp_transport->SetDtlsTransports(rtp_dtls, rtcp_dtls);
+
+ return dtls_srtp_transport;
+ }
+
+ void MakeDtlsSrtpTransports(FakeDtlsTransport* rtp_dtls1,
+ FakeDtlsTransport* rtcp_dtls1,
+ FakeDtlsTransport* rtp_dtls2,
+ FakeDtlsTransport* rtcp_dtls2,
+ bool rtcp_mux_enabled) {
+ dtls_srtp_transport1_ =
+ MakeDtlsSrtpTransport(rtp_dtls1, rtcp_dtls1, rtcp_mux_enabled);
+ dtls_srtp_transport2_ =
+ MakeDtlsSrtpTransport(rtp_dtls2, rtcp_dtls2, rtcp_mux_enabled);
+
+ dtls_srtp_transport1_->SubscribeRtcpPacketReceived(
+ &transport_observer1_,
+ [this](rtc::CopyOnWriteBuffer* buffer, int64_t packet_time_ms) {
+ transport_observer1_.OnRtcpPacketReceived(buffer, packet_time_ms);
+ });
+ dtls_srtp_transport1_->SubscribeReadyToSend(
+ &transport_observer1_,
+ [this](bool ready) { transport_observer1_.OnReadyToSend(ready); });
+
+ dtls_srtp_transport2_->SubscribeRtcpPacketReceived(
+ &transport_observer2_,
+ [this](rtc::CopyOnWriteBuffer* buffer, int64_t packet_time_ms) {
+ transport_observer2_.OnRtcpPacketReceived(buffer, packet_time_ms);
+ });
+ dtls_srtp_transport2_->SubscribeReadyToSend(
+ &transport_observer2_,
+ [this](bool ready) { transport_observer2_.OnReadyToSend(ready); });
+ webrtc::RtpDemuxerCriteria demuxer_criteria;
+ // 0x00 is the payload type used in kPcmuFrame.
+ demuxer_criteria.payload_types() = {0x00};
+ dtls_srtp_transport1_->RegisterRtpDemuxerSink(demuxer_criteria,
+ &transport_observer1_);
+ dtls_srtp_transport2_->RegisterRtpDemuxerSink(demuxer_criteria,
+ &transport_observer2_);
+ }
+
+ void CompleteDtlsHandshake(FakeDtlsTransport* fake_dtls1,
+ FakeDtlsTransport* fake_dtls2) {
+ auto cert1 = rtc::RTCCertificate::Create(
+ rtc::SSLIdentity::Create("session1", rtc::KT_DEFAULT));
+ fake_dtls1->SetLocalCertificate(cert1);
+ auto cert2 = rtc::RTCCertificate::Create(
+ rtc::SSLIdentity::Create("session1", rtc::KT_DEFAULT));
+ fake_dtls2->SetLocalCertificate(cert2);
+ fake_dtls1->SetDestination(fake_dtls2);
+ }
+
+ void SendRecvRtpPackets() {
+ ASSERT_TRUE(dtls_srtp_transport1_);
+ ASSERT_TRUE(dtls_srtp_transport2_);
+ ASSERT_TRUE(dtls_srtp_transport1_->IsSrtpActive());
+ ASSERT_TRUE(dtls_srtp_transport2_->IsSrtpActive());
+
+ size_t rtp_len = sizeof(kPcmuFrame);
+ size_t packet_size = rtp_len + kRtpAuthTagLen;
+ rtc::Buffer rtp_packet_buffer(packet_size);
+ char* rtp_packet_data = rtp_packet_buffer.data<char>();
+ memcpy(rtp_packet_data, kPcmuFrame, rtp_len);
+ // In order to be able to run this test function multiple times we can not
+ // use the same sequence number twice. Increase the sequence number by one.
+ rtc::SetBE16(reinterpret_cast<uint8_t*>(rtp_packet_data) + 2,
+ ++sequence_number_);
+ rtc::CopyOnWriteBuffer rtp_packet1to2(rtp_packet_data, rtp_len,
+ packet_size);
+ rtc::CopyOnWriteBuffer rtp_packet2to1(rtp_packet_data, rtp_len,
+ packet_size);
+
+ rtc::PacketOptions options;
+ // Send a packet from `srtp_transport1_` to `srtp_transport2_` and verify
+ // that the packet can be successfully received and decrypted.
+ int prev_received_packets = transport_observer2_.rtp_count();
+ ASSERT_TRUE(dtls_srtp_transport1_->SendRtpPacket(&rtp_packet1to2, options,
+ cricket::PF_SRTP_BYPASS));
+ ASSERT_TRUE(transport_observer2_.last_recv_rtp_packet().data());
+ EXPECT_EQ(0, memcmp(transport_observer2_.last_recv_rtp_packet().data(),
+ kPcmuFrame, rtp_len));
+ EXPECT_EQ(prev_received_packets + 1, transport_observer2_.rtp_count());
+
+ prev_received_packets = transport_observer1_.rtp_count();
+ ASSERT_TRUE(dtls_srtp_transport2_->SendRtpPacket(&rtp_packet2to1, options,
+ cricket::PF_SRTP_BYPASS));
+ ASSERT_TRUE(transport_observer1_.last_recv_rtp_packet().data());
+ EXPECT_EQ(0, memcmp(transport_observer1_.last_recv_rtp_packet().data(),
+ kPcmuFrame, rtp_len));
+ EXPECT_EQ(prev_received_packets + 1, transport_observer1_.rtp_count());
+ }
+
+ void SendRecvRtcpPackets() {
+ size_t rtcp_len = sizeof(kRtcpReport);
+ size_t packet_size = rtcp_len + 4 + kRtpAuthTagLen;
+ rtc::Buffer rtcp_packet_buffer(packet_size);
+
+ // TODO(zhihuang): Remove the extra copy when the SendRtpPacket method
+ // doesn't take the CopyOnWriteBuffer by pointer.
+ rtc::CopyOnWriteBuffer rtcp_packet1to2(kRtcpReport, rtcp_len, packet_size);
+ rtc::CopyOnWriteBuffer rtcp_packet2to1(kRtcpReport, rtcp_len, packet_size);
+
+ rtc::PacketOptions options;
+ // Send a packet from `srtp_transport1_` to `srtp_transport2_` and verify
+ // that the packet can be successfully received and decrypted.
+ int prev_received_packets = transport_observer2_.rtcp_count();
+ ASSERT_TRUE(dtls_srtp_transport1_->SendRtcpPacket(&rtcp_packet1to2, options,
+ cricket::PF_SRTP_BYPASS));
+ ASSERT_TRUE(transport_observer2_.last_recv_rtcp_packet().data());
+ EXPECT_EQ(0, memcmp(transport_observer2_.last_recv_rtcp_packet().data(),
+ kRtcpReport, rtcp_len));
+ EXPECT_EQ(prev_received_packets + 1, transport_observer2_.rtcp_count());
+
+ // Do the same thing in the opposite direction;
+ prev_received_packets = transport_observer1_.rtcp_count();
+ ASSERT_TRUE(dtls_srtp_transport2_->SendRtcpPacket(&rtcp_packet2to1, options,
+ cricket::PF_SRTP_BYPASS));
+ ASSERT_TRUE(transport_observer1_.last_recv_rtcp_packet().data());
+ EXPECT_EQ(0, memcmp(transport_observer1_.last_recv_rtcp_packet().data(),
+ kRtcpReport, rtcp_len));
+ EXPECT_EQ(prev_received_packets + 1, transport_observer1_.rtcp_count());
+ }
+
+ void SendRecvRtpPacketsWithHeaderExtension(
+ const std::vector<int>& encrypted_header_ids) {
+ ASSERT_TRUE(dtls_srtp_transport1_);
+ ASSERT_TRUE(dtls_srtp_transport2_);
+ ASSERT_TRUE(dtls_srtp_transport1_->IsSrtpActive());
+ ASSERT_TRUE(dtls_srtp_transport2_->IsSrtpActive());
+
+ size_t rtp_len = sizeof(kPcmuFrameWithExtensions);
+ size_t packet_size = rtp_len + kRtpAuthTagLen;
+ rtc::Buffer rtp_packet_buffer(packet_size);
+ char* rtp_packet_data = rtp_packet_buffer.data<char>();
+ memcpy(rtp_packet_data, kPcmuFrameWithExtensions, rtp_len);
+ // In order to be able to run this test function multiple times we can not
+ // use the same sequence number twice. Increase the sequence number by one.
+ rtc::SetBE16(reinterpret_cast<uint8_t*>(rtp_packet_data) + 2,
+ ++sequence_number_);
+ rtc::CopyOnWriteBuffer rtp_packet1to2(rtp_packet_data, rtp_len,
+ packet_size);
+ rtc::CopyOnWriteBuffer rtp_packet2to1(rtp_packet_data, rtp_len,
+ packet_size);
+
+ char original_rtp_data[sizeof(kPcmuFrameWithExtensions)];
+ memcpy(original_rtp_data, rtp_packet_data, rtp_len);
+
+ rtc::PacketOptions options;
+ // Send a packet from `srtp_transport1_` to `srtp_transport2_` and verify
+ // that the packet can be successfully received and decrypted.
+ ASSERT_TRUE(dtls_srtp_transport1_->SendRtpPacket(&rtp_packet1to2, options,
+ cricket::PF_SRTP_BYPASS));
+ ASSERT_TRUE(transport_observer2_.last_recv_rtp_packet().data());
+ EXPECT_EQ(0, memcmp(transport_observer2_.last_recv_rtp_packet().data(),
+ original_rtp_data, rtp_len));
+ // Get the encrypted packet from underneath packet transport and verify the
+ // data and header extension are actually encrypted.
+ auto fake_dtls_transport = static_cast<FakeDtlsTransport*>(
+ dtls_srtp_transport1_->rtp_packet_transport());
+ auto fake_ice_transport =
+ static_cast<FakeIceTransport*>(fake_dtls_transport->ice_transport());
+ EXPECT_NE(0, memcmp(fake_ice_transport->last_sent_packet().data(),
+ original_rtp_data, rtp_len));
+ CompareHeaderExtensions(reinterpret_cast<const char*>(
+ fake_ice_transport->last_sent_packet().data()),
+ fake_ice_transport->last_sent_packet().size(),
+ original_rtp_data, rtp_len, encrypted_header_ids,
+ false);
+
+ // Do the same thing in the opposite direction.
+ ASSERT_TRUE(dtls_srtp_transport2_->SendRtpPacket(&rtp_packet2to1, options,
+ cricket::PF_SRTP_BYPASS));
+ ASSERT_TRUE(transport_observer1_.last_recv_rtp_packet().data());
+ EXPECT_EQ(0, memcmp(transport_observer1_.last_recv_rtp_packet().data(),
+ original_rtp_data, rtp_len));
+ // Get the encrypted packet from underneath packet transport and verify the
+ // data and header extension are actually encrypted.
+ fake_dtls_transport = static_cast<FakeDtlsTransport*>(
+ dtls_srtp_transport2_->rtp_packet_transport());
+ fake_ice_transport =
+ static_cast<FakeIceTransport*>(fake_dtls_transport->ice_transport());
+ EXPECT_NE(0, memcmp(fake_ice_transport->last_sent_packet().data(),
+ original_rtp_data, rtp_len));
+ CompareHeaderExtensions(reinterpret_cast<const char*>(
+ fake_ice_transport->last_sent_packet().data()),
+ fake_ice_transport->last_sent_packet().size(),
+ original_rtp_data, rtp_len, encrypted_header_ids,
+ false);
+ }
+
+ void SendRecvPackets() {
+ SendRecvRtpPackets();
+ SendRecvRtcpPackets();
+ }
+
+ rtc::AutoThread main_thread_;
+ std::unique_ptr<DtlsSrtpTransport> dtls_srtp_transport1_;
+ std::unique_ptr<DtlsSrtpTransport> dtls_srtp_transport2_;
+ webrtc::TransportObserver transport_observer1_;
+ webrtc::TransportObserver transport_observer2_;
+
+ int sequence_number_ = 0;
+ webrtc::test::ScopedKeyValueConfig field_trials_;
+};
+
+// Tests that if RTCP muxing is enabled and transports are set after RTP
+// transport finished the handshake, SRTP is set up.
+TEST_F(DtlsSrtpTransportTest, SetTransportsAfterHandshakeCompleteWithRtcpMux) {
+ auto rtp_dtls1 = std::make_unique<FakeDtlsTransport>(
+ "video", cricket::ICE_CANDIDATE_COMPONENT_RTP);
+ auto rtp_dtls2 = std::make_unique<FakeDtlsTransport>(
+ "video", cricket::ICE_CANDIDATE_COMPONENT_RTP);
+
+ MakeDtlsSrtpTransports(rtp_dtls1.get(), nullptr, rtp_dtls2.get(), nullptr,
+ /*rtcp_mux_enabled=*/true);
+
+ auto rtp_dtls3 = std::make_unique<FakeDtlsTransport>(
+ "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP);
+ auto rtp_dtls4 = std::make_unique<FakeDtlsTransport>(
+ "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP);
+
+ CompleteDtlsHandshake(rtp_dtls3.get(), rtp_dtls4.get());
+
+ dtls_srtp_transport1_->SetDtlsTransports(rtp_dtls3.get(), nullptr);
+ dtls_srtp_transport2_->SetDtlsTransports(rtp_dtls4.get(), nullptr);
+
+ SendRecvPackets();
+}
+
+// Tests that if RTCP muxing is not enabled and transports are set after both
+// RTP and RTCP transports finished the handshake, SRTP is set up.
+TEST_F(DtlsSrtpTransportTest,
+ SetTransportsAfterHandshakeCompleteWithoutRtcpMux) {
+ auto rtp_dtls1 = std::make_unique<FakeDtlsTransport>(
+ "video", cricket::ICE_CANDIDATE_COMPONENT_RTP);
+ auto rtcp_dtls1 = std::make_unique<FakeDtlsTransport>(
+ "video", cricket::ICE_CANDIDATE_COMPONENT_RTCP);
+ auto rtp_dtls2 = std::make_unique<FakeDtlsTransport>(
+ "video", cricket::ICE_CANDIDATE_COMPONENT_RTP);
+ auto rtcp_dtls2 = std::make_unique<FakeDtlsTransport>(
+ "video", cricket::ICE_CANDIDATE_COMPONENT_RTCP);
+
+ MakeDtlsSrtpTransports(rtp_dtls1.get(), rtcp_dtls1.get(), rtp_dtls2.get(),
+ rtcp_dtls2.get(), /*rtcp_mux_enabled=*/false);
+
+ auto rtp_dtls3 = std::make_unique<FakeDtlsTransport>(
+ "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP);
+ auto rtcp_dtls3 = std::make_unique<FakeDtlsTransport>(
+ "audio", cricket::ICE_CANDIDATE_COMPONENT_RTCP);
+ auto rtp_dtls4 = std::make_unique<FakeDtlsTransport>(
+ "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP);
+ auto rtcp_dtls4 = std::make_unique<FakeDtlsTransport>(
+ "audio", cricket::ICE_CANDIDATE_COMPONENT_RTCP);
+ CompleteDtlsHandshake(rtp_dtls3.get(), rtp_dtls4.get());
+ CompleteDtlsHandshake(rtcp_dtls3.get(), rtcp_dtls4.get());
+
+ dtls_srtp_transport1_->SetDtlsTransports(rtp_dtls3.get(), rtcp_dtls3.get());
+ dtls_srtp_transport2_->SetDtlsTransports(rtp_dtls4.get(), rtcp_dtls4.get());
+
+ SendRecvPackets();
+}
+
+// Tests if RTCP muxing is enabled, SRTP is set up as soon as the RTP DTLS
+// handshake is finished.
+TEST_F(DtlsSrtpTransportTest, SetTransportsBeforeHandshakeCompleteWithRtcpMux) {
+ auto rtp_dtls1 = std::make_unique<FakeDtlsTransport>(
+ "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP);
+ auto rtcp_dtls1 = std::make_unique<FakeDtlsTransport>(
+ "audio", cricket::ICE_CANDIDATE_COMPONENT_RTCP);
+ auto rtp_dtls2 = std::make_unique<FakeDtlsTransport>(
+ "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP);
+ auto rtcp_dtls2 = std::make_unique<FakeDtlsTransport>(
+ "audio", cricket::ICE_CANDIDATE_COMPONENT_RTCP);
+
+ MakeDtlsSrtpTransports(rtp_dtls1.get(), rtcp_dtls1.get(), rtp_dtls2.get(),
+ rtcp_dtls2.get(),
+ /*rtcp_mux_enabled=*/false);
+
+ dtls_srtp_transport1_->SetRtcpMuxEnabled(true);
+ dtls_srtp_transport2_->SetRtcpMuxEnabled(true);
+ CompleteDtlsHandshake(rtp_dtls1.get(), rtp_dtls2.get());
+ SendRecvPackets();
+}
+
+// Tests if RTCP muxing is not enabled, SRTP is set up when both the RTP and
+// RTCP DTLS handshake are finished.
+TEST_F(DtlsSrtpTransportTest,
+ SetTransportsBeforeHandshakeCompleteWithoutRtcpMux) {
+ auto rtp_dtls1 = std::make_unique<FakeDtlsTransport>(
+ "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP);
+ auto rtcp_dtls1 = std::make_unique<FakeDtlsTransport>(
+ "audio", cricket::ICE_CANDIDATE_COMPONENT_RTCP);
+ auto rtp_dtls2 = std::make_unique<FakeDtlsTransport>(
+ "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP);
+ auto rtcp_dtls2 = std::make_unique<FakeDtlsTransport>(
+ "audio", cricket::ICE_CANDIDATE_COMPONENT_RTCP);
+
+ MakeDtlsSrtpTransports(rtp_dtls1.get(), rtcp_dtls1.get(), rtp_dtls2.get(),
+ rtcp_dtls2.get(), /*rtcp_mux_enabled=*/false);
+
+ CompleteDtlsHandshake(rtp_dtls1.get(), rtp_dtls2.get());
+ EXPECT_FALSE(dtls_srtp_transport1_->IsSrtpActive());
+ EXPECT_FALSE(dtls_srtp_transport2_->IsSrtpActive());
+ CompleteDtlsHandshake(rtcp_dtls1.get(), rtcp_dtls2.get());
+ SendRecvPackets();
+}
+
+// Tests that if the DtlsTransport underneath is changed, the previous DTLS-SRTP
+// context will be reset and will be re-setup once the new transports' handshake
+// complete.
+TEST_F(DtlsSrtpTransportTest, DtlsSrtpResetAfterDtlsTransportChange) {
+ auto rtp_dtls1 = std::make_unique<FakeDtlsTransport>(
+ "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP);
+ auto rtp_dtls2 = std::make_unique<FakeDtlsTransport>(
+ "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP);
+
+ MakeDtlsSrtpTransports(rtp_dtls1.get(), nullptr, rtp_dtls2.get(), nullptr,
+ /*rtcp_mux_enabled=*/true);
+
+ CompleteDtlsHandshake(rtp_dtls1.get(), rtp_dtls2.get());
+ EXPECT_TRUE(dtls_srtp_transport1_->IsSrtpActive());
+ EXPECT_TRUE(dtls_srtp_transport2_->IsSrtpActive());
+
+ auto rtp_dtls3 = std::make_unique<FakeDtlsTransport>(
+ "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP);
+ auto rtp_dtls4 = std::make_unique<FakeDtlsTransport>(
+ "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP);
+
+ // The previous context is reset.
+ dtls_srtp_transport1_->SetDtlsTransports(rtp_dtls3.get(), nullptr);
+ dtls_srtp_transport2_->SetDtlsTransports(rtp_dtls4.get(), nullptr);
+ EXPECT_FALSE(dtls_srtp_transport1_->IsSrtpActive());
+ EXPECT_FALSE(dtls_srtp_transport2_->IsSrtpActive());
+
+ // Re-setup.
+ CompleteDtlsHandshake(rtp_dtls3.get(), rtp_dtls4.get());
+ SendRecvPackets();
+}
+
+// Tests if only the RTP DTLS handshake complete, and then RTCP muxing is
+// enabled, SRTP is set up.
+TEST_F(DtlsSrtpTransportTest,
+ RtcpMuxEnabledAfterRtpTransportHandshakeComplete) {
+ auto rtp_dtls1 = std::make_unique<FakeDtlsTransport>(
+ "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP);
+ auto rtcp_dtls1 = std::make_unique<FakeDtlsTransport>(
+ "audio", cricket::ICE_CANDIDATE_COMPONENT_RTCP);
+ auto rtp_dtls2 = std::make_unique<FakeDtlsTransport>(
+ "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP);
+ auto rtcp_dtls2 = std::make_unique<FakeDtlsTransport>(
+ "audio", cricket::ICE_CANDIDATE_COMPONENT_RTCP);
+
+ MakeDtlsSrtpTransports(rtp_dtls1.get(), rtcp_dtls1.get(), rtp_dtls2.get(),
+ rtcp_dtls2.get(), /*rtcp_mux_enabled=*/false);
+
+ CompleteDtlsHandshake(rtp_dtls1.get(), rtp_dtls2.get());
+ // Inactive because the RTCP transport handshake didn't complete.
+ EXPECT_FALSE(dtls_srtp_transport1_->IsSrtpActive());
+ EXPECT_FALSE(dtls_srtp_transport2_->IsSrtpActive());
+
+ dtls_srtp_transport1_->SetRtcpMuxEnabled(true);
+ dtls_srtp_transport2_->SetRtcpMuxEnabled(true);
+ // The transports should be active and be able to send packets when the
+ // RTCP muxing is enabled.
+ SendRecvPackets();
+}
+
+// Tests that when SetSend/RecvEncryptedHeaderExtensionIds is called, the SRTP
+// sessions are updated with new encrypted header extension IDs immediately.
+TEST_F(DtlsSrtpTransportTest, EncryptedHeaderExtensionIdUpdated) {
+ auto rtp_dtls1 = std::make_unique<FakeDtlsTransport>(
+ "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP);
+ auto rtp_dtls2 = std::make_unique<FakeDtlsTransport>(
+ "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP);
+
+ MakeDtlsSrtpTransports(rtp_dtls1.get(), nullptr, rtp_dtls2.get(), nullptr,
+ /*rtcp_mux_enabled=*/true);
+ CompleteDtlsHandshake(rtp_dtls1.get(), rtp_dtls2.get());
+
+ std::vector<int> encrypted_headers;
+ encrypted_headers.push_back(kHeaderExtensionIDs[0]);
+ encrypted_headers.push_back(kHeaderExtensionIDs[1]);
+
+ dtls_srtp_transport1_->UpdateSendEncryptedHeaderExtensionIds(
+ encrypted_headers);
+ dtls_srtp_transport1_->UpdateRecvEncryptedHeaderExtensionIds(
+ encrypted_headers);
+ dtls_srtp_transport2_->UpdateSendEncryptedHeaderExtensionIds(
+ encrypted_headers);
+ dtls_srtp_transport2_->UpdateRecvEncryptedHeaderExtensionIds(
+ encrypted_headers);
+}
+
+// Tests if RTCP muxing is enabled. DtlsSrtpTransport is ready to send once the
+// RTP DtlsTransport is ready.
+TEST_F(DtlsSrtpTransportTest, SignalReadyToSendFiredWithRtcpMux) {
+ auto rtp_dtls1 = std::make_unique<FakeDtlsTransport>(
+ "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP);
+ auto rtp_dtls2 = std::make_unique<FakeDtlsTransport>(
+ "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP);
+
+ MakeDtlsSrtpTransports(rtp_dtls1.get(), nullptr, rtp_dtls2.get(), nullptr,
+ /*rtcp_mux_enabled=*/true);
+
+ rtp_dtls1->SetDestination(rtp_dtls2.get());
+ EXPECT_TRUE(transport_observer1_.ready_to_send());
+ EXPECT_TRUE(transport_observer2_.ready_to_send());
+}
+
+// Tests if RTCP muxing is not enabled. DtlsSrtpTransport is ready to send once
+// both the RTP and RTCP DtlsTransport are ready.
+TEST_F(DtlsSrtpTransportTest, SignalReadyToSendFiredWithoutRtcpMux) {
+ auto rtp_dtls1 = std::make_unique<FakeDtlsTransport>(
+ "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP);
+ auto rtcp_dtls1 = std::make_unique<FakeDtlsTransport>(
+ "audio", cricket::ICE_CANDIDATE_COMPONENT_RTCP);
+ auto rtp_dtls2 = std::make_unique<FakeDtlsTransport>(
+ "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP);
+ auto rtcp_dtls2 = std::make_unique<FakeDtlsTransport>(
+ "audio", cricket::ICE_CANDIDATE_COMPONENT_RTCP);
+
+ MakeDtlsSrtpTransports(rtp_dtls1.get(), rtcp_dtls1.get(), rtp_dtls2.get(),
+ rtcp_dtls2.get(), /*rtcp_mux_enabled=*/false);
+
+ rtp_dtls1->SetDestination(rtp_dtls2.get());
+ EXPECT_FALSE(transport_observer1_.ready_to_send());
+ EXPECT_FALSE(transport_observer2_.ready_to_send());
+
+ rtcp_dtls1->SetDestination(rtcp_dtls2.get());
+ EXPECT_TRUE(transport_observer1_.ready_to_send());
+ EXPECT_TRUE(transport_observer2_.ready_to_send());
+}
+
+// Test that if an endpoint "fully" enables RTCP mux, setting the RTCP
+// transport to null, it *doesn't* reset its SRTP context. That would cause the
+// ROC and SRTCP index to be reset, causing replay detection and other errors
+// when attempting to unprotect packets.
+// Regression test for bugs.webrtc.org/8996
+TEST_F(DtlsSrtpTransportTest, SrtpSessionNotResetWhenRtcpTransportRemoved) {
+ auto rtp_dtls1 = std::make_unique<FakeDtlsTransport>(
+ "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP);
+ auto rtcp_dtls1 = std::make_unique<FakeDtlsTransport>(
+ "audio", cricket::ICE_CANDIDATE_COMPONENT_RTCP);
+ auto rtp_dtls2 = std::make_unique<FakeDtlsTransport>(
+ "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP);
+ auto rtcp_dtls2 = std::make_unique<FakeDtlsTransport>(
+ "audio", cricket::ICE_CANDIDATE_COMPONENT_RTCP);
+
+ MakeDtlsSrtpTransports(rtp_dtls1.get(), rtcp_dtls1.get(), rtp_dtls2.get(),
+ rtcp_dtls2.get(), /*rtcp_mux_enabled=*/true);
+ CompleteDtlsHandshake(rtp_dtls1.get(), rtp_dtls2.get());
+ CompleteDtlsHandshake(rtcp_dtls1.get(), rtcp_dtls2.get());
+
+ // Send some RTCP packets, causing the SRTCP index to be incremented.
+ SendRecvRtcpPackets();
+
+ // Set RTCP transport to null, which previously would trigger this problem.
+ dtls_srtp_transport1_->SetDtlsTransports(rtp_dtls1.get(), nullptr);
+
+ // Attempt to send more RTCP packets. If the issue occurred, one side would
+ // reset its context while the other would not, causing replay detection
+ // errors when a packet with a duplicate SRTCP index is received.
+ SendRecvRtcpPackets();
+}
+
+// Tests that RTCP packets can be sent and received if both sides actively reset
+// the SRTP parameters with the `active_reset_srtp_params_` flag.
+TEST_F(DtlsSrtpTransportTest, ActivelyResetSrtpParams) {
+ auto rtp_dtls1 = std::make_unique<FakeDtlsTransport>(
+ "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP);
+ auto rtcp_dtls1 = std::make_unique<FakeDtlsTransport>(
+ "audio", cricket::ICE_CANDIDATE_COMPONENT_RTCP);
+ auto rtp_dtls2 = std::make_unique<FakeDtlsTransport>(
+ "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP);
+ auto rtcp_dtls2 = std::make_unique<FakeDtlsTransport>(
+ "audio", cricket::ICE_CANDIDATE_COMPONENT_RTCP);
+
+ MakeDtlsSrtpTransports(rtp_dtls1.get(), rtcp_dtls1.get(), rtp_dtls2.get(),
+ rtcp_dtls2.get(), /*rtcp_mux_enabled=*/true);
+ CompleteDtlsHandshake(rtp_dtls1.get(), rtp_dtls2.get());
+ CompleteDtlsHandshake(rtcp_dtls1.get(), rtcp_dtls2.get());
+
+ // Send some RTCP packets, causing the SRTCP index to be incremented.
+ SendRecvRtcpPackets();
+
+  // Only set the `active_reset_srtp_params_` flag to be true on one side.
+ dtls_srtp_transport1_->SetActiveResetSrtpParams(true);
+ // Set RTCP transport to null to trigger the SRTP parameters update.
+ dtls_srtp_transport1_->SetDtlsTransports(rtp_dtls1.get(), nullptr);
+ dtls_srtp_transport2_->SetDtlsTransports(rtp_dtls2.get(), nullptr);
+
+ // Sending some RTCP packets.
+ size_t rtcp_len = sizeof(kRtcpReport);
+ size_t packet_size = rtcp_len + 4 + kRtpAuthTagLen;
+ rtc::Buffer rtcp_packet_buffer(packet_size);
+ rtc::CopyOnWriteBuffer rtcp_packet(kRtcpReport, rtcp_len, packet_size);
+ int prev_received_packets = transport_observer2_.rtcp_count();
+ ASSERT_TRUE(dtls_srtp_transport1_->SendRtcpPacket(
+ &rtcp_packet, rtc::PacketOptions(), cricket::PF_SRTP_BYPASS));
+  // The RTCP packet is not expected to be received because the SRTP parameters
+  // are only reset on one side and the SRTCP index is out of sync.
+ EXPECT_EQ(prev_received_packets, transport_observer2_.rtcp_count());
+
+ // Set the flag to be true on the other side.
+ dtls_srtp_transport2_->SetActiveResetSrtpParams(true);
+ // Set RTCP transport to null to trigger the SRTP parameters update.
+ dtls_srtp_transport1_->SetDtlsTransports(rtp_dtls1.get(), nullptr);
+ dtls_srtp_transport2_->SetDtlsTransports(rtp_dtls2.get(), nullptr);
+
+ // RTCP packets flow is expected to work just fine.
+ SendRecvRtcpPackets();
+}
diff --git a/third_party/libwebrtc/pc/dtls_transport.cc b/third_party/libwebrtc/pc/dtls_transport.cc
new file mode 100644
index 0000000000..15eed9e47b
--- /dev/null
+++ b/third_party/libwebrtc/pc/dtls_transport.cc
@@ -0,0 +1,148 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/dtls_transport.h"
+
+#include <utility>
+
+#include "absl/types/optional.h"
+#include "api/dtls_transport_interface.h"
+#include "api/make_ref_counted.h"
+#include "api/sequence_checker.h"
+#include "pc/ice_transport.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/ssl_stream_adapter.h"
+
+namespace webrtc {
+
+// Implementation of DtlsTransportInterface
+DtlsTransport::DtlsTransport(
+ std::unique_ptr<cricket::DtlsTransportInternal> internal)
+ : owner_thread_(rtc::Thread::Current()),
+ info_(DtlsTransportState::kNew),
+ internal_dtls_transport_(std::move(internal)),
+ ice_transport_(rtc::make_ref_counted<IceTransportWithPointer>(
+ internal_dtls_transport_->ice_transport())) {
+ RTC_DCHECK(internal_dtls_transport_.get());
+ internal_dtls_transport_->SubscribeDtlsTransportState(
+ [this](cricket::DtlsTransportInternal* transport,
+ DtlsTransportState state) {
+ OnInternalDtlsState(transport, state);
+ });
+ UpdateInformation();
+}
+
+DtlsTransport::~DtlsTransport() {
+ // We depend on the signaling thread to call Clear() before dropping
+ // its last reference to this object.
+ RTC_DCHECK(owner_thread_->IsCurrent() || !internal_dtls_transport_);
+}
+
+DtlsTransportInformation DtlsTransport::Information() {
+ MutexLock lock(&lock_);
+ return info_;
+}
+
+void DtlsTransport::RegisterObserver(DtlsTransportObserverInterface* observer) {
+ RTC_DCHECK_RUN_ON(owner_thread_);
+ RTC_DCHECK(observer);
+ observer_ = observer;
+}
+
+void DtlsTransport::UnregisterObserver() {
+ RTC_DCHECK_RUN_ON(owner_thread_);
+ observer_ = nullptr;
+}
+
+rtc::scoped_refptr<IceTransportInterface> DtlsTransport::ice_transport() {
+ return ice_transport_;
+}
+
+// Internal functions
+void DtlsTransport::Clear() {
+ RTC_DCHECK_RUN_ON(owner_thread_);
+ RTC_DCHECK(internal());
+ bool must_send_event =
+ (internal()->dtls_state() != DtlsTransportState::kClosed);
+ // The destructor of cricket::DtlsTransportInternal calls back
+ // into DtlsTransport, so we can't hold the lock while releasing.
+ std::unique_ptr<cricket::DtlsTransportInternal> transport_to_release;
+ {
+ MutexLock lock(&lock_);
+ transport_to_release = std::move(internal_dtls_transport_);
+ ice_transport_->Clear();
+ }
+ UpdateInformation();
+ if (observer_ && must_send_event) {
+ observer_->OnStateChange(Information());
+ }
+}
+
+void DtlsTransport::OnInternalDtlsState(
+ cricket::DtlsTransportInternal* transport,
+ DtlsTransportState state) {
+ RTC_DCHECK_RUN_ON(owner_thread_);
+ RTC_DCHECK(transport == internal());
+ RTC_DCHECK(state == internal()->dtls_state());
+ UpdateInformation();
+ if (observer_) {
+ observer_->OnStateChange(Information());
+ }
+}
+
+void DtlsTransport::UpdateInformation() {
+ RTC_DCHECK_RUN_ON(owner_thread_);
+ MutexLock lock(&lock_);
+ if (internal_dtls_transport_) {
+ if (internal_dtls_transport_->dtls_state() ==
+ DtlsTransportState::kConnected) {
+ bool success = true;
+ rtc::SSLRole internal_role;
+ absl::optional<DtlsTransportTlsRole> role;
+ int ssl_cipher_suite;
+ int tls_version;
+ int srtp_cipher;
+ success &= internal_dtls_transport_->GetDtlsRole(&internal_role);
+ if (success) {
+ switch (internal_role) {
+ case rtc::SSL_CLIENT:
+ role = DtlsTransportTlsRole::kClient;
+ break;
+ case rtc::SSL_SERVER:
+ role = DtlsTransportTlsRole::kServer;
+ break;
+ }
+ }
+ success &= internal_dtls_transport_->GetSslVersionBytes(&tls_version);
+ success &= internal_dtls_transport_->GetSslCipherSuite(&ssl_cipher_suite);
+ success &= internal_dtls_transport_->GetSrtpCryptoSuite(&srtp_cipher);
+ if (success) {
+ info_ = DtlsTransportInformation(
+ internal_dtls_transport_->dtls_state(), role, tls_version,
+ ssl_cipher_suite, srtp_cipher,
+ internal_dtls_transport_->GetRemoteSSLCertChain());
+ } else {
+ RTC_LOG(LS_ERROR) << "DtlsTransport in connected state has incomplete "
+ "TLS information";
+ info_ = DtlsTransportInformation(
+ internal_dtls_transport_->dtls_state(), role, absl::nullopt,
+ absl::nullopt, absl::nullopt,
+ internal_dtls_transport_->GetRemoteSSLCertChain());
+ }
+ } else {
+ info_ = DtlsTransportInformation(internal_dtls_transport_->dtls_state());
+ }
+ } else {
+ info_ = DtlsTransportInformation(DtlsTransportState::kClosed);
+ }
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/dtls_transport.h b/third_party/libwebrtc/pc/dtls_transport.h
new file mode 100644
index 0000000000..cca4cc980a
--- /dev/null
+++ b/third_party/libwebrtc/pc/dtls_transport.h
@@ -0,0 +1,76 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_DTLS_TRANSPORT_H_
+#define PC_DTLS_TRANSPORT_H_
+
+#include <memory>
+
+#include "api/dtls_transport_interface.h"
+#include "api/ice_transport_interface.h"
+#include "api/scoped_refptr.h"
+#include "p2p/base/dtls_transport.h"
+#include "p2p/base/dtls_transport_internal.h"
+#include "pc/ice_transport.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/thread_annotations.h"
+
+namespace webrtc {
+
+class IceTransportWithPointer;
+
+// This implementation wraps a cricket::DtlsTransport, and takes
+// ownership of it.
+class DtlsTransport : public DtlsTransportInterface {
+ public:
+ // This object must be constructed and updated on a consistent thread,
+ // the same thread as the one the cricket::DtlsTransportInternal object
+ // lives on.
+ // The Information() function can be called from a different thread,
+ // such as the signalling thread.
+ explicit DtlsTransport(
+ std::unique_ptr<cricket::DtlsTransportInternal> internal);
+
+ rtc::scoped_refptr<IceTransportInterface> ice_transport() override;
+ DtlsTransportInformation Information() override;
+ void RegisterObserver(DtlsTransportObserverInterface* observer) override;
+ void UnregisterObserver() override;
+ void Clear();
+
+ cricket::DtlsTransportInternal* internal() {
+ MutexLock lock(&lock_);
+ return internal_dtls_transport_.get();
+ }
+
+ const cricket::DtlsTransportInternal* internal() const {
+ MutexLock lock(&lock_);
+ return internal_dtls_transport_.get();
+ }
+
+ protected:
+ ~DtlsTransport();
+
+ private:
+ void OnInternalDtlsState(cricket::DtlsTransportInternal* transport,
+ DtlsTransportState state);
+ void UpdateInformation();
+
+ DtlsTransportObserverInterface* observer_ = nullptr;
+ rtc::Thread* owner_thread_;
+ mutable Mutex lock_;
+ DtlsTransportInformation info_ RTC_GUARDED_BY(lock_);
+ std::unique_ptr<cricket::DtlsTransportInternal> internal_dtls_transport_
+ RTC_GUARDED_BY(lock_);
+ const rtc::scoped_refptr<IceTransportWithPointer> ice_transport_;
+};
+
+} // namespace webrtc
+#endif // PC_DTLS_TRANSPORT_H_
diff --git a/third_party/libwebrtc/pc/dtls_transport_unittest.cc b/third_party/libwebrtc/pc/dtls_transport_unittest.cc
new file mode 100644
index 0000000000..c234176635
--- /dev/null
+++ b/third_party/libwebrtc/pc/dtls_transport_unittest.cc
@@ -0,0 +1,181 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/dtls_transport.h"
+
+#include <utility>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/make_ref_counted.h"
+#include "api/rtc_error.h"
+#include "p2p/base/fake_dtls_transport.h"
+#include "p2p/base/p2p_constants.h"
+#include "rtc_base/fake_ssl_identity.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/rtc_certificate.h"
+#include "rtc_base/ssl_identity.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+constexpr int kDefaultTimeout = 1000; // milliseconds
+constexpr int kNonsenseCipherSuite = 1234;
+
+using cricket::FakeDtlsTransport;
+using ::testing::ElementsAre;
+
+namespace webrtc {
+
+class TestDtlsTransportObserver : public DtlsTransportObserverInterface {
+ public:
+ void OnStateChange(DtlsTransportInformation info) override {
+ state_change_called_ = true;
+ states_.push_back(info.state());
+ info_ = info;
+ }
+
+ void OnError(RTCError error) override {}
+
+ DtlsTransportState state() {
+ if (states_.size() > 0) {
+ return states_[states_.size() - 1];
+ } else {
+ return DtlsTransportState::kNew;
+ }
+ }
+
+ bool state_change_called_ = false;
+ DtlsTransportInformation info_;
+ std::vector<DtlsTransportState> states_;
+};
+
+class DtlsTransportTest : public ::testing::Test {
+ public:
+ DtlsTransport* transport() { return transport_.get(); }
+ DtlsTransportObserverInterface* observer() { return &observer_; }
+
+ void CreateTransport(rtc::FakeSSLCertificate* certificate = nullptr) {
+ auto cricket_transport = std::make_unique<FakeDtlsTransport>(
+ "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP);
+ if (certificate) {
+ cricket_transport->SetRemoteSSLCertificate(certificate);
+ }
+ cricket_transport->SetSslCipherSuite(kNonsenseCipherSuite);
+ transport_ =
+ rtc::make_ref_counted<DtlsTransport>(std::move(cricket_transport));
+ }
+
+ void CompleteDtlsHandshake() {
+ auto fake_dtls1 = static_cast<FakeDtlsTransport*>(transport_->internal());
+ auto fake_dtls2 = std::make_unique<FakeDtlsTransport>(
+ "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP);
+ auto cert1 = rtc::RTCCertificate::Create(
+ rtc::SSLIdentity::Create("session1", rtc::KT_DEFAULT));
+ fake_dtls1->SetLocalCertificate(cert1);
+ auto cert2 = rtc::RTCCertificate::Create(
+ rtc::SSLIdentity::Create("session1", rtc::KT_DEFAULT));
+ fake_dtls2->SetLocalCertificate(cert2);
+ fake_dtls1->SetDestination(fake_dtls2.get());
+ }
+
+ rtc::AutoThread main_thread_;
+ rtc::scoped_refptr<DtlsTransport> transport_;
+ TestDtlsTransportObserver observer_;
+};
+
+TEST_F(DtlsTransportTest, CreateClearDelete) {
+ auto cricket_transport = std::make_unique<FakeDtlsTransport>(
+ "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP);
+ auto webrtc_transport =
+ rtc::make_ref_counted<DtlsTransport>(std::move(cricket_transport));
+ ASSERT_TRUE(webrtc_transport->internal());
+ ASSERT_EQ(DtlsTransportState::kNew, webrtc_transport->Information().state());
+ webrtc_transport->Clear();
+ ASSERT_FALSE(webrtc_transport->internal());
+ ASSERT_EQ(DtlsTransportState::kClosed,
+ webrtc_transport->Information().state());
+}
+
+TEST_F(DtlsTransportTest, EventsObservedWhenConnecting) {
+ CreateTransport();
+ transport()->RegisterObserver(observer());
+ CompleteDtlsHandshake();
+ ASSERT_TRUE_WAIT(observer_.state_change_called_, kDefaultTimeout);
+ EXPECT_THAT(
+ observer_.states_,
+ ElementsAre( // FakeDtlsTransport doesn't signal the "connecting" state.
+ // TODO(hta): fix FakeDtlsTransport or file bug on it.
+ // DtlsTransportState::kConnecting,
+ DtlsTransportState::kConnected));
+}
+
+TEST_F(DtlsTransportTest, CloseWhenClearing) {
+ CreateTransport();
+ transport()->RegisterObserver(observer());
+ CompleteDtlsHandshake();
+ ASSERT_TRUE_WAIT(observer_.state() == DtlsTransportState::kConnected,
+ kDefaultTimeout);
+ transport()->Clear();
+ ASSERT_TRUE_WAIT(observer_.state() == DtlsTransportState::kClosed,
+ kDefaultTimeout);
+}
+
+TEST_F(DtlsTransportTest, RoleAppearsOnConnect) {
+ rtc::FakeSSLCertificate fake_certificate("fake data");
+ CreateTransport(&fake_certificate);
+ transport()->RegisterObserver(observer());
+ EXPECT_FALSE(transport()->Information().role());
+ CompleteDtlsHandshake();
+ ASSERT_TRUE_WAIT(observer_.state() == DtlsTransportState::kConnected,
+ kDefaultTimeout);
+ EXPECT_TRUE(observer_.info_.role());
+ EXPECT_TRUE(transport()->Information().role());
+ EXPECT_EQ(transport()->Information().role(), DtlsTransportTlsRole::kClient);
+}
+
+TEST_F(DtlsTransportTest, CertificateAppearsOnConnect) {
+ rtc::FakeSSLCertificate fake_certificate("fake data");
+ CreateTransport(&fake_certificate);
+ transport()->RegisterObserver(observer());
+ CompleteDtlsHandshake();
+ ASSERT_TRUE_WAIT(observer_.state() == DtlsTransportState::kConnected,
+ kDefaultTimeout);
+ EXPECT_TRUE(observer_.info_.remote_ssl_certificates() != nullptr);
+}
+
+TEST_F(DtlsTransportTest, CertificateDisappearsOnClose) {
+ rtc::FakeSSLCertificate fake_certificate("fake data");
+ CreateTransport(&fake_certificate);
+ transport()->RegisterObserver(observer());
+ CompleteDtlsHandshake();
+ ASSERT_TRUE_WAIT(observer_.state() == DtlsTransportState::kConnected,
+ kDefaultTimeout);
+ EXPECT_TRUE(observer_.info_.remote_ssl_certificates() != nullptr);
+ transport()->Clear();
+ ASSERT_TRUE_WAIT(observer_.state() == DtlsTransportState::kClosed,
+ kDefaultTimeout);
+ EXPECT_FALSE(observer_.info_.remote_ssl_certificates());
+}
+
+TEST_F(DtlsTransportTest, CipherSuiteVisibleWhenConnected) {
+ CreateTransport();
+ transport()->RegisterObserver(observer());
+ CompleteDtlsHandshake();
+ ASSERT_TRUE_WAIT(observer_.state() == DtlsTransportState::kConnected,
+ kDefaultTimeout);
+ ASSERT_TRUE(observer_.info_.ssl_cipher_suite());
+ EXPECT_EQ(kNonsenseCipherSuite, *observer_.info_.ssl_cipher_suite());
+ transport()->Clear();
+ ASSERT_TRUE_WAIT(observer_.state() == DtlsTransportState::kClosed,
+ kDefaultTimeout);
+ EXPECT_FALSE(observer_.info_.ssl_cipher_suite());
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/dtmf_sender.cc b/third_party/libwebrtc/pc/dtmf_sender.cc
new file mode 100644
index 0000000000..45a4a58abb
--- /dev/null
+++ b/third_party/libwebrtc/pc/dtmf_sender.cc
@@ -0,0 +1,243 @@
+/*
+ * Copyright 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/dtmf_sender.h"
+
+#include <ctype.h>
+#include <string.h>
+
+#include "api/task_queue/pending_task_safety_flag.h"
+#include "api/task_queue/task_queue_base.h"
+#include "api/units/time_delta.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+
+// RFC4733
+// +-------+--------+------+---------+
+// | Event | Code | Type | Volume? |
+// +-------+--------+------+---------+
+// | 0--9 | 0--9 | tone | yes |
+// | * | 10 | tone | yes |
+// | # | 11 | tone | yes |
+// | A--D | 12--15 | tone | yes |
+// +-------+--------+------+---------+
+// The "," is a special event defined by the WebRTC spec. It means to delay for
+// 2 seconds before processing the next tone. We use -1 as its code.
+static const int kDtmfCommaDelay = -1;
+static const char kDtmfValidTones[] = ",0123456789*#ABCDabcd";
+static const char kDtmfTonesTable[] = ",0123456789*#ABCD";
+// The duration cannot be more than 6000ms or less than 40ms. The gap between
+// tones must be at least 50 ms.
+// Source for values: W3C WEBRTC specification.
+// https://w3c.github.io/webrtc-pc/#dom-rtcdtmfsender-insertdtmf
+static const int kDtmfDefaultDurationMs = 100;
+static const int kDtmfMinDurationMs = 40;
+static const int kDtmfMaxDurationMs = 6000;
+static const int kDtmfDefaultGapMs = 50;
+static const int kDtmfMinGapMs = 30;
+
+// Get DTMF code from the DTMF event character.
+bool GetDtmfCode(char tone, int* code) {
+ // Convert a-d to A-D.
+ char event = toupper(tone);
+ const char* p = strchr(kDtmfTonesTable, event);
+ if (!p) {
+ return false;
+ }
+ *code = p - kDtmfTonesTable - 1;
+ return true;
+}
+
+rtc::scoped_refptr<DtmfSender> DtmfSender::Create(
+ TaskQueueBase* signaling_thread,
+ DtmfProviderInterface* provider) {
+ if (!signaling_thread) {
+ return nullptr;
+ }
+ return rtc::make_ref_counted<DtmfSender>(signaling_thread, provider);
+}
+
+DtmfSender::DtmfSender(TaskQueueBase* signaling_thread,
+ DtmfProviderInterface* provider)
+ : observer_(nullptr),
+ signaling_thread_(signaling_thread),
+ provider_(provider),
+ duration_(kDtmfDefaultDurationMs),
+ inter_tone_gap_(kDtmfDefaultGapMs),
+ comma_delay_(kDtmfDefaultCommaDelayMs) {
+ RTC_DCHECK(signaling_thread_);
+ RTC_DCHECK(provider_);
+}
+
+void DtmfSender::OnDtmfProviderDestroyed() {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ RTC_DLOG(LS_INFO) << "The Dtmf provider is deleted. Clear the sending queue.";
+ StopSending();
+ provider_ = nullptr;
+}
+
+DtmfSender::~DtmfSender() {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ StopSending();
+}
+
+void DtmfSender::RegisterObserver(DtmfSenderObserverInterface* observer) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ observer_ = observer;
+}
+
+void DtmfSender::UnregisterObserver() {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ observer_ = nullptr;
+}
+
+bool DtmfSender::CanInsertDtmf() {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ if (!provider_) {
+ return false;
+ }
+ return provider_->CanInsertDtmf();
+}
+
+bool DtmfSender::InsertDtmf(const std::string& tones,
+ int duration,
+ int inter_tone_gap,
+ int comma_delay) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+
+ if (duration > kDtmfMaxDurationMs || duration < kDtmfMinDurationMs ||
+ inter_tone_gap < kDtmfMinGapMs || comma_delay < kDtmfMinGapMs) {
+ RTC_LOG(LS_ERROR)
+ << "InsertDtmf is called with invalid duration or tones gap. "
+ "The duration cannot be more than "
+ << kDtmfMaxDurationMs << "ms or less than " << kDtmfMinDurationMs
+ << "ms. The gap between tones must be at least " << kDtmfMinGapMs
+ << "ms.";
+ return false;
+ }
+
+ if (!CanInsertDtmf()) {
+ RTC_LOG(LS_ERROR)
+ << "InsertDtmf is called on DtmfSender that can't send DTMF.";
+ return false;
+ }
+
+ tones_ = tones;
+ duration_ = duration;
+ inter_tone_gap_ = inter_tone_gap;
+ comma_delay_ = comma_delay;
+
+ // Cancel any remaining tasks for previous tones.
+ if (safety_flag_) {
+ safety_flag_->SetNotAlive();
+ }
+ safety_flag_ = PendingTaskSafetyFlag::Create();
+ // Kick off a new DTMF task.
+ QueueInsertDtmf(1 /*ms*/);
+ return true;
+}
+
+std::string DtmfSender::tones() const {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ return tones_;
+}
+
+int DtmfSender::duration() const {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ return duration_;
+}
+
+int DtmfSender::inter_tone_gap() const {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ return inter_tone_gap_;
+}
+
+int DtmfSender::comma_delay() const {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ return comma_delay_;
+}
+
+void DtmfSender::QueueInsertDtmf(uint32_t delay_ms) {
+ signaling_thread_->PostDelayedHighPrecisionTask(
+ SafeTask(safety_flag_,
+ [this] {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ DoInsertDtmf();
+ }),
+ TimeDelta::Millis(delay_ms));
+}
+
+void DtmfSender::DoInsertDtmf() {
+ // Get the first DTMF tone from the tone buffer. Unrecognized characters will
+ // be ignored and skipped.
+ size_t first_tone_pos = tones_.find_first_of(kDtmfValidTones);
+ int code = 0;
+ if (first_tone_pos == std::string::npos) {
+ tones_.clear();
+    // Fire an "OnToneChange" event with an empty string and stop.
+ if (observer_) {
+ observer_->OnToneChange(std::string(), tones_);
+ observer_->OnToneChange(std::string());
+ }
+ return;
+ } else {
+ char tone = tones_[first_tone_pos];
+ if (!GetDtmfCode(tone, &code)) {
+      // The find_first_of(kDtmfValidTones) call should have guaranteed that
+      // `tone` is a valid DTMF tone.
+ RTC_DCHECK_NOTREACHED();
+ }
+ }
+
+ int tone_gap = inter_tone_gap_;
+ if (code == kDtmfCommaDelay) {
+ // Special case defined by WebRTC - By default, the character ',' indicates
+ // a delay of 2 seconds before processing the next character in the tones
+ // parameter. The comma delay can be set to a non default value via
+ // InsertDtmf to comply with legacy WebRTC clients.
+ tone_gap = comma_delay_;
+ } else {
+ if (!provider_) {
+ RTC_LOG(LS_ERROR) << "The DtmfProvider has been destroyed.";
+ return;
+ }
+ // The provider starts playout of the given tone on the
+ // associated RTP media stream, using the appropriate codec.
+ if (!provider_->InsertDtmf(code, duration_)) {
+ RTC_LOG(LS_ERROR) << "The DtmfProvider can no longer send DTMF.";
+ return;
+ }
+ // Wait for the number of milliseconds specified by `duration_`.
+ tone_gap += duration_;
+ }
+
+  // Fire an "OnToneChange" event with the tone that's just processed.
+ if (observer_) {
+ observer_->OnToneChange(tones_.substr(first_tone_pos, 1),
+ tones_.substr(first_tone_pos + 1));
+ observer_->OnToneChange(tones_.substr(first_tone_pos, 1));
+ }
+
+ // Erase the unrecognized characters plus the tone that's just processed.
+ tones_.erase(0, first_tone_pos + 1);
+
+ // Continue with the next tone.
+ QueueInsertDtmf(tone_gap);
+}
+
+void DtmfSender::StopSending() {
+ if (safety_flag_) {
+ safety_flag_->SetNotAlive();
+ }
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/dtmf_sender.h b/third_party/libwebrtc/pc/dtmf_sender.h
new file mode 100644
index 0000000000..c99c7bee50
--- /dev/null
+++ b/third_party/libwebrtc/pc/dtmf_sender.h
@@ -0,0 +1,118 @@
+/*
+ * Copyright 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_DTMF_SENDER_H_
+#define PC_DTMF_SENDER_H_
+
+#include <stdint.h>
+
+#include <string>
+
+#include "api/dtmf_sender_interface.h"
+#include "api/scoped_refptr.h"
+#include "api/sequence_checker.h"
+#include "api/task_queue/pending_task_safety_flag.h"
+#include "api/task_queue/task_queue_base.h"
+#include "pc/proxy.h"
+#include "rtc_base/ref_count.h"
+#include "rtc_base/thread_annotations.h"
+
+// DtmfSender is the native implementation of the RTCDTMFSender defined by
+// the WebRTC W3C Editor's Draft.
+// https://w3c.github.io/webrtc-pc/#rtcdtmfsender
+
+namespace webrtc {
+
+// This interface is called by DtmfSender to talk to the actual audio channel
+// to send DTMF.
+class DtmfProviderInterface {
+ public:
+ // Returns true if the audio sender is capable of sending DTMF. Otherwise
+ // returns false.
+ virtual bool CanInsertDtmf() = 0;
+ // Sends DTMF `code`.
+ // The `duration` indicates the length of the DTMF tone in ms.
+ // Returns true on success and false on failure.
+ virtual bool InsertDtmf(int code, int duration) = 0;
+
+ protected:
+ virtual ~DtmfProviderInterface() {}
+};
+
+class DtmfSender : public DtmfSenderInterface {
+ public:
+ static rtc::scoped_refptr<DtmfSender> Create(TaskQueueBase* signaling_thread,
+ DtmfProviderInterface* provider);
+
+ void OnDtmfProviderDestroyed();
+
+ // Implements DtmfSenderInterface.
+ void RegisterObserver(DtmfSenderObserverInterface* observer) override;
+ void UnregisterObserver() override;
+ bool CanInsertDtmf() override;
+ bool InsertDtmf(const std::string& tones,
+ int duration,
+ int inter_tone_gap,
+ int comma_delay = kDtmfDefaultCommaDelayMs) override;
+ std::string tones() const override;
+ int duration() const override;
+ int inter_tone_gap() const override;
+ int comma_delay() const override;
+
+ protected:
+ DtmfSender(TaskQueueBase* signaling_thread, DtmfProviderInterface* provider);
+ virtual ~DtmfSender();
+
+ DtmfSender(const DtmfSender&) = delete;
+ DtmfSender& operator=(const DtmfSender&) = delete;
+
+ private:
+ DtmfSender();
+
+ void QueueInsertDtmf(uint32_t delay_ms) RTC_RUN_ON(signaling_thread_);
+
+ // The DTMF sending task.
+ void DoInsertDtmf() RTC_RUN_ON(signaling_thread_);
+
+ void StopSending() RTC_RUN_ON(signaling_thread_);
+
+ DtmfSenderObserverInterface* observer_ RTC_GUARDED_BY(signaling_thread_);
+ TaskQueueBase* const signaling_thread_;
+ DtmfProviderInterface* provider_ RTC_GUARDED_BY(signaling_thread_);
+ std::string tones_ RTC_GUARDED_BY(signaling_thread_);
+ int duration_ RTC_GUARDED_BY(signaling_thread_);
+ int inter_tone_gap_ RTC_GUARDED_BY(signaling_thread_);
+ int comma_delay_ RTC_GUARDED_BY(signaling_thread_);
+
+ // For cancelling the tasks which feed the DTMF provider one tone at a time.
+ rtc::scoped_refptr<PendingTaskSafetyFlag> safety_flag_ RTC_GUARDED_BY(
+ signaling_thread_) RTC_PT_GUARDED_BY(signaling_thread_) = nullptr;
+};
+
+// Define proxy for DtmfSenderInterface.
+BEGIN_PRIMARY_PROXY_MAP(DtmfSender)
+
+PROXY_PRIMARY_THREAD_DESTRUCTOR()
+PROXY_METHOD1(void, RegisterObserver, DtmfSenderObserverInterface*)
+PROXY_METHOD0(void, UnregisterObserver)
+PROXY_METHOD0(bool, CanInsertDtmf)
+PROXY_METHOD4(bool, InsertDtmf, const std::string&, int, int, int)
+PROXY_CONSTMETHOD0(std::string, tones)
+PROXY_CONSTMETHOD0(int, duration)
+PROXY_CONSTMETHOD0(int, inter_tone_gap)
+PROXY_CONSTMETHOD0(int, comma_delay)
+END_PROXY_MAP(DtmfSender)
+
+// Get DTMF code from the DTMF event character.
+bool GetDtmfCode(char tone, int* code);
+
+} // namespace webrtc
+
+#endif // PC_DTMF_SENDER_H_
diff --git a/third_party/libwebrtc/pc/dtmf_sender_unittest.cc b/third_party/libwebrtc/pc/dtmf_sender_unittest.cc
new file mode 100644
index 0000000000..ab5ba9eaee
--- /dev/null
+++ b/third_party/libwebrtc/pc/dtmf_sender_unittest.cc
@@ -0,0 +1,371 @@
+/*
+ * Copyright 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/dtmf_sender.h"
+
+#include <stddef.h>
+
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "rtc_base/fake_clock.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/time_utils.h"
+#include "test/gtest.h"
+
+using webrtc::DtmfProviderInterface;
+using webrtc::DtmfSender;
+using webrtc::DtmfSenderObserverInterface;
+
+// TODO(deadbeef): Even though this test now uses a fake clock, it has a
+// generous 3-second timeout for every test case. The timeout could be tuned
+// to each test based on the tones sent, instead.
+static const int kMaxWaitMs = 3000;
+
+class FakeDtmfObserver : public DtmfSenderObserverInterface {
+ public:
+ FakeDtmfObserver() : completed_(false) {}
+
+ // Implements DtmfSenderObserverInterface.
+ void OnToneChange(const std::string& tone) override {
+ tones_from_single_argument_callback_.push_back(tone);
+ if (tone.empty()) {
+ completed_ = true;
+ }
+ }
+ void OnToneChange(const std::string& tone,
+ const std::string& tone_buffer) override {
+ tones_.push_back(tone);
+ tones_remaining_ = tone_buffer;
+ if (tone.empty()) {
+ completed_ = true;
+ }
+ }
+
+ // getters
+ const std::vector<std::string>& tones() const { return tones_; }
+ const std::vector<std::string>& tones_from_single_argument_callback() const {
+ return tones_from_single_argument_callback_;
+ }
+ const std::string tones_remaining() { return tones_remaining_; }
+ bool completed() const { return completed_; }
+
+ private:
+ std::vector<std::string> tones_;
+ std::vector<std::string> tones_from_single_argument_callback_;
+ std::string tones_remaining_;
+ bool completed_;
+};
+
+class FakeDtmfProvider : public DtmfProviderInterface {
+ public:
+ struct DtmfInfo {
+ DtmfInfo(int code, int duration, int gap)
+ : code(code), duration(duration), gap(gap) {}
+ int code;
+ int duration;
+ int gap;
+ };
+
+ FakeDtmfProvider() : last_insert_dtmf_call_(0) {}
+
+ // Implements DtmfProviderInterface.
+ bool CanInsertDtmf() override { return can_insert_; }
+
+ bool InsertDtmf(int code, int duration) override {
+ int gap = 0;
+ // TODO(ronghuawu): Make the timer (basically the rtc::TimeNanos)
+ // mockable and use a fake timer in the unit tests.
+ if (last_insert_dtmf_call_ > 0) {
+ gap = static_cast<int>(rtc::TimeMillis() - last_insert_dtmf_call_);
+ }
+ last_insert_dtmf_call_ = rtc::TimeMillis();
+
+ dtmf_info_queue_.push_back(DtmfInfo(code, duration, gap));
+ return true;
+ }
+
+ // getter and setter
+ const std::vector<DtmfInfo>& dtmf_info_queue() const {
+ return dtmf_info_queue_;
+ }
+
+ // helper functions
+ void SetCanInsertDtmf(bool can_insert) { can_insert_ = can_insert; }
+
+ private:
+ bool can_insert_ = false;
+ std::vector<DtmfInfo> dtmf_info_queue_;
+ int64_t last_insert_dtmf_call_;
+};
+
+class DtmfSenderTest : public ::testing::Test {
+ protected:
+ DtmfSenderTest()
+ : observer_(new FakeDtmfObserver()), provider_(new FakeDtmfProvider()) {
+ provider_->SetCanInsertDtmf(true);
+ dtmf_ = DtmfSender::Create(rtc::Thread::Current(), provider_.get());
+ dtmf_->RegisterObserver(observer_.get());
+ }
+
+ ~DtmfSenderTest() {
+ if (dtmf_.get()) {
+ dtmf_->UnregisterObserver();
+ }
+ }
+
+ // Constructs a list of DtmfInfo from `tones`, `duration` and
+ // `inter_tone_gap`.
+ void GetDtmfInfoFromString(
+ const std::string& tones,
+ int duration,
+ int inter_tone_gap,
+ std::vector<FakeDtmfProvider::DtmfInfo>* dtmfs,
+ int comma_delay = webrtc::DtmfSender::kDtmfDefaultCommaDelayMs) {
+ // Init extra_delay as -inter_tone_gap - duration to ensure the first
+ // DtmfInfo's gap field will be 0.
+ int extra_delay = -1 * (inter_tone_gap + duration);
+
+ std::string::const_iterator it = tones.begin();
+ for (; it != tones.end(); ++it) {
+ char tone = *it;
+ int code = 0;
+ webrtc::GetDtmfCode(tone, &code);
+ if (tone == ',') {
+ extra_delay = comma_delay;
+ } else {
+ dtmfs->push_back(FakeDtmfProvider::DtmfInfo(
+ code, duration, duration + inter_tone_gap + extra_delay));
+ extra_delay = 0;
+ }
+ }
+ }
+
+ void VerifyExpectedState(const std::string& tones,
+ int duration,
+ int inter_tone_gap) {
+ EXPECT_EQ(tones, dtmf_->tones());
+ EXPECT_EQ(duration, dtmf_->duration());
+ EXPECT_EQ(inter_tone_gap, dtmf_->inter_tone_gap());
+ }
+
+ // Verify the provider got all the expected calls.
+ void VerifyOnProvider(
+ const std::string& tones,
+ int duration,
+ int inter_tone_gap,
+ int comma_delay = webrtc::DtmfSender::kDtmfDefaultCommaDelayMs) {
+ std::vector<FakeDtmfProvider::DtmfInfo> dtmf_queue_ref;
+ GetDtmfInfoFromString(tones, duration, inter_tone_gap, &dtmf_queue_ref,
+ comma_delay);
+ VerifyOnProvider(dtmf_queue_ref);
+ }
+
+ void VerifyOnProvider(
+ const std::vector<FakeDtmfProvider::DtmfInfo>& dtmf_queue_ref) {
+ const std::vector<FakeDtmfProvider::DtmfInfo>& dtmf_queue =
+ provider_->dtmf_info_queue();
+ ASSERT_EQ(dtmf_queue_ref.size(), dtmf_queue.size());
+ std::vector<FakeDtmfProvider::DtmfInfo>::const_iterator it_ref =
+ dtmf_queue_ref.begin();
+ std::vector<FakeDtmfProvider::DtmfInfo>::const_iterator it =
+ dtmf_queue.begin();
+ while (it_ref != dtmf_queue_ref.end() && it != dtmf_queue.end()) {
+ EXPECT_EQ(it_ref->code, it->code);
+ EXPECT_EQ(it_ref->duration, it->duration);
+ // Allow ~10ms error (can be small since we're using a fake clock).
+ EXPECT_GE(it_ref->gap, it->gap - 10);
+ EXPECT_LE(it_ref->gap, it->gap + 10);
+ ++it_ref;
+ ++it;
+ }
+ }
+
+ // Verify the observer got all the expected callbacks.
+ void VerifyOnObserver(const std::string& tones_ref) {
+ const std::vector<std::string>& tones = observer_->tones();
+ // The observer will get an empty string at the end.
+ EXPECT_EQ(tones_ref.size() + 1, tones.size());
+ EXPECT_EQ(observer_->tones(),
+ observer_->tones_from_single_argument_callback());
+ EXPECT_TRUE(tones.back().empty());
+ EXPECT_TRUE(observer_->tones_remaining().empty());
+ std::string::const_iterator it_ref = tones_ref.begin();
+ std::vector<std::string>::const_iterator it = tones.begin();
+ while (it_ref != tones_ref.end() && it != tones.end()) {
+ EXPECT_EQ(*it_ref, it->at(0));
+ ++it_ref;
+ ++it;
+ }
+ }
+
+ rtc::AutoThread main_thread_;
+ std::unique_ptr<FakeDtmfObserver> observer_;
+ std::unique_ptr<FakeDtmfProvider> provider_;
+ rtc::scoped_refptr<DtmfSender> dtmf_;
+ rtc::ScopedFakeClock fake_clock_;
+};
+
+TEST_F(DtmfSenderTest, CanInsertDtmf) {
+ EXPECT_TRUE(dtmf_->CanInsertDtmf());
+ provider_->SetCanInsertDtmf(false);
+ EXPECT_FALSE(dtmf_->CanInsertDtmf());
+}
+
+TEST_F(DtmfSenderTest, InsertDtmf) {
+ std::string tones = "@1%a&*$";
+ int duration = 100;
+ int inter_tone_gap = 50;
+ EXPECT_TRUE(dtmf_->InsertDtmf(tones, duration, inter_tone_gap));
+ EXPECT_TRUE_SIMULATED_WAIT(observer_->completed(), kMaxWaitMs, fake_clock_);
+
+ // The unrecognized characters should be ignored.
+ std::string known_tones = "1a*";
+ VerifyOnProvider(known_tones, duration, inter_tone_gap);
+ VerifyOnObserver(known_tones);
+}
+
+TEST_F(DtmfSenderTest, InsertDtmfTwice) {
+ std::string tones1 = "12";
+ std::string tones2 = "ab";
+ int duration = 100;
+ int inter_tone_gap = 50;
+ EXPECT_TRUE(dtmf_->InsertDtmf(tones1, duration, inter_tone_gap));
+ VerifyExpectedState(tones1, duration, inter_tone_gap);
+ // Wait until the first tone got sent.
+ EXPECT_TRUE_SIMULATED_WAIT(observer_->tones().size() == 1, kMaxWaitMs,
+ fake_clock_);
+ VerifyExpectedState("2", duration, inter_tone_gap);
+ // Insert with another tone buffer.
+ EXPECT_TRUE(dtmf_->InsertDtmf(tones2, duration, inter_tone_gap));
+ VerifyExpectedState(tones2, duration, inter_tone_gap);
+ // Wait until it's completed.
+ EXPECT_TRUE_SIMULATED_WAIT(observer_->completed(), kMaxWaitMs, fake_clock_);
+
+ std::vector<FakeDtmfProvider::DtmfInfo> dtmf_queue_ref;
+ GetDtmfInfoFromString("1", duration, inter_tone_gap, &dtmf_queue_ref);
+ GetDtmfInfoFromString("ab", duration, inter_tone_gap, &dtmf_queue_ref);
+ VerifyOnProvider(dtmf_queue_ref);
+ VerifyOnObserver("1ab");
+}
+
+TEST_F(DtmfSenderTest, InsertDtmfWhileProviderIsDeleted) {
+ std::string tones = "@1%a&*$";
+ int duration = 100;
+ int inter_tone_gap = 50;
+ EXPECT_TRUE(dtmf_->InsertDtmf(tones, duration, inter_tone_gap));
+ // Wait until the first tone got sent.
+ EXPECT_TRUE_SIMULATED_WAIT(observer_->tones().size() == 1, kMaxWaitMs,
+ fake_clock_);
+ // Delete provider.
+ dtmf_->OnDtmfProviderDestroyed();
+ provider_.reset();
+ // The queue should be discontinued so no more tone callbacks.
+ SIMULATED_WAIT(false, 200, fake_clock_);
+ EXPECT_EQ(1U, observer_->tones().size());
+}
+
+TEST_F(DtmfSenderTest, InsertDtmfWhileSenderIsDeleted) {
+ std::string tones = "@1%a&*$";
+ int duration = 100;
+ int inter_tone_gap = 50;
+ EXPECT_TRUE(dtmf_->InsertDtmf(tones, duration, inter_tone_gap));
+ // Wait until the first tone got sent.
+ EXPECT_TRUE_SIMULATED_WAIT(observer_->tones().size() == 1, kMaxWaitMs,
+ fake_clock_);
+ // Delete the sender.
+ dtmf_ = NULL;
+ // The queue should be discontinued so no more tone callbacks.
+ SIMULATED_WAIT(false, 200, fake_clock_);
+ EXPECT_EQ(1U, observer_->tones().size());
+}
+
+TEST_F(DtmfSenderTest, InsertEmptyTonesToCancelPreviousTask) {
+ std::string tones1 = "12";
+ std::string tones2 = "";
+ int duration = 100;
+ int inter_tone_gap = 50;
+ EXPECT_TRUE(dtmf_->InsertDtmf(tones1, duration, inter_tone_gap));
+ // Wait until the first tone got sent.
+ EXPECT_TRUE_SIMULATED_WAIT(observer_->tones().size() == 1, kMaxWaitMs,
+ fake_clock_);
+ // Insert with another tone buffer.
+ EXPECT_TRUE(dtmf_->InsertDtmf(tones2, duration, inter_tone_gap));
+ // Wait until it's completed.
+ EXPECT_TRUE_SIMULATED_WAIT(observer_->completed(), kMaxWaitMs, fake_clock_);
+
+ std::vector<FakeDtmfProvider::DtmfInfo> dtmf_queue_ref;
+ GetDtmfInfoFromString("1", duration, inter_tone_gap, &dtmf_queue_ref);
+ VerifyOnProvider(dtmf_queue_ref);
+ VerifyOnObserver("1");
+}
+
+TEST_F(DtmfSenderTest, InsertDtmfWithDefaultCommaDelay) {
+ std::string tones = "3,4";
+ int duration = 100;
+ int inter_tone_gap = 50;
+ int default_comma_delay = webrtc::DtmfSender::kDtmfDefaultCommaDelayMs;
+ EXPECT_EQ(dtmf_->comma_delay(), default_comma_delay);
+ EXPECT_TRUE(dtmf_->InsertDtmf(tones, duration, inter_tone_gap));
+ EXPECT_TRUE_SIMULATED_WAIT(observer_->completed(), kMaxWaitMs, fake_clock_);
+
+ VerifyOnProvider(tones, duration, inter_tone_gap);
+ VerifyOnObserver(tones);
+ EXPECT_EQ(dtmf_->comma_delay(), default_comma_delay);
+}
+
+TEST_F(DtmfSenderTest, InsertDtmfWithNonDefaultCommaDelay) {
+ std::string tones = "3,4";
+ int duration = 100;
+ int inter_tone_gap = 50;
+ int default_comma_delay = webrtc::DtmfSender::kDtmfDefaultCommaDelayMs;
+ int comma_delay = 500;
+ EXPECT_EQ(dtmf_->comma_delay(), default_comma_delay);
+ EXPECT_TRUE(dtmf_->InsertDtmf(tones, duration, inter_tone_gap, comma_delay));
+ EXPECT_TRUE_SIMULATED_WAIT(observer_->completed(), kMaxWaitMs, fake_clock_);
+
+ VerifyOnProvider(tones, duration, inter_tone_gap, comma_delay);
+ VerifyOnObserver(tones);
+ EXPECT_EQ(dtmf_->comma_delay(), comma_delay);
+}
+
+TEST_F(DtmfSenderTest, TryInsertDtmfWhenItDoesNotWork) {
+ std::string tones = "3,4";
+ int duration = 100;
+ int inter_tone_gap = 50;
+ provider_->SetCanInsertDtmf(false);
+ EXPECT_FALSE(dtmf_->InsertDtmf(tones, duration, inter_tone_gap));
+}
+
+TEST_F(DtmfSenderTest, InsertDtmfWithInvalidDurationOrGap) {
+ std::string tones = "3,4";
+ int duration = 40;
+ int inter_tone_gap = 50;
+
+ EXPECT_FALSE(dtmf_->InsertDtmf(tones, 6001, inter_tone_gap));
+ EXPECT_FALSE(dtmf_->InsertDtmf(tones, 39, inter_tone_gap));
+ EXPECT_FALSE(dtmf_->InsertDtmf(tones, duration, 29));
+ EXPECT_FALSE(dtmf_->InsertDtmf(tones, duration, inter_tone_gap, 29));
+
+ EXPECT_TRUE(dtmf_->InsertDtmf(tones, duration, inter_tone_gap));
+}
+
+TEST_F(DtmfSenderTest, InsertDtmfSendsAfterWait) {
+ std::string tones = "ABC";
+ int duration = 100;
+ int inter_tone_gap = 50;
+ EXPECT_TRUE(dtmf_->InsertDtmf(tones, duration, inter_tone_gap));
+ VerifyExpectedState("ABC", duration, inter_tone_gap);
+ // Wait until the first tone got sent.
+ EXPECT_TRUE_SIMULATED_WAIT(observer_->tones().size() == 1, kMaxWaitMs,
+ fake_clock_);
+ VerifyExpectedState("BC", duration, inter_tone_gap);
+}
diff --git a/third_party/libwebrtc/pc/external_hmac.cc b/third_party/libwebrtc/pc/external_hmac.cc
new file mode 100644
index 0000000000..27b5d0e5ab
--- /dev/null
+++ b/third_party/libwebrtc/pc/external_hmac.cc
@@ -0,0 +1,143 @@
+/*
+ * Copyright 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/external_hmac.h"
+
+#include <stdlib.h> // For malloc/free.
+#include <string.h>
+
+#include "rtc_base/logging.h"
+#include "rtc_base/zero_memory.h"
+#include "third_party/libsrtp/include/srtp.h"
+
+// Begin test case 0
+static const uint8_t kExternalHmacTestCase0Key[20] = {
+ 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b,
+ 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b};
+
+static const uint8_t kExternalHmacTestCase0Data[8] = {
+ 0x48, 0x69, 0x20, 0x54, 0x68, 0x65, 0x72, 0x65 // "Hi There"
+};
+
+static const uint8_t kExternalHmacFakeTag[10] = {0xba, 0xdd, 0xba, 0xdd, 0xba,
+ 0xdd, 0xba, 0xdd, 0xba, 0xdd};
+
+static const srtp_auth_test_case_t kExternalHmacTestCase0 = {
+ 20, // Octets in key
+ const_cast<uint8_t*>(kExternalHmacTestCase0Key), // Key
+ 8, // Octets in data
+ const_cast<uint8_t*>(kExternalHmacTestCase0Data), // Data
+ 10, // Octets in tag
+ const_cast<uint8_t*>(kExternalHmacFakeTag), // Tag
+ NULL // Pointer to next
+ // testcase
+};
+
+static const char kExternalHmacDescription[] =
+ "external hmac sha-1 authentication";
+
+// srtp_auth_type_t external_hmac is the hmac metaobject
+
+static const srtp_auth_type_t external_hmac = {
+ external_hmac_alloc,
+ external_hmac_dealloc,
+ external_hmac_init,
+ external_hmac_compute,
+ external_hmac_update,
+ external_hmac_start,
+ const_cast<char*>(kExternalHmacDescription),
+ const_cast<srtp_auth_test_case_t*>(&kExternalHmacTestCase0),
+ EXTERNAL_HMAC_SHA1};
+
+srtp_err_status_t external_hmac_alloc(srtp_auth_t** a,
+ int key_len,
+ int out_len) {
+ uint8_t* pointer;
+
+ // Check key length - note that we don't support keys larger
+ // than 20 bytes yet
+ if (key_len > 20)
+ return srtp_err_status_bad_param;
+
+  // Check output length - should be less than 20 bytes.
+ if (out_len > 20)
+ return srtp_err_status_bad_param;
+
+ // Allocate memory for auth and hmac_ctx_t structures.
+ pointer = new uint8_t[(sizeof(ExternalHmacContext) + sizeof(srtp_auth_t))];
+ if (pointer == NULL)
+ return srtp_err_status_alloc_fail;
+
+ // Set pointers
+ *a = reinterpret_cast<srtp_auth_t*>(pointer);
+ // `external_hmac` is const and libsrtp expects `type` to be non-const.
+ // const conversion is required. `external_hmac` is constant because we don't
+ // want to increase global count in Chrome.
+ (*a)->type = const_cast<srtp_auth_type_t*>(&external_hmac);
+ (*a)->state = pointer + sizeof(srtp_auth_t);
+ (*a)->out_len = out_len;
+ (*a)->key_len = key_len;
+ (*a)->prefix_len = 0;
+
+ return srtp_err_status_ok;
+}
+
+srtp_err_status_t external_hmac_dealloc(srtp_auth_t* a) {
+ rtc::ExplicitZeroMemory(a, sizeof(ExternalHmacContext) + sizeof(srtp_auth_t));
+
+ // Free memory
+ delete[] a;
+
+ return srtp_err_status_ok;
+}
+
+srtp_err_status_t external_hmac_init(void* state,
+ const uint8_t* key,
+ int key_len) {
+ if (key_len > HMAC_KEY_LENGTH)
+ return srtp_err_status_bad_param;
+
+ ExternalHmacContext* context = static_cast<ExternalHmacContext*>(state);
+ memcpy(context->key, key, key_len);
+ context->key_length = key_len;
+ return srtp_err_status_ok;
+}
+
+srtp_err_status_t external_hmac_start(void* /*state*/) {
+ return srtp_err_status_ok;
+}
+
+srtp_err_status_t external_hmac_update(void* /*state*/,
+ const uint8_t* /*message*/,
+ int /*msg_octets*/) {
+ return srtp_err_status_ok;
+}
+
+srtp_err_status_t external_hmac_compute(void* /*state*/,
+ const uint8_t* /*message*/,
+ int /*msg_octets*/,
+ int tag_len,
+ uint8_t* result) {
+ memcpy(result, kExternalHmacFakeTag, tag_len);
+ return srtp_err_status_ok;
+}
+
+srtp_err_status_t external_crypto_init() {
+ // `external_hmac` is const. const_cast is required as libsrtp expects
+ // non-const.
+ srtp_err_status_t status = srtp_replace_auth_type(
+ const_cast<srtp_auth_type_t*>(&external_hmac), EXTERNAL_HMAC_SHA1);
+ if (status) {
+ RTC_LOG(LS_ERROR) << "Error in replacing default auth module, error: "
+ << status;
+ return srtp_err_status_fail;
+ }
+ return srtp_err_status_ok;
+}
diff --git a/third_party/libwebrtc/pc/external_hmac.h b/third_party/libwebrtc/pc/external_hmac.h
new file mode 100644
index 0000000000..c5071fc192
--- /dev/null
+++ b/third_party/libwebrtc/pc/external_hmac.h
@@ -0,0 +1,72 @@
+/*
+ * Copyright 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_EXTERNAL_HMAC_H_
+#define PC_EXTERNAL_HMAC_H_
+
+// External libsrtp HMAC auth module which implements methods defined in
+// auth_type_t.
+// The default auth module will be replaced only when the ENABLE_EXTERNAL_AUTH
+// flag is enabled. This allows us to access the authentication keys,
+// as the default auth implementation doesn't provide access and avoids
+// hashing each packet twice.
+
+// How will libsrtp select this module?
+// Libsrtp defines authentication function types identified by an unsigned
+// integer, e.g. SRTP_HMAC_SHA1 is 3. Using authentication ids, the
+// application can plug any desired authentication modules into libsrtp.
+// libsrtp also provides a mechanism to select different auth functions for
+// individual streams. This can be done by setting the right value in
+// the auth_type of srtp_policy_t. The application must first register auth
+// functions and the corresponding authentication id using the
+// crypto_kernel_replace_auth_type function.
+
+#include <stdint.h>
+
+#include "third_party/libsrtp/crypto/include/crypto_types.h"
+#include "third_party/libsrtp/include/srtp.h"
+#include "third_party/libsrtp/include/srtp_priv.h"
+
+#define EXTERNAL_HMAC_SHA1 SRTP_HMAC_SHA1 + 1
+#define HMAC_KEY_LENGTH 20
+
+// The HMAC context structure used to store authentication keys.
+// The pointer to the key will be allocated in the external_hmac_init function.
+// This pointer is owned by srtp_t in a template context.
+typedef struct {
+ uint8_t key[HMAC_KEY_LENGTH];
+ int key_length;
+} ExternalHmacContext;
+
+srtp_err_status_t external_hmac_alloc(srtp_auth_t** a,
+ int key_len,
+ int out_len);
+
+srtp_err_status_t external_hmac_dealloc(srtp_auth_t* a);
+
+srtp_err_status_t external_hmac_init(void* state,
+ const uint8_t* key,
+ int key_len);
+
+srtp_err_status_t external_hmac_start(void* state);
+
+srtp_err_status_t external_hmac_update(void* state,
+ const uint8_t* message,
+ int msg_octets);
+
+srtp_err_status_t external_hmac_compute(void* state,
+ const uint8_t* message,
+ int msg_octets,
+ int tag_len,
+ uint8_t* result);
+
+srtp_err_status_t external_crypto_init();
+
+#endif // PC_EXTERNAL_HMAC_H_
diff --git a/third_party/libwebrtc/pc/g3doc/dtls_transport.md b/third_party/libwebrtc/pc/g3doc/dtls_transport.md
new file mode 100644
index 0000000000..28d6739413
--- /dev/null
+++ b/third_party/libwebrtc/pc/g3doc/dtls_transport.md
@@ -0,0 +1,53 @@
+<!-- go/cmark -->
+<!--* freshness: {owner: 'hta' reviewed: '2021-05-07'} *-->
+
+## Overview
+
+WebRTC uses DTLS in two ways:
+
+* to negotiate keys for SRTP encryption using
+ [DTLS-SRTP](https://www.rfc-editor.org/info/rfc5763)
+* as a transport for SCTP which is used by the Datachannel API
+
+The W3C WebRTC API represents this as the
+[DtlsTransport](https://w3c.github.io/webrtc-pc/#rtcdtlstransport-interface).
+
+The DTLS handshake happens after the ICE transport becomes writable and has
+found a valid pair. It results in a set of keys being derived for DTLS-SRTP as
+well as a fingerprint of the remote certificate which is compared to the one
+given in the SDP `a=fingerprint:` line.
+
+This documentation provides an overview of how DTLS is implemented, i.e how the
+following classes interact.
+
+## webrtc::DtlsTransport
+
+The [`webrtc::DtlsTransport`][1] class is a wrapper around the
+`cricket::DtlsTransportInternal` and allows registering observers implementing
+the `webrtc::DtlsTransportObserverInterface`. The
+[`webrtc::DtlsTransportObserverInterface`][2] will provide updates to the
+observers, passing around a snapshot of the transports state such as the
+connection state, the remote certificate(s) and the SRTP ciphers as
+[`DtlsTransportInformation`][3].
+
+## cricket::DtlsTransportInternal
+
+The [`cricket::DtlsTransportInternal`][4] class is an interface. Its
+implementation is [`cricket::DtlsTransport`][5]. The `cricket::DtlsTransport`
+sends and receives network packets via an ICE transport. It also demultiplexes
+DTLS packets and SRTP packets according to the scheme described in
+[RFC 5764](https://tools.ietf.org/html/rfc5764#section-5.1.2).
+
+## webrtc::DtlsSrtpTransport
+
+The [`webrtc::DtlsSrtpTransport`][6] class is responsible for extracting the
+SRTP keys after the DTLS handshake as well as protection and unprotection of
+SRTP packets via its [`cricket::SrtpSession`][7].
+
+[1]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/pc/dtls_transport.h;l=32;drc=6a55e7307b78edb50f94a1ff1ef8393d58218369
+[2]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/api/dtls_transport_interface.h;l=76;drc=34437d5660a80393d631657329ef74c6538be25a
+[3]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/api/dtls_transport_interface.h;l=41;drc=34437d5660a80393d631657329ef74c6538be25a
+[4]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/p2p/base/dtls_transport_internal.h;l=63;drc=34437d5660a80393d631657329ef74c6538be25a
+[5]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/p2p/base/dtls_transport.h;l=94;drc=653bab6790ac92c513b7cf4cd3ad59039c589a95
+[6]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/pc/dtls_srtp_transport.h;l=31;drc=c32f00ea9ddf3267257fe6b45d4d79c6f6bcb829
+[7]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/pc/srtp_session.h;l=33;drc=be66d95ab7f9428028806bbf66cb83800bda9241
diff --git a/third_party/libwebrtc/pc/g3doc/peer_connection.md b/third_party/libwebrtc/pc/g3doc/peer_connection.md
new file mode 100644
index 0000000000..cd01265cff
--- /dev/null
+++ b/third_party/libwebrtc/pc/g3doc/peer_connection.md
@@ -0,0 +1,59 @@
+<!-- go/cmark -->
+<!--* freshness: {owner: 'hta' reviewed: '2021-05-07'} *-->
+
+# PeerConnection and friends
+
+The PeerConnection is the C++-level implementation of the Javascript
+object "RTCPeerConnection" from the
+[WEBRTC specification](https://w3c.github.io/webrtc-pc/).
+
+Like many objects in WebRTC, the PeerConnection is used via a factory and an
+observer:
+
+ * PeerConnectionFactory, which is created via a static Create method and takes
+ a PeerConnectionFactoryDependencies structure listing such things as
+ non-default threads and factories for use by all PeerConnections using
+ the same factory. (Using more than one factory should be avoided, since
+ it takes more resources.)
+ * PeerConnection itself, which is created by the method called
+ PeerConnectionFactory::CreatePeerConnectionOrError, and takes a
+ PeerConnectionInterface::RTCConfiguration argument, as well as
+ a PeerConnectionDependencies (even more factories, plus other stuff).
+ * PeerConnectionObserver (a member of PeerConnectionDependencies), which
+ contains the functions that will be called on events in the PeerConnection
+
+These types are visible in the API.
+
+## Internal structure of PeerConnection and friends
+
+The PeerConnection is, to a large extent, a "God object" - most things
+that are done in WebRTC require a PeerConnection.
+
+Internally, it is divided into several objects, each with its own
+responsibilities, all of which are owned by the PeerConnection and live
+as long as the PeerConnection:
+
+ * SdpOfferAnswerHandler takes care of negotiating configurations with
+ a remote peer, using SDP-formatted descriptions.
+ * RtpTransmissionManager takes care of the lists of RtpSenders,
+ RtpReceivers and RtpTransceivers that form the heart of the transmission
+ service.
+ * DataChannelController takes care of managing the PeerConnection's
+ DataChannels and its SctpTransport.
+ * JsepTransportController takes care of configuring the details of senders
+ and receivers.
+ * Call does management of overall call state.
+ * RtcStatsCollector (and its obsolete sibling, StatsCollector) collects
+ statistics from all the objects comprising the PeerConnection when
+ requested.
+
+There are a number of other smaller objects that are also owned by
+the PeerConnection, but it would take too much space to describe them
+all here; please consult the .h files.
+
+PeerConnectionFactory owns an object called ConnectionContext, and a
+reference to this is passed to each PeerConnection. It is referenced
+via an rtc::scoped_refptr, which means that it is guaranteed to be
+alive as long as either the factory or one of the PeerConnections
+is using it.
+
diff --git a/third_party/libwebrtc/pc/g3doc/rtp.md b/third_party/libwebrtc/pc/g3doc/rtp.md
new file mode 100644
index 0000000000..6dd7b99b09
--- /dev/null
+++ b/third_party/libwebrtc/pc/g3doc/rtp.md
@@ -0,0 +1,99 @@
+<!-- go/cmark -->
+<!--* freshness: {owner: 'hta' reviewed: '2021-06-03'} *-->
+
+# RTP in WebRTC
+
+WebRTC uses the RTP protocol described in
+[RFC3550](https://datatracker.ietf.org/doc/html/rfc3550) for transporting audio
+and video. Media is encrypted using [SRTP](./srtp.md).
+
+## Allocation of payload types
+
+RTP packets have a payload type field that describes which media codec can be
+used to handle a packet. For some (older) codecs like PCMU the payload type is
+assigned statically as described in
+[RFC3551](https://datatracker.ietf.org/doc/html/rfc3551). For others, it is
+assigned dynamically through the SDP. **Note:** there are no guarantees on the
+stability of a payload type assignment.
+
+For this allocation, the range from 96 to 127 is used. When this range is
+exhausted, the allocation falls back to the range from 35 to 63 as permitted by
+[section 5.1 of RFC3550][1]. Note that older versions of WebRTC failed to
+recognize payload types in the lower range. Newer codecs (such as flexfec-03 and
+AV1) will by default be allocated in that range.
+
+Payload types in the range 64 to 95 are not used to avoid confusion with RTCP as
+described in [RFC5761](https://datatracker.ietf.org/doc/html/rfc5761).
+
+## Allocation of audio payload types
+
+Audio payload types are assigned from a table by the [PayloadTypeMapper][2]
+class. New audio codecs should be allocated in the lower dynamic range [35,63],
+starting at 63, to reduce collisions with payload types.
+
+## Allocation of video payload types
+
+Video payload types are allocated by the
+[GetPayloadTypesAndDefaultCodecs method][3]. The set of codecs depends on the
+platform, in particular for H264 codecs and their different profiles. Payload
+numbers are assigned ascending from 96 for video codecs and their
+[associated retransmission format](https://datatracker.ietf.org/doc/html/rfc4588).
+Some codecs like flexfec-03 and AV1 are assigned to the lower range [35,63] for
+reasons explained above. When the upper range [96,127] is exhausted, payload
+types are assigned to the lower range [35,63], starting at 35.
+
+## Handling of payload type collisions
+
+Due to the requirement that payload types must be uniquely identifiable when
+using [BUNDLE](https://datatracker.ietf.org/doc/html/rfc8829) collisions between
+the assignments of the audio and video payload types may arise. These are
+resolved by the [UsedPayloadTypes][4] class which will reassign payload type
+numbers descending from 127.
+
+# Bitrate probing
+
+Bandwidth estimation sometimes requires sending RTP packets to ramp up the
+estimate above a certain threshold. WebRTC uses two techniques for that:
+
+* Packets that only consist of RTP padding
+* RTX packets
+
+At the receiving end, both types of probing packets do not interfere with media processing.
+After being taken into account for bandwidth estimation, probing packets only consisting
+of padding can be dropped and RTX packets act as redundancy.
+
+Using RTX for probing is generally preferred as padding probes are limited to 256 bytes
+of RTP payload which results in a larger number of packets being used for probing which
+is a disadvantage from a congestion control point of view.
+
+## Padding probes
+
+Padding probes consist of RTP packets with header extensions (either abs-send time or
+the transport-wide-cc sequence number) and set the RTP "P" bit. The last byte of the
+RTP payload which specifies the amount of padding is set to 255 and preceded by 255
+bytes of all zeroes. See [section 5.1 of RFC3550][1].
+Padding probes use the RTX RTP stream (i.e. payload type, sequence number and timestamp)
+when RTX is negotiated or share the same RTP stream as the media packets otherwise.
+
+Padding probes are used either when
+* RTX is not negotiated (such as for audio, less commonly for video) or
+* no suitable original packet is found for RTX probing.
+
+Padding probes should not be interleaved with packets of a video frame.
+
+## RTX probes
+
+RTX probes are resends of previous packets that use RTX retransmissions specified in
+[RFC4588](https://www.rfc-editor.org/rfc/rfc4588) as payload format when negotiated with
+the peer. These packets will have a different abs-send-time or transport-wide-cc sequence
+number and use the RTX RTP stream (i.e. RTX payload type, sequence number and timestamp)
+associated with the media RTP stream.
+
+RTX probing uses recently sent RTP packets that have not yet been acknowledged by
+the remote side. Sending these packets again has a small chance of being useful when the
+original packet is lost and will not affect RTP processing at the receiver otherwise.
+
+[1]: https://datatracker.ietf.org/doc/html/rfc3550#section-5.1
+[2]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/media/engine/payload_type_mapper.cc;l=25;drc=4f26a3c7e8e20e0e0ca4ca67a6ebdf3f5543dc3f
+[3]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/media/engine/webrtc_video_engine.cc;l=119;drc=b412efdb780c86e6530493afa403783d14985347
+[4]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/pc/used_ids.h;l=94;drc=b412efdb780c86e6530493afa403783d14985347
diff --git a/third_party/libwebrtc/pc/g3doc/sctp_transport.md b/third_party/libwebrtc/pc/g3doc/sctp_transport.md
new file mode 100644
index 0000000000..100eb92e47
--- /dev/null
+++ b/third_party/libwebrtc/pc/g3doc/sctp_transport.md
@@ -0,0 +1,42 @@
+<!-- go/cmark -->
+<!--* freshness: {owner: 'hta' reviewed: '2021-04-13'} *-->
+
+# SctpTransport
+
+## webrtc::SctpTransport
+
+The [`webrtc::SctpTransport`](https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/pc/sctp_transport.h;l=33?q=class%20webrtc::SctpTransport) class encapsulates an SCTP association, and exposes a
+few properties of this association to the WebRTC user (such as Chrome).
+
+The SctpTransport is used to support data channels, as described in the [WebRTC
+specification for the Peer-to-peer Data
+API](https://w3c.github.io/webrtc-pc/#peer-to-peer-data-api).
+
+The public interface ([`webrtc::SctpTransportInterface`](https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/api/sctp_transport_interface.h?q=webrtc::SctpTransportInterface)) exposes an observer
+interface where the user can define a callback to be called whenever the state
+of an SctpTransport changes; this callback is called on the network thread (as
+set during PeerConnectionFactory initialization).
+
+The implementation of this object lives in pc/sctp_transport.{h,cc}, and is
+basically a wrapper around a `cricket::SctpTransportInternal`, hiding its
+implementation details and APIs that shouldn't be accessed from the user.
+
+The `webrtc::SctpTransport` is a ref counted object; it should be regarded
+as owned by the PeerConnection, and will be closed when the PeerConnection
+closes, but the object itself may survive longer than the PeerConnection.
+
+## cricket::SctpTransportInternal
+
+[`cricket::SctpTransportInternal`](https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/media/sctp/sctp_transport_internal.h?q=cricket::SctpTransportInternal) owns two objects: The SCTP association object
+and the DTLS transport, which is the object used to send and receive messages
+as emitted from or consumed by the sctp library.
+
+It communicates state changes and events using sigslot.
+
+See header files for details.
+
+
+
+
+
+
diff --git a/third_party/libwebrtc/pc/g3doc/srtp.md b/third_party/libwebrtc/pc/g3doc/srtp.md
new file mode 100644
index 0000000000..eb457efacf
--- /dev/null
+++ b/third_party/libwebrtc/pc/g3doc/srtp.md
@@ -0,0 +1,72 @@
+<!-- go/cmark -->
+<!--* freshness: {owner: 'hta' reviewed: '2021-05-13'} *-->
+
+# SRTP in WebRTC
+
+WebRTC mandates encryption of media by means of the Secure Real-time Transport
+Protocol, or SRTP, which is described in
+[RFC 3711](https://datatracker.ietf.org/doc/html/rfc3711).
+
+The key negotiation in WebRTC happens using DTLS-SRTP which is described in
+[RFC 5764](https://datatracker.ietf.org/doc/html/rfc5764). The older
+[SDES protocol](https://datatracker.ietf.org/doc/html/rfc4568) is implemented
+but not enabled by default.
+
+Unencrypted RTP can be enabled for debugging purposes by setting the
+PeerConnections [`disable_encryption`][1] option to true.
+
+## Supported cipher suites
+
+The implementation supports the following cipher suites:
+
+* SRTP_AES128_CM_HMAC_SHA1_80
+* SRTP_AEAD_AES_128_GCM
+* SRTP_AEAD_AES_256_GCM
+
+The SRTP_AES128_CM_HMAC_SHA1_32 cipher suite is accepted for audio-only
+connections if offered by the other side. It is not actively supported, see
+[SelectCrypto][2] for details.
+
+The cipher suite ordering allows a non-WebRTC peer to prefer GCM cipher suites,
+however they are not selected as default by two instances of the WebRTC library.
+
+## cricket::SrtpSession
+
+The [`cricket::SrtpSession`][3] is providing encryption and decryption of SRTP
+packets using [`libsrtp`](https://github.com/cisco/libsrtp). Keys will be
+provided by `SrtpTransport` or `DtlsSrtpTransport` in the [`SetSend`][4] and
+[`SetRecv`][5] methods.
+
+Encryption and decryption happens in-place in the [`ProtectRtp`][6],
+[`ProtectRtcp`][7], [`UnprotectRtp`][8] and [`UnprotectRtcp`][9] methods. The
+`SrtpSession` class also takes care of initializing and deinitializing `libsrtp`
+by keeping track of how many instances are being used.
+
+## webrtc::SrtpTransport and webrtc::DtlsSrtpTransport
+
+The [`webrtc::SrtpTransport`][10] class is controlling the `SrtpSession`
+instances for RTP and RTCP. When
+[rtcp-mux](https://datatracker.ietf.org/doc/html/rfc5761) is used, the
+`SrtpSession` for RTCP is not needed.
+
+[`webrtc::DtlsSrtpTransport`][11] is a subclass of the `SrtpTransport` that
+extracts the keying material when the DTLS handshake is done and configures it
+in its base class. It will also become writable only once the DTLS handshake is
+done.
+
+## cricket::SrtpFilter
+
+The [`cricket::SrtpFilter`][12] class is used to negotiate SDES.
+
+[1]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/api/peer_connection_interface.h;l=1413;drc=f467b445631189557d44de86a77ca6a0c3e2108d
+[2]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/pc/media_session.cc;l=297;drc=3ac73bd0aa5322abee98f1ff8705af64a184bf61
+[3]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/pc/srtp_session.h;l=33;drc=be66d95ab7f9428028806bbf66cb83800bda9241
+[4]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/pc/srtp_session.h;l=40;drc=be66d95ab7f9428028806bbf66cb83800bda9241
+[5]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/pc/srtp_session.h;l=51;drc=be66d95ab7f9428028806bbf66cb83800bda9241
+[6]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/pc/srtp_session.h;l=62;drc=be66d95ab7f9428028806bbf66cb83800bda9241
+[7]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/pc/srtp_session.h;l=69;drc=be66d95ab7f9428028806bbf66cb83800bda9241
+[8]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/pc/srtp_session.h;l=72;drc=be66d95ab7f9428028806bbf66cb83800bda9241
+[9]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/pc/srtp_session.h;l=73;drc=be66d95ab7f9428028806bbf66cb83800bda9241
+[10]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/pc/srtp_transport.h;l=37;drc=a4d873786f10eedd72de25ad0d94ad7c53c1f68a
+[11]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/pc/dtls_srtp_transport.h;l=31;drc=2f8e0536eb97ce2131e7a74e3ca06077aa0b64b3
+[12]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/pc/srtp_filter.h;drc=d15a575ec3528c252419149d35977e55269d8a41
diff --git a/third_party/libwebrtc/pc/ice_server_parsing.cc b/third_party/libwebrtc/pc/ice_server_parsing.cc
new file mode 100644
index 0000000000..896305c54b
--- /dev/null
+++ b/third_party/libwebrtc/pc/ice_server_parsing.cc
@@ -0,0 +1,360 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/ice_server_parsing.h"
+
+#include <stddef.h>
+
+#include <cctype> // For std::isdigit.
+#include <string>
+#include <tuple>
+
+#include "p2p/base/port_interface.h"
+#include "rtc_base/arraysize.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/ip_address.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/socket_address.h"
+#include "rtc_base/string_encode.h"
+#include "rtc_base/string_to_number.h"
+
+namespace webrtc {
+
+namespace {
+// Number of '?'-separated tokens present when a TURN uri carries a
+// "?transport=" parameter (the uri itself plus the parameter).
+const size_t kTurnTransportTokensNum = 2;
+// The default stun port.
+const int kDefaultStunPort = 3478;
+// Default port used when TLS is selected via the "turns" scheme.
+const int kDefaultStunTlsPort = 5349;
+// Expected key of the "?transport=..." query parameter.
+const char kTransport[] = "transport";
+
+// Allowed characters in hostname per RFC 3986 Appendix A "reg-name"
+const char kRegNameCharacters[] =
+    "abcdefghijklmnopqrstuvwxyz"
+    "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
+    "0123456789"
+    "-._~" // unreserved
+    "%" // pct-encoded
+    "!$&'()*+,;="; // sub-delims
+
+// NOTE: Must be in the same order as the ServiceType enum.
+const char* kValidIceServiceTypes[] = {"stun", "stuns", "turn", "turns"};
+
+// NOTE: A loop below assumes that the first value of this enum is 0 and all
+// other values are incremental.
+enum class ServiceType {
+  STUN = 0, // Indicates a STUN server.
+  STUNS, // Indicates a STUN server used with a TLS session.
+  TURN, // Indicates a TURN server.
+  TURNS, // Indicates a TURN server used with a TLS session.
+  INVALID, // Unknown.
+};
+static_assert(static_cast<size_t>(ServiceType::INVALID) ==
+                  arraysize(kValidIceServiceTypes),
+              "kValidIceServiceTypes must have as many strings as ServiceType "
+              "has values.");
+
+// `in_str` should follow of RFC 7064/7065 syntax, but with an optional
+// "?transport=" already stripped. I.e.,
+// stunURI = scheme ":" host [ ":" port ]
+// scheme = "stun" / "stuns" / "turn" / "turns"
+// host = IP-literal / IPv4address / reg-name
+// port = *DIGIT
+
+// Return tuple is service_type, host, with service_type == ServiceType::INVALID
+// on failure.
+std::tuple<ServiceType, absl::string_view> GetServiceTypeAndHostnameFromUri(
+    absl::string_view in_str) {
+  const auto colonpos = in_str.find(':');
+  if (colonpos == absl::string_view::npos) {
+    RTC_LOG(LS_WARNING) << "Missing ':' in ICE URI: " << in_str;
+    return {ServiceType::INVALID, ""};
+  }
+  if ((colonpos + 1) == in_str.length()) {
+    RTC_LOG(LS_WARNING) << "Empty hostname in ICE URI: " << in_str;
+    return {ServiceType::INVALID, ""};
+  }
+  // Match the scheme against the known service types. The loop index doubles
+  // as the ServiceType value; see the static_assert pairing the enum with
+  // kValidIceServiceTypes above.
+  for (size_t i = 0; i < arraysize(kValidIceServiceTypes); ++i) {
+    if (in_str.compare(0, colonpos, kValidIceServiceTypes[i]) == 0) {
+      return {static_cast<ServiceType>(i), in_str.substr(colonpos + 1)};
+    }
+  }
+  return {ServiceType::INVALID, ""};
+}
+
+// Parses `in_str` as a base-10 port number. Returns absl::nullopt when the
+// string is empty, contains any non-digit character, or does not fit in int.
+absl::optional<int> ParsePort(absl::string_view in_str) {
+  // Make sure port only contains digits. StringToNumber doesn't check this.
+  for (const char& c : in_str) {
+    if (!std::isdigit(static_cast<unsigned char>(c))) {
+      // BUG FIX: this used to be `return false;`, which implicitly converted
+      // bool -> int and produced an *engaged* optional holding 0 rather than
+      // "no value"; rejection of malformed ports then relied on the caller's
+      // later port-range check. Report the parse failure directly instead.
+      return absl::nullopt;
+    }
+  }
+  return rtc::StringToNumber<int>(in_str);
+}
+
+// This method parses IPv6 and IPv4 literal strings, along with hostnames in
+// standard hostname:port format.
+// Consider following formats as correct.
+// `hostname:port`, `[IPv6 address]:port`, `IPv4 address:port`,
+// `hostname`, `[IPv6 address]`, `IPv4 address`.
+
+// Return tuple is success, host, port.
+std::tuple<bool, absl::string_view, int> ParseHostnameAndPortFromString(
+    absl::string_view in_str,
+    int default_port) {
+  if (in_str.empty()) {
+    return {false, "", 0};
+  }
+  absl::string_view host;
+  int port = default_port;
+
+  if (in_str.at(0) == '[') {
+    // IP_literal syntax
+    auto closebracket = in_str.rfind(']');
+    if (closebracket == absl::string_view::npos) {
+      return {false, "", 0};
+    }
+    auto colonpos = in_str.find(':', closebracket);
+    if (absl::string_view::npos != colonpos) {
+      // NOTE(review): `closebracket + 2` assumes the ':' immediately follows
+      // ']'. Inputs with junk between ']' and ':' (e.g. "[::1]junk:1000") are
+      // still rejected, but only because the mis-sliced substring fails the
+      // port parse — confirm this is intended.
+      if (absl::optional<int> opt_port =
+              ParsePort(in_str.substr(closebracket + 2))) {
+        port = *opt_port;
+      } else {
+        return {false, "", 0};
+      }
+    }
+    // Strip the enclosing brackets from the returned host.
+    host = in_str.substr(1, closebracket - 1);
+  } else {
+    // IPv4address or reg-name syntax
+    auto colonpos = in_str.find(':');
+    if (absl::string_view::npos != colonpos) {
+      if (absl::optional<int> opt_port =
+              ParsePort(in_str.substr(colonpos + 1))) {
+        port = *opt_port;
+      } else {
+        return {false, "", 0};
+      }
+      host = in_str.substr(0, colonpos);
+    } else {
+      host = in_str;
+    }
+    // RFC 3986 section 3.2.2 and Appendix A - "reg-name" syntax
+    if (host.find_first_not_of(kRegNameCharacters) != absl::string_view::npos) {
+      return {false, "", 0};
+    }
+  }
+  return {!host.empty(), host, port};
+}
+
+// Adds a STUN or TURN server to the appropriate list,
+// by parsing `url` and using the username/password in `server`.
+RTCError ParseIceServerUrl(
+    const PeerConnectionInterface::IceServer& server,
+    absl::string_view url,
+    cricket::ServerAddresses* stun_servers,
+    std::vector<cricket::RelayServerConfig>* turn_servers) {
+  // RFC 7064
+  // stunURI = scheme ":" host [ ":" port ]
+  // scheme = "stun" / "stuns"
+
+  // RFC 7065
+  // turnURI = scheme ":" host [ ":" port ]
+  // [ "?transport=" transport ]
+  // scheme = "turn" / "turns"
+  // transport = "udp" / "tcp" / transport-ext
+  // transport-ext = 1*unreserved
+
+  // RFC 3986
+  // host = IP-literal / IPv4address / reg-name
+  // port = *DIGIT
+
+  RTC_DCHECK(stun_servers != nullptr);
+  RTC_DCHECK(turn_servers != nullptr);
+  cricket::ProtocolType turn_transport_type = cricket::PROTO_UDP;
+  RTC_DCHECK(!url.empty());
+  std::vector<absl::string_view> tokens = rtc::split(url, '?');
+  absl::string_view uri_without_transport = tokens[0];
+  // Let's look into transport= param, if it exists.
+  if (tokens.size() == kTurnTransportTokensNum) { // ?transport= is present.
+    std::vector<absl::string_view> transport_tokens =
+        rtc::split(tokens[1], '=');
+    if (transport_tokens[0] != kTransport) {
+      LOG_AND_RETURN_ERROR(
+          RTCErrorType::SYNTAX_ERROR,
+          "ICE server parsing failed: Invalid transport parameter key.");
+    }
+    if (transport_tokens.size() < 2) {
+      LOG_AND_RETURN_ERROR(
+          RTCErrorType::SYNTAX_ERROR,
+          "ICE server parsing failed: Transport parameter missing value.");
+    }
+
+    absl::optional<cricket::ProtocolType> proto =
+        cricket::StringToProto(transport_tokens[1]);
+    if (!proto ||
+        (*proto != cricket::PROTO_UDP && *proto != cricket::PROTO_TCP)) {
+      LOG_AND_RETURN_ERROR(
+          RTCErrorType::SYNTAX_ERROR,
+          "ICE server parsing failed: Transport parameter should "
+          "always be udp or tcp.");
+    }
+    turn_transport_type = *proto;
+  }
+
+  auto [service_type, hoststring] =
+      GetServiceTypeAndHostnameFromUri(uri_without_transport);
+  if (service_type == ServiceType::INVALID) {
+    RTC_LOG(LS_ERROR) << "Invalid transport parameter in ICE URI: " << url;
+    LOG_AND_RETURN_ERROR(
+        RTCErrorType::SYNTAX_ERROR,
+        "ICE server parsing failed: Invalid transport parameter in ICE URI");
+  }
+
+  // GetServiceTypeAndHostnameFromUri should never give an empty hoststring
+  RTC_DCHECK(!hoststring.empty());
+
+  // stun with ?transport (or any ?) is not valid.
+  if ((service_type == ServiceType::STUN ||
+       service_type == ServiceType::STUNS) &&
+      tokens.size() > 1) {
+    LOG_AND_RETURN_ERROR(
+        RTCErrorType::SYNTAX_ERROR,
+        "ICE server parsing failed: Invalid stun url with query parameters");
+  }
+
+  int default_port = kDefaultStunPort;
+  // NOTE(review): only "turns" switches to the TLS default port (5349);
+  // "stuns" keeps 3478 here — confirm that is intended.
+  if (service_type == ServiceType::TURNS) {
+    default_port = kDefaultStunTlsPort;
+    turn_transport_type = cricket::PROTO_TLS;
+  }
+
+  if (hoststring.find('@') != absl::string_view::npos) {
+    RTC_LOG(LS_ERROR) << "Invalid url with long deprecated user@host syntax: "
+                      << uri_without_transport;
+    LOG_AND_RETURN_ERROR(RTCErrorType::SYNTAX_ERROR,
+                         "ICE server parsing failed: Invalid url with long "
+                         "deprecated user@host syntax");
+  }
+
+  auto [success, address, port] =
+      ParseHostnameAndPortFromString(hoststring, default_port);
+  if (!success) {
+    RTC_LOG(LS_ERROR) << "Invalid hostname format: " << uri_without_transport;
+    LOG_AND_RETURN_ERROR(RTCErrorType::SYNTAX_ERROR,
+                         "ICE server parsing failed: Invalid hostname format");
+  }
+
+  if (port <= 0 || port > 0xffff) {
+    RTC_LOG(LS_ERROR) << "Invalid port: " << port;
+    LOG_AND_RETURN_ERROR(RTCErrorType::SYNTAX_ERROR,
+                         "ICE server parsing failed: Invalid port");
+  }
+
+  switch (service_type) {
+    case ServiceType::STUN:
+    case ServiceType::STUNS:
+      stun_servers->insert(rtc::SocketAddress(address, port));
+      break;
+    case ServiceType::TURN:
+    case ServiceType::TURNS: {
+      if (server.username.empty() || server.password.empty()) {
+        // The WebRTC spec requires throwing an InvalidAccessError when username
+        // or credential are omitted; this is the native equivalent.
+        LOG_AND_RETURN_ERROR(
+            RTCErrorType::INVALID_PARAMETER,
+            "ICE server parsing failed: TURN server with empty "
+            "username or password");
+      }
+      // If the hostname field is not empty, then the server address must be
+      // the resolved IP for that host, the hostname is needed later for TLS
+      // handshake (SNI and Certificate verification).
+      absl::string_view hostname =
+          server.hostname.empty() ? address : server.hostname;
+      rtc::SocketAddress socket_address(hostname, port);
+      if (!server.hostname.empty()) {
+        rtc::IPAddress ip;
+        if (!IPFromString(address, &ip)) {
+          // When hostname is set, the server address must be a
+          // resolved ip address.
+          LOG_AND_RETURN_ERROR(
+              RTCErrorType::INVALID_PARAMETER,
+              "ICE server parsing failed: "
+              "IceServer has hostname field set, but URI does not "
+              "contain an IP address.");
+        }
+        socket_address.SetResolvedIP(ip);
+      }
+      cricket::RelayServerConfig config =
+          cricket::RelayServerConfig(socket_address, server.username,
+                                     server.password, turn_transport_type);
+      if (server.tls_cert_policy ==
+          PeerConnectionInterface::kTlsCertPolicyInsecureNoCheck) {
+        config.tls_cert_policy =
+            cricket::TlsCertPolicy::TLS_CERT_POLICY_INSECURE_NO_CHECK;
+      }
+      config.tls_alpn_protocols = server.tls_alpn_protocols;
+      config.tls_elliptic_curves = server.tls_elliptic_curves;
+
+      turn_servers->push_back(config);
+      break;
+    }
+    default:
+      // We shouldn't get to this point with an invalid service_type, we should
+      // have returned an error already.
+      LOG_AND_RETURN_ERROR(
+          RTCErrorType::INTERNAL_ERROR,
+          "ICE server parsing failed: Unexpected service type");
+  }
+  return RTCError::OK();
+}
+
+} // namespace
+
+// Parses every server in `servers`, filling `stun_servers` and
+// `turn_servers`. Returns the first error encountered; note that on error the
+// output containers may already hold entries from servers parsed earlier.
+RTCError ParseIceServersOrError(
+    const PeerConnectionInterface::IceServers& servers,
+    cricket::ServerAddresses* stun_servers,
+    std::vector<cricket::RelayServerConfig>* turn_servers) {
+  for (const PeerConnectionInterface::IceServer& server : servers) {
+    if (!server.urls.empty()) {
+      for (const std::string& url : server.urls) {
+        if (url.empty()) {
+          LOG_AND_RETURN_ERROR(RTCErrorType::SYNTAX_ERROR,
+                               "ICE server parsing failed: Empty uri.");
+        }
+        RTCError err =
+            ParseIceServerUrl(server, url, stun_servers, turn_servers);
+        if (!err.ok()) {
+          return err;
+        }
+      }
+    } else if (!server.uri.empty()) {
+      // Fallback to old .uri if new .urls isn't present.
+      RTCError err =
+          ParseIceServerUrl(server, server.uri, stun_servers, turn_servers);
+
+      if (!err.ok()) {
+        return err;
+      }
+    } else {
+      LOG_AND_RETURN_ERROR(RTCErrorType::SYNTAX_ERROR,
+                           "ICE server parsing failed: Empty uri.");
+    }
+  }
+  return RTCError::OK();
+}
+
+// Deprecated wrapper that discards the error message and returns only the
+// RTCErrorType. Prefer ParseIceServersOrError().
+RTCErrorType ParseIceServers(
+    const PeerConnectionInterface::IceServers& servers,
+    cricket::ServerAddresses* stun_servers,
+    std::vector<cricket::RelayServerConfig>* turn_servers) {
+  return ParseIceServersOrError(servers, stun_servers, turn_servers).type();
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/ice_server_parsing.h b/third_party/libwebrtc/pc/ice_server_parsing.h
new file mode 100644
index 0000000000..549964e285
--- /dev/null
+++ b/third_party/libwebrtc/pc/ice_server_parsing.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_ICE_SERVER_PARSING_H_
+#define PC_ICE_SERVER_PARSING_H_
+
+#include <vector>
+
+#include "api/peer_connection_interface.h"
+#include "api/rtc_error.h"
+#include "p2p/base/port.h"
+#include "p2p/base/port_allocator.h"
+#include "rtc_base/system/rtc_export.h"
+
+namespace webrtc {
+
+// Parses the URLs for each server in `servers` to build `stun_servers` and
+// `turn_servers`. Can return SYNTAX_ERROR if the URL is malformed, or
+// INVALID_PARAMETER if a TURN server is missing `username` or `password`.
+//
+// Intended to be used to convert/validate the servers passed into a
+// PeerConnection through RTCConfiguration.
+RTC_EXPORT RTCError
+ParseIceServersOrError(const PeerConnectionInterface::IceServers& servers,
+ cricket::ServerAddresses* stun_servers,
+ std::vector<cricket::RelayServerConfig>* turn_servers);
+
+[[deprecated("use ParseIceServersOrError")]] RTC_EXPORT RTCErrorType
+ParseIceServers(const PeerConnectionInterface::IceServers& servers,
+ cricket::ServerAddresses* stun_servers,
+ std::vector<cricket::RelayServerConfig>* turn_servers);
+
+} // namespace webrtc
+
+#endif // PC_ICE_SERVER_PARSING_H_
diff --git a/third_party/libwebrtc/pc/ice_server_parsing_unittest.cc b/third_party/libwebrtc/pc/ice_server_parsing_unittest.cc
new file mode 100644
index 0000000000..4356b1efb0
--- /dev/null
+++ b/third_party/libwebrtc/pc/ice_server_parsing_unittest.cc
@@ -0,0 +1,242 @@
+/*
+ * Copyright 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/ice_server_parsing.h"
+
+#include <string>
+#include <vector>
+
+#include "p2p/base/port_interface.h"
+#include "rtc_base/ip_address.h"
+#include "rtc_base/socket_address.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+
+class IceServerParsingTest : public ::testing::Test {
+ public:
+  // Convenience functions for parsing a single URL. Result is stored in
+  // `stun_servers_` and `turn_servers_`. All overloads delegate to the
+  // five-argument ParseUrl() below.
+  bool ParseUrl(const std::string& url) {
+    return ParseUrl(url, std::string(), std::string());
+  }
+
+  // Parses with placeholder credentials, as required for TURN urls.
+  bool ParseTurnUrl(const std::string& url) {
+    return ParseUrl(url, "username", "password");
+  }
+
+  bool ParseUrl(const std::string& url,
+                const std::string& username,
+                const std::string& password) {
+    return ParseUrl(
+        url, username, password,
+        PeerConnectionInterface::TlsCertPolicy::kTlsCertPolicySecure);
+  }
+
+  bool ParseUrl(const std::string& url,
+                const std::string& username,
+                const std::string& password,
+                PeerConnectionInterface::TlsCertPolicy tls_certificate_policy) {
+    return ParseUrl(url, username, password, tls_certificate_policy, "");
+  }
+
+  // Builds a single IceServer from the arguments, clears any results from a
+  // previous call, and returns whether parsing succeeded.
+  bool ParseUrl(const std::string& url,
+                const std::string& username,
+                const std::string& password,
+                PeerConnectionInterface::TlsCertPolicy tls_certificate_policy,
+                const std::string& hostname) {
+    stun_servers_.clear();
+    turn_servers_.clear();
+    PeerConnectionInterface::IceServers servers;
+    PeerConnectionInterface::IceServer server;
+    server.urls.push_back(url);
+    server.username = username;
+    server.password = password;
+    server.tls_cert_policy = tls_certificate_policy;
+    server.hostname = hostname;
+    servers.push_back(server);
+    return webrtc::ParseIceServersOrError(servers, &stun_servers_,
+                                          &turn_servers_)
+        .ok();
+  }
+
+ protected:
+  // Outputs of the most recent Parse* call.
+  cricket::ServerAddresses stun_servers_;
+  std::vector<cricket::RelayServerConfig> turn_servers_;
+};
+
+// Make sure all STUN/TURN prefixes are parsed correctly.
+TEST_F(IceServerParsingTest, ParseStunPrefixes) {
+  EXPECT_TRUE(ParseUrl("stun:hostname"));
+  EXPECT_EQ(1U, stun_servers_.size());
+  EXPECT_EQ(0U, turn_servers_.size());
+
+  EXPECT_TRUE(ParseUrl("stuns:hostname"));
+  EXPECT_EQ(1U, stun_servers_.size());
+  EXPECT_EQ(0U, turn_servers_.size());
+
+  // "turn" yields a TURN server with UDP transport by default.
+  EXPECT_TRUE(ParseTurnUrl("turn:hostname"));
+  EXPECT_EQ(0U, stun_servers_.size());
+  EXPECT_EQ(1U, turn_servers_.size());
+  EXPECT_EQ(cricket::PROTO_UDP, turn_servers_[0].ports[0].proto);
+
+  // "turns" implies TLS transport and the secure cert policy by default.
+  EXPECT_TRUE(ParseTurnUrl("turns:hostname"));
+  EXPECT_EQ(0U, stun_servers_.size());
+  EXPECT_EQ(1U, turn_servers_.size());
+  EXPECT_EQ(cricket::PROTO_TLS, turn_servers_[0].ports[0].proto);
+  EXPECT_TRUE(turn_servers_[0].tls_cert_policy ==
+              cricket::TlsCertPolicy::TLS_CERT_POLICY_SECURE);
+
+  // An insecure cert policy set on the IceServer propagates to the config.
+  EXPECT_TRUE(ParseUrl(
+      "turns:hostname", "username", "password",
+      PeerConnectionInterface::TlsCertPolicy::kTlsCertPolicyInsecureNoCheck));
+  EXPECT_EQ(0U, stun_servers_.size());
+  EXPECT_EQ(1U, turn_servers_.size());
+  EXPECT_TRUE(turn_servers_[0].tls_cert_policy ==
+              cricket::TlsCertPolicy::TLS_CERT_POLICY_INSECURE_NO_CHECK);
+  EXPECT_EQ(cricket::PROTO_TLS, turn_servers_[0].ports[0].proto);
+
+  // Invalid or empty scheme prefixes must be rejected.
+  EXPECT_FALSE(ParseUrl("stunn:hostname"));
+  EXPECT_FALSE(ParseUrl(":hostname"));
+  EXPECT_FALSE(ParseUrl(":"));
+  EXPECT_FALSE(ParseUrl(""));
+}
+
+TEST_F(IceServerParsingTest, VerifyDefaults) {
+  // TURNS defaults: port 5349 and TLS transport.
+  EXPECT_TRUE(ParseTurnUrl("turns:hostname"));
+  EXPECT_EQ(1U, turn_servers_.size());
+  EXPECT_EQ(5349, turn_servers_[0].ports[0].address.port());
+  EXPECT_EQ(cricket::PROTO_TLS, turn_servers_[0].ports[0].proto);
+
+  // TURN defaults: port 3478 and UDP transport.
+  EXPECT_TRUE(ParseTurnUrl("turn:hostname"));
+  EXPECT_EQ(1U, turn_servers_.size());
+  EXPECT_EQ(3478, turn_servers_[0].ports[0].address.port());
+  EXPECT_EQ(cricket::PROTO_UDP, turn_servers_[0].ports[0].proto);
+
+  // STUN defaults: port 3478.
+  EXPECT_TRUE(ParseUrl("stun:hostname"));
+  EXPECT_EQ(1U, stun_servers_.size());
+  EXPECT_EQ(3478, stun_servers_.begin()->port());
+}
+
+// Check that the 6 combinations of IPv4/IPv6/hostname and with/without port
+// can be parsed correctly.
+TEST_F(IceServerParsingTest, ParseHostnameAndPort) {
+  EXPECT_TRUE(ParseUrl("stun:1.2.3.4:1234"));
+  EXPECT_EQ(1U, stun_servers_.size());
+  EXPECT_EQ("1.2.3.4", stun_servers_.begin()->hostname());
+  EXPECT_EQ(1234, stun_servers_.begin()->port());
+
+  // IPv6 literals are bracketed; the brackets are stripped from the hostname.
+  EXPECT_TRUE(ParseUrl("stun:[1:2:3:4:5:6:7:8]:4321"));
+  EXPECT_EQ(1U, stun_servers_.size());
+  EXPECT_EQ("1:2:3:4:5:6:7:8", stun_servers_.begin()->hostname());
+  EXPECT_EQ(4321, stun_servers_.begin()->port());
+
+  EXPECT_TRUE(ParseUrl("stun:hostname:9999"));
+  EXPECT_EQ(1U, stun_servers_.size());
+  EXPECT_EQ("hostname", stun_servers_.begin()->hostname());
+  EXPECT_EQ(9999, stun_servers_.begin()->port());
+
+  // Without an explicit port, the STUN default (3478) is used.
+  EXPECT_TRUE(ParseUrl("stun:1.2.3.4"));
+  EXPECT_EQ(1U, stun_servers_.size());
+  EXPECT_EQ("1.2.3.4", stun_servers_.begin()->hostname());
+  EXPECT_EQ(3478, stun_servers_.begin()->port());
+
+  EXPECT_TRUE(ParseUrl("stun:[1:2:3:4:5:6:7:8]"));
+  EXPECT_EQ(1U, stun_servers_.size());
+  EXPECT_EQ("1:2:3:4:5:6:7:8", stun_servers_.begin()->hostname());
+  EXPECT_EQ(3478, stun_servers_.begin()->port());
+
+  EXPECT_TRUE(ParseUrl("stun:hostname"));
+  EXPECT_EQ(1U, stun_servers_.size());
+  EXPECT_EQ("hostname", stun_servers_.begin()->hostname());
+  EXPECT_EQ(3478, stun_servers_.begin()->port());
+
+  // Both TURN IP and host exist
+  EXPECT_TRUE(
+      ParseUrl("turn:1.2.3.4:1234", "username", "password",
+               PeerConnectionInterface::TlsCertPolicy::kTlsCertPolicySecure,
+               "hostname"));
+  EXPECT_EQ(1U, turn_servers_.size());
+  rtc::SocketAddress address = turn_servers_[0].ports[0].address;
+  EXPECT_EQ("hostname", address.hostname());
+  EXPECT_EQ(1234, address.port());
+  EXPECT_FALSE(address.IsUnresolvedIP());
+  EXPECT_EQ("1.2.3.4", address.ipaddr().ToString());
+
+  // Try some invalid hostname:port strings.
+  EXPECT_FALSE(ParseUrl("stun:hostname:99a99"));
+  EXPECT_FALSE(ParseUrl("stun:hostname:-1"));
+  EXPECT_FALSE(ParseUrl("stun:hostname:port:more"));
+  EXPECT_FALSE(ParseUrl("stun:hostname:port more"));
+  EXPECT_FALSE(ParseUrl("stun:hostname:"));
+  EXPECT_FALSE(ParseUrl("stun:[1:2:3:4:5:6:7:8]junk:1000"));
+  EXPECT_FALSE(ParseUrl("stun::5555"));
+  EXPECT_FALSE(ParseUrl("stun:"));
+  // Test illegal URLs according to RFC 3986 (URI generic syntax)
+  // and RFC 7064 (URI schemes for STUN and TURN)
+  EXPECT_FALSE(ParseUrl("stun:/hostname")); // / is not allowed
+  EXPECT_FALSE(ParseUrl("stun:?hostname")); // ? is not allowed
+  EXPECT_FALSE(ParseUrl("stun:#hostname")); // # is not allowed
+  // STUN explicitly forbids query parameters.
+  EXPECT_FALSE(ParseUrl("stun:hostname?transport=udp"));
+}
+
+// Test parsing the "?transport=xxx" part of the URL.
+TEST_F(IceServerParsingTest, ParseTransport) {
+  EXPECT_TRUE(ParseTurnUrl("turn:hostname:1234?transport=tcp"));
+  EXPECT_EQ(1U, turn_servers_.size());
+  EXPECT_EQ(cricket::PROTO_TCP, turn_servers_[0].ports[0].proto);
+
+  EXPECT_TRUE(ParseTurnUrl("turn:hostname?transport=udp"));
+  EXPECT_EQ(1U, turn_servers_.size());
+  EXPECT_EQ(cricket::PROTO_UDP, turn_servers_[0].ports[0].proto);
+
+  // Unknown transport values, empty values, and malformed or empty query
+  // strings must all be rejected.
+  EXPECT_FALSE(ParseTurnUrl("turn:hostname?transport=invalid"));
+  EXPECT_FALSE(ParseTurnUrl("turn:hostname?transport="));
+  EXPECT_FALSE(ParseTurnUrl("turn:hostname?="));
+  EXPECT_FALSE(ParseTurnUrl("turn:hostname?"));
+  EXPECT_FALSE(ParseTurnUrl("?"));
+}
+
+// Reject pre-RFC 7065 syntax with ICE username contained in URL.
+TEST_F(IceServerParsingTest, ParseRejectsUsername) {
+  // The embedded-credentials form "turn:user@host" predates RFC 7065 and is
+  // deliberately rejected by the parser.
+  EXPECT_FALSE(ParseTurnUrl("turn:user@hostname"));
+}
+
+// Test that username and password from IceServer is copied into the resulting
+// RelayServerConfig.
+TEST_F(IceServerParsingTest, CopyUsernameAndPasswordFromIceServer) {
+  EXPECT_TRUE(ParseUrl("turn:hostname", "username", "password"));
+  EXPECT_EQ(1U, turn_servers_.size());
+  // Credentials come from the IceServer struct, not from the URL itself.
+  EXPECT_EQ("username", turn_servers_[0].credentials.username);
+  EXPECT_EQ("password", turn_servers_[0].credentials.password);
+}
+
+// Ensure that if a server has multiple URLs, each one is parsed.
+TEST_F(IceServerParsingTest, ParseMultipleUrls) {
+  PeerConnectionInterface::IceServers servers;
+  PeerConnectionInterface::IceServer server;
+  // A single IceServer entry carrying both a STUN and a TURN url.
+  server.urls.push_back("stun:hostname");
+  server.urls.push_back("turn:hostname");
+  server.username = "foo";
+  server.password = "bar";
+  servers.push_back(server);
+  EXPECT_TRUE(
+      webrtc::ParseIceServersOrError(servers, &stun_servers_, &turn_servers_)
+          .ok());
+  EXPECT_EQ(1U, stun_servers_.size());
+  EXPECT_EQ(1U, turn_servers_.size());
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/ice_transport.cc b/third_party/libwebrtc/pc/ice_transport.cc
new file mode 100644
index 0000000000..205846755d
--- /dev/null
+++ b/third_party/libwebrtc/pc/ice_transport.cc
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/ice_transport.h"
+
+#include "api/sequence_checker.h"
+
+namespace webrtc {
+
+IceTransportWithPointer::~IceTransportWithPointer() {
+  // We depend on the networking thread to call Clear() before dropping
+  // its last reference to this object; if the destructor is called
+  // on the networking thread, it's OK to not have called Clear().
+  // The check below enforces exactly that: a still-set `internal_` is only
+  // tolerated when destruction happens on the creating thread.
+  if (internal_) {
+    RTC_DCHECK_RUN_ON(creator_thread_);
+  }
+}
+
+// Returns the wrapped transport pointer, or null once Clear() has run.
+// May only be called on the thread that created this object.
+cricket::IceTransportInternal* IceTransportWithPointer::internal() {
+  RTC_DCHECK_RUN_ON(creator_thread_);
+  return internal_;
+}
+
+// Detaches the wrapped transport; subsequent internal() calls return null.
+// Must be called on the creator thread before the pointee is destroyed.
+void IceTransportWithPointer::Clear() {
+  RTC_DCHECK_RUN_ON(creator_thread_);
+  internal_ = nullptr;
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/ice_transport.h b/third_party/libwebrtc/pc/ice_transport.h
new file mode 100644
index 0000000000..e31ec546b2
--- /dev/null
+++ b/third_party/libwebrtc/pc/ice_transport.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_ICE_TRANSPORT_H_
+#define PC_ICE_TRANSPORT_H_
+
+#include "api/ice_transport_interface.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/thread_annotations.h"
+
+namespace webrtc {
+
+// Implementation of IceTransportInterface that does not take ownership
+// of its underlying IceTransport. It depends on its creator class to
+// ensure that Clear() is called before the underlying IceTransport
+// is deallocated.
+// Implementation of IceTransportInterface that does not take ownership
+// of its underlying IceTransport. It depends on its creator class to
+// ensure that Clear() is called before the underlying IceTransport
+// is deallocated.
+class IceTransportWithPointer : public IceTransportInterface {
+ public:
+  // `internal` must be non-null and must outlive this object unless Clear()
+  // is called first. The current thread is recorded as the only thread
+  // allowed to access the pointer afterwards.
+  explicit IceTransportWithPointer(cricket::IceTransportInternal* internal)
+      : creator_thread_(rtc::Thread::Current()), internal_(internal) {
+    RTC_DCHECK(internal_);
+  }
+
+  IceTransportWithPointer() = delete;
+  IceTransportWithPointer(const IceTransportWithPointer&) = delete;
+  IceTransportWithPointer& operator=(const IceTransportWithPointer&) = delete;
+
+  cricket::IceTransportInternal* internal() override;
+  // This call will ensure that the pointer passed at construction is
+  // no longer in use by this object. Later calls to internal() will return
+  // null.
+  void Clear();
+
+ protected:
+  // Protected: this is a ref-counted interface, so destruction happens via
+  // the reference count rather than direct delete.
+  ~IceTransportWithPointer() override;
+
+ private:
+  // Thread on which this object was created; guards access to `internal_`.
+  const rtc::Thread* creator_thread_;
+  // Non-owning; reset to null by Clear().
+  cricket::IceTransportInternal* internal_ RTC_GUARDED_BY(creator_thread_);
+};
+
+} // namespace webrtc
+
+#endif // PC_ICE_TRANSPORT_H_
diff --git a/third_party/libwebrtc/pc/ice_transport_unittest.cc b/third_party/libwebrtc/pc/ice_transport_unittest.cc
new file mode 100644
index 0000000000..aaf9f2e57a
--- /dev/null
+++ b/third_party/libwebrtc/pc/ice_transport_unittest.cc
@@ -0,0 +1,64 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/ice_transport.h"
+
+#include <memory>
+#include <utility>
+
+#include "api/ice_transport_factory.h"
+#include "api/make_ref_counted.h"
+#include "api/scoped_refptr.h"
+#include "p2p/base/fake_ice_transport.h"
+#include "p2p/base/fake_port_allocator.h"
+#include "rtc_base/internal/default_socket_server.h"
+#include "test/gtest.h"
+#include "test/scoped_key_value_config.h"
+
+namespace webrtc {
+
+// Fixture that installs a socket server on the current thread so transport
+// objects can be created and used as if on a network thread.
+class IceTransportTest : public ::testing::Test {
+ protected:
+  IceTransportTest()
+      : socket_server_(rtc::CreateDefaultSocketServer()),
+        main_thread_(socket_server_.get()) {}
+
+  rtc::SocketServer* socket_server() const { return socket_server_.get(); }
+
+  webrtc::test::ScopedKeyValueConfig field_trials_;
+
+ private:
+  std::unique_ptr<rtc::SocketServer> socket_server_;
+  // Binds `socket_server_` to the current thread for the test's duration.
+  rtc::AutoSocketServerThread main_thread_;
+};
+
+// A wrapper created directly from a raw pointer does not own the wrapped
+// transport: after Clear() the wrapper stops exposing it, while the
+// transport itself remains alive (still owned by `cricket_transport`).
+TEST_F(IceTransportTest, CreateNonSelfDeletingTransport) {
+  auto cricket_transport =
+      std::make_unique<cricket::FakeIceTransport>("name", 0, nullptr);
+  auto ice_transport =
+      rtc::make_ref_counted<IceTransportWithPointer>(cricket_transport.get());
+  EXPECT_EQ(ice_transport->internal(), cricket_transport.get());
+  ice_transport->Clear();
+  EXPECT_NE(ice_transport->internal(), cricket_transport.get());
+}
+
+// CreateIceTransport() yields a transport that owns its internal
+// implementation, so internal() is non-null right after creation.
+TEST_F(IceTransportTest, CreateSelfDeletingTransport) {
+  std::unique_ptr<cricket::FakePortAllocator> port_allocator(
+      std::make_unique<cricket::FakePortAllocator>(
+          nullptr,
+          std::make_unique<rtc::BasicPacketSocketFactory>(socket_server()),
+          &field_trials_));
+  IceTransportInit init;
+  init.set_port_allocator(port_allocator.get());
+  auto ice_transport = CreateIceTransport(std::move(init));
+  EXPECT_NE(nullptr, ice_transport->internal());
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/jitter_buffer_delay.cc b/third_party/libwebrtc/pc/jitter_buffer_delay.cc
new file mode 100644
index 0000000000..f22b0650f9
--- /dev/null
+++ b/third_party/libwebrtc/pc/jitter_buffer_delay.cc
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/jitter_buffer_delay.h"
+
+#include "api/sequence_checker.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/numerics/safe_conversions.h"
+#include "rtc_base/numerics/safe_minmax.h"
+
+namespace {
+constexpr int kDefaultDelay = 0;
+constexpr int kMaximumDelayMs = 10000;
+} // namespace
+
+namespace webrtc {
+
+// Caches the requested delay (in seconds); absl::nullopt restores the
+// default. Must be called on the worker sequence.
+void JitterBufferDelay::Set(absl::optional<double> delay_seconds) {
+  RTC_DCHECK_RUN_ON(&worker_thread_checker_);
+  cached_delay_seconds_ = delay_seconds;
+}
+
+// Returns the cached delay converted to milliseconds, clamped to the range
+// the jitter buffer supports. Falls back to the default (0 s) when no delay
+// has been set. Must be called on the worker sequence.
+int JitterBufferDelay::GetMs() const {
+  RTC_DCHECK_RUN_ON(&worker_thread_checker_);
+  const double delay_seconds = cached_delay_seconds_.value_or(kDefaultDelay);
+  // saturated_cast protects against int overflow on huge requested delays.
+  const int delay_ms = rtc::saturated_cast<int>(delay_seconds * 1000);
+  return rtc::SafeClamp(delay_ms, 0, kMaximumDelayMs);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/jitter_buffer_delay.h b/third_party/libwebrtc/pc/jitter_buffer_delay.h
new file mode 100644
index 0000000000..caf713b045
--- /dev/null
+++ b/third_party/libwebrtc/pc/jitter_buffer_delay.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_JITTER_BUFFER_DELAY_H_
+#define PC_JITTER_BUFFER_DELAY_H_
+
+#include <stdint.h>
+
+#include "absl/types/optional.h"
+#include "api/sequence_checker.h"
+#include "rtc_base/system/no_unique_address.h"
+#include "rtc_base/thread_annotations.h"
+
+namespace webrtc {
+
+// JitterBufferDelay converts delay from seconds to milliseconds for the
+// underlying media channel. It also handles cases when user sets delay before
+// the start of media_channel by caching its request.
+// JitterBufferDelay converts delay from seconds to milliseconds for the
+// underlying media channel. It also handles cases when user sets delay before
+// the start of media_channel by caching its request.
+class JitterBufferDelay {
+ public:
+  JitterBufferDelay() = default;
+
+  // Caches `delay_seconds`; absl::nullopt restores the default delay.
+  void Set(absl::optional<double> delay_seconds);
+  // Returns the cached delay in milliseconds, clamped to the supported range.
+  int GetMs() const;
+
+ private:
+  // Starts detached so the first Set()/GetMs() call binds the worker
+  // sequence.
+  RTC_NO_UNIQUE_ADDRESS SequenceChecker worker_thread_checker_{
+      SequenceChecker::kDetached};
+  absl::optional<double> cached_delay_seconds_
+      RTC_GUARDED_BY(&worker_thread_checker_);
+};
+
+} // namespace webrtc
+
+#endif // PC_JITTER_BUFFER_DELAY_H_
diff --git a/third_party/libwebrtc/pc/jitter_buffer_delay_unittest.cc b/third_party/libwebrtc/pc/jitter_buffer_delay_unittest.cc
new file mode 100644
index 0000000000..79c39fffb8
--- /dev/null
+++ b/third_party/libwebrtc/pc/jitter_buffer_delay_unittest.cc
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/jitter_buffer_delay.h"
+
+#include "test/gtest.h"
+
+namespace webrtc {
+
+// Fixture owning the JitterBufferDelay instance under test.
+class JitterBufferDelayTest : public ::testing::Test {
+ public:
+  JitterBufferDelayTest() {}
+
+ protected:
+  JitterBufferDelay delay_;
+};
+
+TEST_F(JitterBufferDelayTest, Set) {
+  // Delay in seconds.
+  delay_.Set(3.0);
+  EXPECT_EQ(delay_.GetMs(), 3000);  // Converted to milliseconds.
+}
+
+TEST_F(JitterBufferDelayTest, DefaultValue) {
+  // No Set() call yet: the cached delay is absent.
+  EXPECT_EQ(delay_.GetMs(), 0);  // Default value is 0ms.
+}
+
+TEST_F(JitterBufferDelayTest, Clamping) {
+  // In current Jitter Buffer implementation (Audio or Video) maximum supported
+  // value is 10000 milliseconds.
+  delay_.Set(10.5);
+  EXPECT_EQ(delay_.GetMs(), 10000);
+
+  // Test int overflow.
+  delay_.Set(21474836470.0);
+  EXPECT_EQ(delay_.GetMs(), 10000);
+
+  // Negative delays clamp to zero rather than underflowing.
+  delay_.Set(-21474836470.0);
+  EXPECT_EQ(delay_.GetMs(), 0);
+
+  // Boundary value in seconds to milliseconds conversion.
+  delay_.Set(0.0009);
+  EXPECT_EQ(delay_.GetMs(), 0);
+
+  delay_.Set(-2.0);
+  EXPECT_EQ(delay_.GetMs(), 0);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/jsep_ice_candidate.cc b/third_party/libwebrtc/pc/jsep_ice_candidate.cc
new file mode 100644
index 0000000000..1e97ad42d8
--- /dev/null
+++ b/third_party/libwebrtc/pc/jsep_ice_candidate.cc
@@ -0,0 +1,76 @@
+/*
+ * Copyright 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/jsep_ice_candidate.h"
+
+#include "pc/webrtc_sdp.h"
+
+// This file contains JsepIceCandidate-related functions that are not
+// included in api/jsep_ice_candidate.cc. Some of these link to SDP
+// parsing/serializing functions, which some users may not want.
+// TODO(bugs.webrtc.org/12330): Merge the two .cc files somehow.
+
+namespace webrtc {
+
+// Parses `sdp` into a new JsepIceCandidate for the given mid/mline index.
+// On success ownership of the candidate is transferred to the caller; on
+// parse failure returns nullptr and fills `error` (if non-null).
+IceCandidateInterface* CreateIceCandidate(const std::string& sdp_mid,
+                                          int sdp_mline_index,
+                                          const std::string& sdp,
+                                          SdpParseError* error) {
+  // Use make_unique instead of raw new/delete so the partially constructed
+  // candidate is released automatically if parsing fails.
+  auto jsep_ice = std::make_unique<JsepIceCandidate>(sdp_mid, sdp_mline_index);
+  if (!jsep_ice->Initialize(sdp, error)) {
+    return nullptr;
+  }
+  return jsep_ice.release();
+}
+
+// unique_ptr flavor of CreateIceCandidate: wraps an already-parsed
+// cricket::Candidate, so construction cannot fail.
+std::unique_ptr<IceCandidateInterface> CreateIceCandidate(
+    const std::string& sdp_mid,
+    int sdp_mline_index,
+    const cricket::Candidate& candidate) {
+  return std::make_unique<JsepIceCandidate>(sdp_mid, sdp_mline_index,
+                                            candidate);
+}
+
+// Constructs a wrapper with no underlying cricket::Candidate yet;
+// Initialize() fills it in from an SDP candidate line.
+JsepIceCandidate::JsepIceCandidate(const std::string& sdp_mid,
+                                   int sdp_mline_index)
+    : sdp_mid_(sdp_mid), sdp_mline_index_(sdp_mline_index) {}
+
+// Constructs a fully populated candidate wrapper.
+JsepIceCandidate::JsepIceCandidate(const std::string& sdp_mid,
+                                   int sdp_mline_index,
+                                   const cricket::Candidate& candidate)
+    : sdp_mid_(sdp_mid),
+      sdp_mline_index_(sdp_mline_index),
+      candidate_(candidate) {}
+
+JsepIceCandidate::~JsepIceCandidate() {}
+
+// Deep-copies the collection: each candidate is reconstructed from its
+// accessors since IceCandidateInterface exposes no clone operation.
+JsepCandidateCollection JsepCandidateCollection::Clone() const {
+  JsepCandidateCollection new_collection;
+  for (const auto& candidate : candidates_) {
+    new_collection.candidates_.push_back(std::make_unique<JsepIceCandidate>(
+        candidate->sdp_mid(), candidate->sdp_mline_index(),
+        candidate->candidate()));
+  }
+  return new_collection;
+}
+
+// Parses an SDP candidate line into this object. Returns false (filling
+// `err` if non-null) on malformed input.
+bool JsepIceCandidate::Initialize(const std::string& sdp, SdpParseError* err) {
+  return SdpDeserializeCandidate(sdp, this, err);
+}
+
+// Serializes this candidate to its SDP representation. Returns false when
+// `out` is null or serialization produced an empty string.
+bool JsepIceCandidate::ToString(std::string* out) const {
+  if (out == nullptr) {
+    return false;
+  }
+  *out = SdpSerializeCandidate(*this);
+  return !out->empty();
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/jsep_session_description.cc b/third_party/libwebrtc/pc/jsep_session_description.cc
new file mode 100644
index 0000000000..885c1eb310
--- /dev/null
+++ b/third_party/libwebrtc/pc/jsep_session_description.cc
@@ -0,0 +1,367 @@
+/*
+ * Copyright 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/jsep_session_description.h"
+
+#include <memory>
+#include <utility>
+
+#include "absl/types/optional.h"
+#include "p2p/base/p2p_constants.h"
+#include "p2p/base/port.h"
+#include "p2p/base/transport_description.h"
+#include "p2p/base/transport_info.h"
+#include "pc/media_session.h" // IWYU pragma: keep
+#include "pc/webrtc_sdp.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/ip_address.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/net_helper.h"
+#include "rtc_base/socket_address.h"
+
+using cricket::SessionDescription;
+
+namespace webrtc {
+namespace {
+
+// RFC 5245
+// It is RECOMMENDED that default candidates be chosen based on the
+// likelihood of those candidates to work with the peer that is being
+// contacted. It is RECOMMENDED that relayed > reflexive > host.
+constexpr int kPreferenceUnknown = 0;
+constexpr int kPreferenceHost = 1;
+constexpr int kPreferenceReflexive = 2;
+constexpr int kPreferenceRelayed = 3;
+
+constexpr char kDummyAddress[] = "0.0.0.0";
+constexpr int kDummyPort = 9;
+
+// Maps a cricket candidate type string to its RFC 5245 default-candidate
+// preference (relayed > reflexive > host; anything else is unknown).
+int GetCandidatePreferenceFromType(const std::string& type) {
+  if (type == cricket::LOCAL_PORT_TYPE) {
+    return kPreferenceHost;
+  }
+  if (type == cricket::STUN_PORT_TYPE) {
+    return kPreferenceReflexive;
+  }
+  if (type == cricket::RELAY_PORT_TYPE) {
+    return kPreferenceRelayed;
+  }
+  return kPreferenceUnknown;
+}
+
+// Update the connection address for the MediaContentDescription based on the
+// candidates: the highest-preference UDP RTP candidate becomes the default
+// destination written into the c=/m= lines. With no usable candidate the
+// dummy 0.0.0.0:9 address is kept.
+void UpdateConnectionAddress(
+    const JsepCandidateCollection& candidate_collection,
+    cricket::MediaContentDescription* media_desc) {
+  int port = kDummyPort;
+  std::string ip = kDummyAddress;
+  std::string hostname;
+  int current_preference = kPreferenceUnknown;
+  int current_family = AF_UNSPEC;
+  for (size_t i = 0; i < candidate_collection.count(); ++i) {
+    const IceCandidateInterface* jsep_candidate = candidate_collection.at(i);
+    // Only RTP-component candidates are eligible as the default destination.
+    if (jsep_candidate->candidate().component() !=
+        cricket::ICE_CANDIDATE_COMPONENT_RTP) {
+      continue;
+    }
+    // Default destination should be UDP only.
+    if (jsep_candidate->candidate().protocol() != cricket::UDP_PROTOCOL_NAME) {
+      continue;
+    }
+    const int preference =
+        GetCandidatePreferenceFromType(jsep_candidate->candidate().type());
+    const int family = jsep_candidate->candidate().address().ipaddr().family();
+    // See if this candidate is more preferable then the current one if it's the
+    // same family. Or if the current family is IPv4 already so we could safely
+    // ignore all IPv6 ones. WebRTC bug 4269.
+    // http://code.google.com/p/webrtc/issues/detail?id=4269
+    if ((preference <= current_preference && current_family == family) ||
+        (current_family == AF_INET && family == AF_INET6)) {
+      continue;
+    }
+    // This candidate wins so far; remember its address details.
+    current_preference = preference;
+    current_family = family;
+    const rtc::SocketAddress& candidate_addr =
+        jsep_candidate->candidate().address();
+    port = candidate_addr.port();
+    ip = candidate_addr.ipaddr().ToString();
+    hostname = candidate_addr.hostname();
+  }
+  rtc::SocketAddress connection_addr(ip, port);
+  if (rtc::IPIsUnspec(connection_addr.ipaddr()) && !hostname.empty()) {
+    // When a hostname candidate becomes the (default) connection address,
+    // we use the dummy address 0.0.0.0 and port 9 in the c= and the m= lines.
+    //
+    // We have observed in deployment that with a FQDN in a c= line, SDP parsing
+    // could fail in other JSEP implementations. We note that the wildcard
+    // addresses (0.0.0.0 or ::) with port 9 are given the exception as the
+    // connection address that will not result in an ICE mismatch
+    // (draft-ietf-mmusic-ice-sip-sdp). Also, 0.0.0.0 or :: can be used as the
+    // connection address in the initial offer or answer with trickle ICE
+    // if the offerer or answerer does not want to include the host IP address
+    // (draft-ietf-mmusic-trickle-ice-sip), and in particular 0.0.0.0 has been
+    // widely deployed for this use without outstanding compatibility issues.
+    // Combining the above considerations, we use 0.0.0.0 with port 9 to
+    // populate the c= and the m= lines. See `BuildMediaDescription` in
+    // webrtc_sdp.cc for the SDP generation with
+    // `media_desc->connection_address()`.
+    connection_addr = rtc::SocketAddress(kDummyAddress, kDummyPort);
+  }
+  media_desc->set_connection_address(connection_addr);
+}
+
+} // namespace
+
+// TODO(steveanton): Remove this default implementation once Chromium has been
+// updated.
+// Derives the SdpType enum from the legacy type() string; unrecognized
+// strings fall back to kOffer with a warning rather than failing.
+SdpType SessionDescriptionInterface::GetType() const {
+  absl::optional<SdpType> maybe_type = SdpTypeFromString(type());
+  if (maybe_type) {
+    return *maybe_type;
+  } else {
+    RTC_LOG(LS_WARNING) << "Default implementation of "
+                           "SessionDescriptionInterface::GetType does not "
+                           "recognize the result from type(), returning "
+                           "kOffer.";
+    return SdpType::kOffer;
+  }
+}
+
+// Legacy raw-pointer factory: returns nullptr if `type` is not a valid SDP
+// type string; otherwise delegates to the unique_ptr overload and releases
+// ownership to the caller.
+SessionDescriptionInterface* CreateSessionDescription(const std::string& type,
+                                                      const std::string& sdp,
+                                                      SdpParseError* error) {
+  absl::optional<SdpType> maybe_type = SdpTypeFromString(type);
+  if (!maybe_type) {
+    return nullptr;
+  }
+
+  return CreateSessionDescription(*maybe_type, sdp, error).release();
+}
+
+// Convenience overload that discards parse-error details.
+std::unique_ptr<SessionDescriptionInterface> CreateSessionDescription(
+    SdpType type,
+    const std::string& sdp) {
+  return CreateSessionDescription(type, sdp, nullptr);
+}
+
+// Parses `sdp` into a JsepSessionDescription of the given type, returning
+// null on parse failure. Rollback descriptions carry no SDP, so parsing is
+// skipped for them.
+std::unique_ptr<SessionDescriptionInterface> CreateSessionDescription(
+    SdpType type,
+    const std::string& sdp,
+    SdpParseError* error_out) {
+  auto jsep_desc = std::make_unique<JsepSessionDescription>(type);
+  if (type != SdpType::kRollback) {
+    if (!SdpDeserialize(sdp, jsep_desc.get(), error_out)) {
+      return nullptr;
+    }
+  }
+  return std::move(jsep_desc);
+}
+
+// Wraps an already-built cricket::SessionDescription. Initialize() can only
+// fail on a null description, which is a caller error (hence the DCHECK).
+std::unique_ptr<SessionDescriptionInterface> CreateSessionDescription(
+    SdpType type,
+    const std::string& session_id,
+    const std::string& session_version,
+    std::unique_ptr<cricket::SessionDescription> description) {
+  auto jsep_description = std::make_unique<JsepSessionDescription>(type);
+  bool initialize_success = jsep_description->Initialize(
+      std::move(description), session_id, session_version);
+  RTC_DCHECK(initialize_success);
+  return std::move(jsep_description);
+}
+
+JsepSessionDescription::JsepSessionDescription(SdpType type) : type_(type) {}
+
+// Legacy string-typed constructor; falls back to kOffer (with a warning) on
+// unrecognized type strings rather than failing.
+JsepSessionDescription::JsepSessionDescription(const std::string& type) {
+  absl::optional<SdpType> maybe_type = SdpTypeFromString(type);
+  if (maybe_type) {
+    type_ = *maybe_type;
+  } else {
+    RTC_LOG(LS_WARNING)
+        << "JsepSessionDescription constructed with invalid type string: "
+        << type << ". Assuming it is an offer.";
+    type_ = SdpType::kOffer;
+  }
+}
+
+// Takes ownership of a non-null cricket description and records the session
+// id/version.
+JsepSessionDescription::JsepSessionDescription(
+    SdpType type,
+    std::unique_ptr<cricket::SessionDescription> description,
+    absl::string_view session_id,
+    absl::string_view session_version)
+    : description_(std::move(description)),
+      session_id_(session_id),
+      session_version_(session_version),
+      type_(type) {
+  RTC_DCHECK(description_);
+  // One (initially empty) candidate collection per m= section.
+  candidate_collection_.resize(number_of_mediasections());
+}
+
+JsepSessionDescription::~JsepSessionDescription() {}
+
+// Takes ownership of `description` and records the session id/version.
+// Returns false only when `description` is null.
+bool JsepSessionDescription::Initialize(
+    std::unique_ptr<cricket::SessionDescription> description,
+    const std::string& session_id,
+    const std::string& session_version) {
+  if (!description)
+    return false;
+
+  session_id_ = session_id;
+  session_version_ = session_version;
+  description_ = std::move(description);
+  // One (initially empty) candidate collection per m= section.
+  candidate_collection_.resize(number_of_mediasections());
+  return true;
+}
+
+// Deep copy: clones the underlying cricket description (if any) and every
+// per-section candidate collection.
+std::unique_ptr<SessionDescriptionInterface> JsepSessionDescription::Clone()
+    const {
+  auto new_description = std::make_unique<JsepSessionDescription>(type_);
+  new_description->session_id_ = session_id_;
+  new_description->session_version_ = session_version_;
+  if (description_) {
+    new_description->description_ = description_->Clone();
+  }
+  for (const auto& collection : candidate_collection_) {
+    new_description->candidate_collection_.push_back(collection.Clone());
+  }
+  return new_description;
+}
+
+// Associates `candidate` with the matching m= section, back-filling missing
+// ICE credentials from that section's transport description. Duplicate
+// candidates are silently ignored. Returns false when no matching section
+// or transport can be found.
+bool JsepSessionDescription::AddCandidate(
+    const IceCandidateInterface* candidate) {
+  if (!candidate)
+    return false;
+  size_t mediasection_index = 0;
+  if (!GetMediasectionIndex(candidate, &mediasection_index)) {
+    return false;
+  }
+  if (mediasection_index >= number_of_mediasections())
+    return false;
+  const std::string& content_name =
+      description_->contents()[mediasection_index].name;
+  const cricket::TransportInfo* transport_info =
+      description_->GetTransportInfoByName(content_name);
+  if (!transport_info) {
+    return false;
+  }
+
+  // Candidates parsed from SDP may lack ufrag/pwd; inherit them from the
+  // section's transport description.
+  cricket::Candidate updated_candidate = candidate->candidate();
+  if (updated_candidate.username().empty()) {
+    updated_candidate.set_username(transport_info->description.ice_ufrag);
+  }
+  if (updated_candidate.password().empty()) {
+    updated_candidate.set_password(transport_info->description.ice_pwd);
+  }
+
+  std::unique_ptr<JsepIceCandidate> updated_candidate_wrapper(
+      new JsepIceCandidate(candidate->sdp_mid(),
+                           static_cast<int>(mediasection_index),
+                           updated_candidate));
+  // Only insert (and refresh the default connection address) if this is not
+  // a duplicate of an already-stored candidate.
+  if (!candidate_collection_[mediasection_index].HasCandidate(
+          updated_candidate_wrapper.get())) {
+    candidate_collection_[mediasection_index].add(
+        updated_candidate_wrapper.release());
+    UpdateConnectionAddress(
+        candidate_collection_[mediasection_index],
+        description_->contents()[mediasection_index].media_description());
+  }
+
+  return true;
+}
+
+// Removes each listed candidate from its section's collection (sections are
+// matched via the candidate's transport name) and refreshes the affected
+// section's default connection address. Returns how many were removed.
+size_t JsepSessionDescription::RemoveCandidates(
+    const std::vector<cricket::Candidate>& candidates) {
+  size_t num_removed = 0;
+  for (auto& candidate : candidates) {
+    int mediasection_index = GetMediasectionIndex(candidate);
+    if (mediasection_index < 0) {
+      // Not found.
+      continue;
+    }
+    num_removed += candidate_collection_[mediasection_index].remove(candidate);
+    UpdateConnectionAddress(
+        candidate_collection_[mediasection_index],
+        description_->contents()[mediasection_index].media_description());
+  }
+  return num_removed;
+}
+
+// Number of m= sections in the wrapped description, or 0 when no description
+// has been set yet.
+size_t JsepSessionDescription::number_of_mediasections() const {
+  return description_ ? description_->contents().size() : 0;
+}
+
+// Returns the candidate collection for the given m= section, or nullptr if
+// the index is out of range.
+const IceCandidateCollection* JsepSessionDescription::candidates(
+    size_t mediasection_index) const {
+  if (mediasection_index >= candidate_collection_.size())
+    return nullptr;  // Prefer nullptr over NULL in modern C++.
+  return &candidate_collection_[mediasection_index];
+}
+
+// Serializes the full description to SDP. Fails on a null output pointer, an
+// uninitialized description, or an empty serialization result.
+bool JsepSessionDescription::ToString(std::string* out) const {
+  if (!description_ || !out) {
+    return false;
+  }
+  *out = SdpSerialize(*this);
+  return !out->empty();
+}
+
+// Resolves which m= section `candidate` belongs to, writing the result into
+// `*index`. The mline index is used first; a non-empty sdp_mid then takes
+// precedence if it matches a content name. Returns false when neither
+// identifies a section.
+bool JsepSessionDescription::GetMediasectionIndex(
+    const IceCandidateInterface* candidate,
+    size_t* index) {
+  if (!candidate || !index) {
+    return false;
+  }
+
+  // If the candidate has no valid mline index or sdp_mid, it is impossible
+  // to find a match.
+  if (candidate->sdp_mid().empty() &&
+      (candidate->sdp_mline_index() < 0 ||
+       static_cast<size_t>(candidate->sdp_mline_index()) >=
+           description_->contents().size())) {
+    return false;
+  }
+
+  if (candidate->sdp_mline_index() >= 0)
+    *index = static_cast<size_t>(candidate->sdp_mline_index());
+  if (description_ && !candidate->sdp_mid().empty()) {
+    bool found = false;
+    // Try to match the sdp_mid with content name.
+    for (size_t i = 0; i < description_->contents().size(); ++i) {
+      if (candidate->sdp_mid() == description_->contents().at(i).name) {
+        *index = i;
+        found = true;
+        break;
+      }
+    }
+    if (!found) {
+      // If the sdp_mid is presented but we can't find a match, we consider
+      // this as an error.
+      return false;
+    }
+  }
+  return true;
+}
+
+// Returns the index of the m= section whose content name matches the
+// candidate's transport name, or -1 if none does.
+int JsepSessionDescription::GetMediasectionIndex(
+    const cricket::Candidate& candidate) {
+  // Find the description with a matching transport name of the candidate.
+  const std::string& transport_name = candidate.transport_name();
+  for (size_t i = 0; i < description_->contents().size(); ++i) {
+    if (transport_name == description_->contents().at(i).name) {
+      return static_cast<int>(i);
+    }
+  }
+  return -1;
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/jsep_session_description_unittest.cc b/third_party/libwebrtc/pc/jsep_session_description_unittest.cc
new file mode 100644
index 0000000000..c4b993d687
--- /dev/null
+++ b/third_party/libwebrtc/pc/jsep_session_description_unittest.cc
@@ -0,0 +1,530 @@
+/*
+ * Copyright 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/jsep_session_description.h"
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <utility>
+#include <vector>
+
+#include "api/candidate.h"
+#include "api/jsep.h"
+#include "api/jsep_ice_candidate.h"
+#include "media/base/codec.h"
+#include "p2p/base/p2p_constants.h"
+#include "p2p/base/port.h"
+#include "p2p/base/transport_description.h"
+#include "p2p/base/transport_info.h"
+#include "pc/session_description.h"
+#include "pc/webrtc_sdp.h"
+#include "rtc_base/helpers.h"
+#include "rtc_base/net_helper.h"
+#include "rtc_base/socket_address.h"
+#include "rtc_base/string_encode.h"
+#include "test/gtest.h"
+
+using cricket::MediaProtocolType;
+using ::testing::Values;
+using webrtc::IceCandidateCollection;
+using webrtc::IceCandidateInterface;
+using webrtc::JsepIceCandidate;
+using webrtc::JsepSessionDescription;
+using webrtc::SdpType;
+using webrtc::SessionDescriptionInterface;
+
+// Shared ICE credentials and candidate attributes for the tests below. The
+// *Voice/*Video variants mirror the per-section transport infos created in
+// CreateCricketSessionDescription().
+static const char kCandidateUfrag[] = "ufrag";
+static const char kCandidatePwd[] = "pwd";
+static const char kCandidateUfragVoice[] = "ufrag_voice";
+static const char kCandidatePwdVoice[] = "pwd_voice";
+static const char kCandidateUfragVideo[] = "ufrag_video";
+static const char kCandidatePwdVideo[] = "pwd_video";
+static const char kCandidateFoundation[] = "a0+B/1";
+static const uint32_t kCandidatePriority = 2130706432U;  // pref = 1.0
+static const uint32_t kCandidateGeneration = 2;
+
+// This creates a session description with both audio and video media contents.
+// In SDP this is described by two m lines, one audio and one video. Each
+// content also gets a TransportInfo carrying its own ICE ufrag/pwd, so
+// candidates added later pick up per-section credentials.
+static std::unique_ptr<cricket::SessionDescription>
+CreateCricketSessionDescription() {
+  auto desc = std::make_unique<cricket::SessionDescription>();
+
+  // AudioContentDescription
+  auto audio = std::make_unique<cricket::AudioContentDescription>();
+  // VideoContentDescription
+  auto video = std::make_unique<cricket::VideoContentDescription>();
+
+  audio->AddCodec(cricket::CreateAudioCodec(103, "ISAC", 16000, 0));
+  desc->AddContent(cricket::CN_AUDIO, MediaProtocolType::kRtp,
+                   std::move(audio));
+
+  video->AddCodec(cricket::CreateVideoCodec(120, "VP8"));
+  desc->AddContent(cricket::CN_VIDEO, MediaProtocolType::kRtp,
+                   std::move(video));
+
+  desc->AddTransportInfo(cricket::TransportInfo(
+      cricket::CN_AUDIO,
+      cricket::TransportDescription(
+          std::vector<std::string>(), kCandidateUfragVoice, kCandidatePwdVoice,
+          cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_NONE, NULL)));
+  desc->AddTransportInfo(cricket::TransportInfo(
+      cricket::CN_VIDEO,
+      cricket::TransportDescription(
+          std::vector<std::string>(), kCandidateUfragVideo, kCandidatePwdVideo,
+          cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_NONE, NULL)));
+  return desc;
+}
+
+// Fixture that owns a two-section (audio + video) JsepSessionDescription
+// offer plus a default UDP candidate used by the tests.
+class JsepSessionDescriptionTest : public ::testing::Test {
+ protected:
+  virtual void SetUp() {
+    int port = 1234;
+    rtc::SocketAddress address("127.0.0.1", port++);
+    cricket::Candidate candidate(cricket::ICE_CANDIDATE_COMPONENT_RTP, "udp",
+                                 address, 1, "", "", "local", 0, "1");
+    candidate_ = candidate;
+    const std::string session_id = rtc::ToString(rtc::CreateRandomId64());
+    const std::string session_version = rtc::ToString(rtc::CreateRandomId());
+    jsep_desc_ = std::make_unique<JsepSessionDescription>(SdpType::kOffer);
+    ASSERT_TRUE(jsep_desc_->Initialize(CreateCricketSessionDescription(),
+                                       session_id, session_version));
+  }
+
+  // Serializes `desc` to SDP, expecting a non-empty result.
+  std::string Serialize(const SessionDescriptionInterface* desc) {
+    std::string sdp;
+    EXPECT_TRUE(desc->ToString(&sdp));
+    EXPECT_FALSE(sdp.empty());
+    return sdp;
+  }
+
+  // Parses `sdp` back into a fresh offer description.
+  std::unique_ptr<SessionDescriptionInterface> DeSerialize(
+      const std::string& sdp) {
+    auto jsep_desc = std::make_unique<JsepSessionDescription>(SdpType::kOffer);
+    EXPECT_TRUE(webrtc::SdpDeserialize(sdp, jsep_desc.get(), nullptr));
+    return std::move(jsep_desc);
+  }
+
+  cricket::Candidate candidate_;
+  std::unique_ptr<JsepSessionDescription> jsep_desc_;
+};
+
+// Test that Clone() preserves the type, serialized form, session id and
+// session version of the original description.
+TEST_F(JsepSessionDescriptionTest, CloneDefault) {
+  auto new_desc = jsep_desc_->Clone();
+  EXPECT_EQ(jsep_desc_->type(), new_desc->type());
+  std::string old_desc_string;
+  std::string new_desc_string;
+  EXPECT_TRUE(jsep_desc_->ToString(&old_desc_string));
+  EXPECT_TRUE(new_desc->ToString(&new_desc_string));
+  EXPECT_EQ(old_desc_string, new_desc_string);
+  EXPECT_EQ(jsep_desc_->session_id(), new_desc->session_id());
+  EXPECT_EQ(jsep_desc_->session_version(), new_desc->session_version());
+}
+
+// Test that a rollback description (which carries no contents) can be cloned.
+TEST_F(JsepSessionDescriptionTest, CloneRollback) {
+  auto jsep_desc = std::make_unique<JsepSessionDescription>(SdpType::kRollback);
+  auto new_desc = jsep_desc->Clone();
+  EXPECT_EQ(jsep_desc->type(), new_desc->type());
+}
+
+// Test that candidates added to both media sections survive Clone() and that
+// the clone serializes identically to the original.
+TEST_F(JsepSessionDescriptionTest, CloneWithCandidates) {
+  cricket::Candidate candidate_v4(
+      cricket::ICE_CANDIDATE_COMPONENT_RTP, "udp",
+      rtc::SocketAddress("192.168.1.5", 1234), kCandidatePriority, "", "",
+      cricket::STUN_PORT_TYPE, kCandidateGeneration, kCandidateFoundation);
+  cricket::Candidate candidate_v6(
+      cricket::ICE_CANDIDATE_COMPONENT_RTP, "udp",
+      rtc::SocketAddress("::1", 1234), kCandidatePriority, "", "",
+      cricket::LOCAL_PORT_TYPE, kCandidateGeneration, kCandidateFoundation);
+
+  JsepIceCandidate jice_v4("audio", 0, candidate_v4);
+  JsepIceCandidate jice_v6("audio", 0, candidate_v6);
+  JsepIceCandidate jice_v4_video("video", 0, candidate_v4);
+  JsepIceCandidate jice_v6_video("video", 0, candidate_v6);
+  ASSERT_TRUE(jsep_desc_->AddCandidate(&jice_v4));
+  ASSERT_TRUE(jsep_desc_->AddCandidate(&jice_v6));
+  ASSERT_TRUE(jsep_desc_->AddCandidate(&jice_v4_video));
+  ASSERT_TRUE(jsep_desc_->AddCandidate(&jice_v6_video));
+  auto new_desc = jsep_desc_->Clone();
+  EXPECT_EQ(jsep_desc_->type(), new_desc->type());
+  std::string old_desc_string;
+  std::string new_desc_string;
+  EXPECT_TRUE(jsep_desc_->ToString(&old_desc_string));
+  EXPECT_TRUE(new_desc->ToString(&new_desc_string));
+  EXPECT_EQ(old_desc_string, new_desc_string);
+}
+
+// Test that number_of_mediasections() returns the number of media contents in
+// a session description (the fixture creates one audio and one video section).
+TEST_F(JsepSessionDescriptionTest, CheckSessionDescription) {
+  EXPECT_EQ(2u, jsep_desc_->number_of_mediasections());
+}
+
+// Test that we can add a candidate to a session description without MID.
+// The candidate should inherit the ufrag/pwd of the m= section it lands in.
+TEST_F(JsepSessionDescriptionTest, AddCandidateWithoutMid) {
+  JsepIceCandidate jsep_candidate("", 0, candidate_);
+  EXPECT_TRUE(jsep_desc_->AddCandidate(&jsep_candidate));
+  const IceCandidateCollection* ice_candidates = jsep_desc_->candidates(0);
+  ASSERT_TRUE(ice_candidates != NULL);
+  EXPECT_EQ(1u, ice_candidates->count());
+  const IceCandidateInterface* ice_candidate = ice_candidates->at(0);
+  ASSERT_TRUE(ice_candidate != NULL);
+  candidate_.set_username(kCandidateUfragVoice);
+  candidate_.set_password(kCandidatePwdVoice);
+  EXPECT_TRUE(ice_candidate->candidate().IsEquivalent(candidate_));
+  EXPECT_EQ(0, ice_candidate->sdp_mline_index());
+  EXPECT_EQ(0u, jsep_desc_->candidates(1)->count());
+}
+
+// Test that we can add candidates to, and remove candidates from, a session
+// description with MID. Removing candidates requires MID (transport_name).
+TEST_F(JsepSessionDescriptionTest, AddAndRemoveCandidatesWithMid) {
+  // mid and m-line index don't match; in this case mid is preferred.
+  std::string mid = "video";
+  JsepIceCandidate jsep_candidate(mid, 0, candidate_);
+  EXPECT_TRUE(jsep_desc_->AddCandidate(&jsep_candidate));
+  EXPECT_EQ(0u, jsep_desc_->candidates(0)->count());
+  const IceCandidateCollection* ice_candidates = jsep_desc_->candidates(1);
+  ASSERT_TRUE(ice_candidates != NULL);
+  EXPECT_EQ(1u, ice_candidates->count());
+  const IceCandidateInterface* ice_candidate = ice_candidates->at(0);
+  ASSERT_TRUE(ice_candidate != NULL);
+  candidate_.set_username(kCandidateUfragVideo);
+  candidate_.set_password(kCandidatePwdVideo);
+  EXPECT_TRUE(ice_candidate->candidate().IsEquivalent(candidate_));
+  // The mline index should have been updated according to mid.
+  EXPECT_EQ(1, ice_candidate->sdp_mline_index());
+
+  std::vector<cricket::Candidate> candidates(1, candidate_);
+  candidates[0].set_transport_name(mid);
+  EXPECT_EQ(1u, jsep_desc_->RemoveCandidates(candidates));
+  EXPECT_EQ(0u, jsep_desc_->candidates(0)->count());
+  EXPECT_EQ(0u, jsep_desc_->candidates(1)->count());
+}
+
+// Test that a candidate which already carries its own ufrag/pwd keeps those
+// credentials instead of inheriting the transport's.
+TEST_F(JsepSessionDescriptionTest, AddCandidateAlreadyHasUfrag) {
+  candidate_.set_username(kCandidateUfrag);
+  candidate_.set_password(kCandidatePwd);
+  JsepIceCandidate jsep_candidate("audio", 0, candidate_);
+  EXPECT_TRUE(jsep_desc_->AddCandidate(&jsep_candidate));
+  const IceCandidateCollection* ice_candidates = jsep_desc_->candidates(0);
+  ASSERT_TRUE(ice_candidates != NULL);
+  EXPECT_EQ(1u, ice_candidates->count());
+  const IceCandidateInterface* ice_candidate = ice_candidates->at(0);
+  ASSERT_TRUE(ice_candidate != NULL);
+  candidate_.set_username(kCandidateUfrag);
+  candidate_.set_password(kCandidatePwd);
+  EXPECT_TRUE(ice_candidate->candidate().IsEquivalent(candidate_));
+
+  EXPECT_EQ(0u, jsep_desc_->candidates(1)->count());
+}
+
+// Test that we can not add a candidate if there is no corresponding media
+// content in the session description (out-of-range m-line index or an
+// unknown MID).
+TEST_F(JsepSessionDescriptionTest, AddBadCandidate) {
+  JsepIceCandidate bad_candidate1("", 55, candidate_);
+  EXPECT_FALSE(jsep_desc_->AddCandidate(&bad_candidate1));
+
+  JsepIceCandidate bad_candidate2("some weird mid", 0, candidate_);
+  EXPECT_FALSE(jsep_desc_->AddCandidate(&bad_candidate2));
+}
+
+// Tests that repeatedly adding the same candidate, with or without credentials,
+// does not increase the number of candidates in the description. A candidate
+// differing only in ufrag/pwd counts as a duplicate.
+TEST_F(JsepSessionDescriptionTest, AddCandidateDuplicates) {
+  JsepIceCandidate jsep_candidate("", 0, candidate_);
+  EXPECT_TRUE(jsep_desc_->AddCandidate(&jsep_candidate));
+  EXPECT_EQ(1u, jsep_desc_->candidates(0)->count());
+
+  // Add the same candidate again. It should be ignored.
+  EXPECT_TRUE(jsep_desc_->AddCandidate(&jsep_candidate));
+  EXPECT_EQ(1u, jsep_desc_->candidates(0)->count());
+
+  // Create a new candidate, identical except that the ufrag and pwd are now
+  // populated.
+  candidate_.set_username(kCandidateUfragVoice);
+  candidate_.set_password(kCandidatePwdVoice);
+  JsepIceCandidate jsep_candidate_with_credentials("", 0, candidate_);
+
+  // This should also be identified as redundant and ignored.
+  EXPECT_TRUE(jsep_desc_->AddCandidate(&jsep_candidate_with_credentials));
+  EXPECT_EQ(1u, jsep_desc_->candidates(0)->count());
+}
+
+// Test that after adding a hostname candidate the connection address stays
+// the dummy 0.0.0.0:9 — a hostname is never used as the default destination.
+TEST_F(JsepSessionDescriptionTest, AddHostnameCandidate) {
+  cricket::Candidate c;
+  c.set_component(cricket::ICE_CANDIDATE_COMPONENT_RTP);
+  c.set_protocol(cricket::UDP_PROTOCOL_NAME);
+  c.set_address(rtc::SocketAddress("example.local", 1234));
+  c.set_type(cricket::LOCAL_PORT_TYPE);
+  const size_t audio_index = 0;
+  JsepIceCandidate hostname_candidate("audio", audio_index, c);
+  EXPECT_TRUE(jsep_desc_->AddCandidate(&hostname_candidate));
+
+  ASSERT_NE(nullptr, jsep_desc_->description());
+  ASSERT_EQ(2u, jsep_desc_->description()->contents().size());
+  const auto& content = jsep_desc_->description()->contents()[audio_index];
+  EXPECT_EQ("0.0.0.0:9",
+            content.media_description()->connection_address().ToString());
+}
+
+// Test that we can serialize a JsepSessionDescription and deserialize it
+// again, and that re-serialization round-trips to the identical SDP string.
+TEST_F(JsepSessionDescriptionTest, SerializeDeserialize) {
+  std::string sdp = Serialize(jsep_desc_.get());
+
+  auto parsed_jsep_desc = DeSerialize(sdp);
+  EXPECT_EQ(2u, parsed_jsep_desc->number_of_mediasections());
+
+  std::string parsed_sdp = Serialize(parsed_jsep_desc.get());
+  EXPECT_EQ(sdp, parsed_sdp);
+}
+
+// Test that we can serialize a JsepSessionDescription when a hostname
+// candidate is the default destination and deserialize it again. The
+// connection address in the deserialized description should be the dummy
+// address 0.0.0.0:9.
+TEST_F(JsepSessionDescriptionTest, SerializeDeserializeWithHostnameCandidate) {
+  cricket::Candidate c;
+  c.set_component(cricket::ICE_CANDIDATE_COMPONENT_RTP);
+  c.set_protocol(cricket::UDP_PROTOCOL_NAME);
+  c.set_address(rtc::SocketAddress("example.local", 1234));
+  c.set_type(cricket::LOCAL_PORT_TYPE);
+  const size_t audio_index = 0;
+  const size_t video_index = 1;
+  JsepIceCandidate hostname_candidate_audio("audio", audio_index, c);
+  JsepIceCandidate hostname_candidate_video("video", video_index, c);
+  EXPECT_TRUE(jsep_desc_->AddCandidate(&hostname_candidate_audio));
+  EXPECT_TRUE(jsep_desc_->AddCandidate(&hostname_candidate_video));
+
+  std::string sdp = Serialize(jsep_desc_.get());
+
+  auto parsed_jsep_desc = DeSerialize(sdp);
+  EXPECT_EQ(2u, parsed_jsep_desc->number_of_mediasections());
+
+  ASSERT_NE(nullptr, parsed_jsep_desc->description());
+  ASSERT_EQ(2u, parsed_jsep_desc->description()->contents().size());
+  const auto& audio_content =
+      parsed_jsep_desc->description()->contents()[audio_index];
+  const auto& video_content =
+      parsed_jsep_desc->description()->contents()[video_index];
+  EXPECT_EQ("0.0.0.0:9",
+            audio_content.media_description()->connection_address().ToString());
+  EXPECT_EQ("0.0.0.0:9",
+            video_content.media_description()->connection_address().ToString());
+}
+
+// Tests that we can serialize and deserialize a JsepSessionDescription
+// with candidates.
+TEST_F(JsepSessionDescriptionTest, SerializeDeserializeWithCandidates) {
+  std::string sdp = Serialize(jsep_desc_.get());
+
+  // Add a candidate and check that the serialized result is different.
+  JsepIceCandidate jsep_candidate("audio", 0, candidate_);
+  EXPECT_TRUE(jsep_desc_->AddCandidate(&jsep_candidate));
+  std::string sdp_with_candidate = Serialize(jsep_desc_.get());
+  EXPECT_NE(sdp, sdp_with_candidate);
+
+  auto parsed_jsep_desc = DeSerialize(sdp_with_candidate);
+  std::string parsed_sdp_with_candidate = Serialize(parsed_jsep_desc.get());
+
+  EXPECT_EQ(sdp_with_candidate, parsed_sdp_with_candidate);
+}
+
+// TODO(zhihuang): Modify these tests. These are used to verify that after
+// adding the candidates, the connection_address field is set correctly. Modify
+// those so that the "connection address" is tested directly.
+// Tests serialization of SDP with only IPv6 candidates and verifies that IPv6
+// is used as default address in c line according to preference.
+TEST_F(JsepSessionDescriptionTest, SerializeSessionDescriptionWithIPv6Only) {
+  // STUN has a higher preference than local host.
+  cricket::Candidate candidate1(
+      cricket::ICE_CANDIDATE_COMPONENT_RTP, "udp",
+      rtc::SocketAddress("::1", 1234), kCandidatePriority, "", "",
+      cricket::STUN_PORT_TYPE, kCandidateGeneration, kCandidateFoundation);
+  cricket::Candidate candidate2(
+      cricket::ICE_CANDIDATE_COMPONENT_RTP, "udp",
+      rtc::SocketAddress("::2", 1235), kCandidatePriority, "", "",
+      cricket::LOCAL_PORT_TYPE, kCandidateGeneration, kCandidateFoundation);
+
+  JsepIceCandidate jice1("audio", 0, candidate1);
+  JsepIceCandidate jice2("audio", 0, candidate2);
+  JsepIceCandidate jice3("video", 0, candidate1);
+  JsepIceCandidate jice4("video", 0, candidate2);
+  ASSERT_TRUE(jsep_desc_->AddCandidate(&jice1));
+  ASSERT_TRUE(jsep_desc_->AddCandidate(&jice2));
+  ASSERT_TRUE(jsep_desc_->AddCandidate(&jice3));
+  ASSERT_TRUE(jsep_desc_->AddCandidate(&jice4));
+  std::string message = Serialize(jsep_desc_.get());
+
+  // Should have a c line like this one.
+  EXPECT_NE(message.find("c=IN IP6 ::1"), std::string::npos);
+  // Shouldn't have an IP4 c line.
+  EXPECT_EQ(message.find("c=IN IP4"), std::string::npos);
+}
+
+// Tests serialization of SDP with both IPv4 and IPv6 candidates and
+// verifies that IPv4 is used as default address in c line even if the
+// preference of IPv4 is lower.
+TEST_F(JsepSessionDescriptionTest,
+       SerializeSessionDescriptionWithBothIPFamilies) {
+  cricket::Candidate candidate_v4(
+      cricket::ICE_CANDIDATE_COMPONENT_RTP, "udp",
+      rtc::SocketAddress("192.168.1.5", 1234), kCandidatePriority, "", "",
+      cricket::STUN_PORT_TYPE, kCandidateGeneration, kCandidateFoundation);
+  cricket::Candidate candidate_v6(
+      cricket::ICE_CANDIDATE_COMPONENT_RTP, "udp",
+      rtc::SocketAddress("::1", 1234), kCandidatePriority, "", "",
+      cricket::LOCAL_PORT_TYPE, kCandidateGeneration, kCandidateFoundation);
+
+  JsepIceCandidate jice_v4("audio", 0, candidate_v4);
+  JsepIceCandidate jice_v6("audio", 0, candidate_v6);
+  JsepIceCandidate jice_v4_video("video", 0, candidate_v4);
+  JsepIceCandidate jice_v6_video("video", 0, candidate_v6);
+  ASSERT_TRUE(jsep_desc_->AddCandidate(&jice_v4));
+  ASSERT_TRUE(jsep_desc_->AddCandidate(&jice_v6));
+  ASSERT_TRUE(jsep_desc_->AddCandidate(&jice_v4_video));
+  ASSERT_TRUE(jsep_desc_->AddCandidate(&jice_v6_video));
+  std::string message = Serialize(jsep_desc_.get());
+
+  // Should have a c line like this one.
+  EXPECT_NE(message.find("c=IN IP4 192.168.1.5"), std::string::npos);
+  // Shouldn't have an IP6 c line.
+  EXPECT_EQ(message.find("c=IN IP6"), std::string::npos);
+}
+
+// Tests serialization of SDP with both UDP and TCP candidates and
+// verifies that UDP is used as default address in c line even if the
+// preference of UDP is lower.
+TEST_F(JsepSessionDescriptionTest,
+       SerializeSessionDescriptionWithBothProtocols) {
+  // STUN has a higher preference than local host.
+  cricket::Candidate candidate1(
+      cricket::ICE_CANDIDATE_COMPONENT_RTP, "tcp",
+      rtc::SocketAddress("::1", 1234), kCandidatePriority, "", "",
+      cricket::STUN_PORT_TYPE, kCandidateGeneration, kCandidateFoundation);
+  cricket::Candidate candidate2(
+      cricket::ICE_CANDIDATE_COMPONENT_RTP, "udp",
+      rtc::SocketAddress("fe80::1234:5678:abcd:ef12", 1235), kCandidatePriority,
+      "", "", cricket::LOCAL_PORT_TYPE, kCandidateGeneration,
+      kCandidateFoundation);
+
+  JsepIceCandidate jice1("audio", 0, candidate1);
+  JsepIceCandidate jice2("audio", 0, candidate2);
+  JsepIceCandidate jice3("video", 0, candidate1);
+  JsepIceCandidate jice4("video", 0, candidate2);
+  ASSERT_TRUE(jsep_desc_->AddCandidate(&jice1));
+  ASSERT_TRUE(jsep_desc_->AddCandidate(&jice2));
+  ASSERT_TRUE(jsep_desc_->AddCandidate(&jice3));
+  ASSERT_TRUE(jsep_desc_->AddCandidate(&jice4));
+  std::string message = Serialize(jsep_desc_.get());
+
+  // Should have a c line like this one.
+  EXPECT_NE(message.find("c=IN IP6 fe80::1234:5678:abcd:ef12"),
+            std::string::npos);
+  // Shouldn't have an IP4 c line.
+  EXPECT_EQ(message.find("c=IN IP4"), std::string::npos);
+}
+
+// Tests serialization of SDP with only TCP candidates and verifies that
+// null IPv4 is used as default address in c line.
+TEST_F(JsepSessionDescriptionTest, SerializeSessionDescriptionWithTCPOnly) {
+  // STUN has a higher preference than local host.
+  cricket::Candidate candidate1(
+      cricket::ICE_CANDIDATE_COMPONENT_RTP, "tcp",
+      rtc::SocketAddress("::1", 1234), kCandidatePriority, "", "",
+      cricket::STUN_PORT_TYPE, kCandidateGeneration, kCandidateFoundation);
+  cricket::Candidate candidate2(
+      cricket::ICE_CANDIDATE_COMPONENT_RTP, "tcp",
+      rtc::SocketAddress("::2", 1235), kCandidatePriority, "", "",
+      cricket::LOCAL_PORT_TYPE, kCandidateGeneration, kCandidateFoundation);
+
+  JsepIceCandidate jice1("audio", 0, candidate1);
+  JsepIceCandidate jice2("audio", 0, candidate2);
+  JsepIceCandidate jice3("video", 0, candidate1);
+  JsepIceCandidate jice4("video", 0, candidate2);
+  ASSERT_TRUE(jsep_desc_->AddCandidate(&jice1));
+  ASSERT_TRUE(jsep_desc_->AddCandidate(&jice2));
+  ASSERT_TRUE(jsep_desc_->AddCandidate(&jice3));
+  ASSERT_TRUE(jsep_desc_->AddCandidate(&jice4));
+
+  std::string message = Serialize(jsep_desc_.get());
+  EXPECT_EQ(message.find("c=IN IP6 ::3"), std::string::npos);
+  // Should have a c line like this one when no usable default exists.
+  EXPECT_NE(message.find("c=IN IP4 0.0.0.0"), std::string::npos);
+}
+
+// Tests that the connection address is recomputed as candidates are removed,
+// falling back to the dummy 0.0.0.0:9 once no candidate remains.
+TEST_F(JsepSessionDescriptionTest, RemoveCandidateAndSetConnectionAddress) {
+  cricket::Candidate candidate1(
+      cricket::ICE_CANDIDATE_COMPONENT_RTP, "udp",
+      rtc::SocketAddress("::1", 1234), kCandidatePriority, "", "",
+      cricket::LOCAL_PORT_TYPE, kCandidateGeneration, kCandidateFoundation);
+  candidate1.set_transport_name("audio");
+
+  cricket::Candidate candidate2(
+      cricket::ICE_CANDIDATE_COMPONENT_RTP, "tcp",
+      rtc::SocketAddress("::2", 1235), kCandidatePriority, "", "",
+      cricket::LOCAL_PORT_TYPE, kCandidateGeneration, kCandidateFoundation);
+  candidate2.set_transport_name("audio");
+
+  cricket::Candidate candidate3(
+      cricket::ICE_CANDIDATE_COMPONENT_RTP, "udp",
+      rtc::SocketAddress("192.168.1.1", 1236), kCandidatePriority, "", "",
+      cricket::LOCAL_PORT_TYPE, kCandidateGeneration, kCandidateFoundation);
+  candidate3.set_transport_name("audio");
+
+  JsepIceCandidate jice1("audio", 0, candidate1);
+  JsepIceCandidate jice2("audio", 0, candidate2);
+  JsepIceCandidate jice3("audio", 0, candidate3);
+
+  size_t audio_index = 0;
+  auto media_desc =
+      jsep_desc_->description()->contents()[audio_index].media_description();
+
+  ASSERT_TRUE(jsep_desc_->AddCandidate(&jice1));
+  ASSERT_TRUE(jsep_desc_->AddCandidate(&jice2));
+  ASSERT_TRUE(jsep_desc_->AddCandidate(&jice3));
+
+  std::vector<cricket::Candidate> candidates;
+  EXPECT_EQ("192.168.1.1:1236", media_desc->connection_address().ToString());
+
+  // Removing the IPv4 UDP candidate falls back to the IPv6 UDP one.
+  candidates.push_back(candidate3);
+  ASSERT_TRUE(jsep_desc_->RemoveCandidates(candidates));
+  EXPECT_EQ("[::1]:1234", media_desc->connection_address().ToString());
+
+  // Removing the TCP candidate leaves the address unchanged.
+  candidates.clear();
+  candidates.push_back(candidate2);
+  ASSERT_TRUE(jsep_desc_->RemoveCandidates(candidates));
+  EXPECT_EQ("[::1]:1234", media_desc->connection_address().ToString());
+
+  // With no candidates left, the dummy address is restored.
+  candidates.clear();
+  candidates.push_back(candidate1);
+  ASSERT_TRUE(jsep_desc_->RemoveCandidates(candidates));
+  EXPECT_EQ("0.0.0.0:9", media_desc->connection_address().ToString());
+}
+
+// Round-trip check: SdpTypeToString followed by SdpTypeFromString yields the
+// original enum value. Note that only offer/pranswer/answer are enumerated
+// below; kRollback is not included.
+class EnumerateAllSdpTypesTest : public ::testing::Test,
+                                 public ::testing::WithParamInterface<SdpType> {
+};
+
+TEST_P(EnumerateAllSdpTypesTest, TestIdentity) {
+  SdpType type = GetParam();
+
+  const char* str = webrtc::SdpTypeToString(type);
+  EXPECT_EQ(type, webrtc::SdpTypeFromString(str));
+}
+
+INSTANTIATE_TEST_SUITE_P(JsepSessionDescriptionTest,
+                         EnumerateAllSdpTypesTest,
+                         Values(SdpType::kOffer,
+                                SdpType::kPrAnswer,
+                                SdpType::kAnswer));
diff --git a/third_party/libwebrtc/pc/jsep_transport.cc b/third_party/libwebrtc/pc/jsep_transport.cc
new file mode 100644
index 0000000000..2398a0ad2d
--- /dev/null
+++ b/third_party/libwebrtc/pc/jsep_transport.cc
@@ -0,0 +1,713 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/jsep_transport.h"
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <functional>
+#include <memory>
+#include <string>
+#include <utility>
+
+#include "api/array_view.h"
+#include "api/candidate.h"
+#include "p2p/base/p2p_constants.h"
+#include "p2p/base/p2p_transport_channel.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/copy_on_write_buffer.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/strings/string_builder.h"
+#include "rtc_base/trace_event.h"
+
+using webrtc::SdpType;
+
+namespace cricket {
+
+// JsepTransportDescription bundles the transport-level parameters negotiated
+// via SDP (RTCP-mux flag, SDES cryptos, encrypted header-extension ids,
+// abs-send-time extension id) with the ICE/DTLS TransportDescription.
+JsepTransportDescription::JsepTransportDescription() {}
+
+JsepTransportDescription::JsepTransportDescription(
+    bool rtcp_mux_enabled,
+    const std::vector<CryptoParams>& cryptos,
+    const std::vector<int>& encrypted_header_extension_ids,
+    int rtp_abs_sendtime_extn_id,
+    const TransportDescription& transport_desc)
+    : rtcp_mux_enabled(rtcp_mux_enabled),
+      cryptos(cryptos),
+      encrypted_header_extension_ids(encrypted_header_extension_ids),
+      rtp_abs_sendtime_extn_id(rtp_abs_sendtime_extn_id),
+      transport_desc(transport_desc) {}
+
+JsepTransportDescription::JsepTransportDescription(
+    const JsepTransportDescription& from)
+    : rtcp_mux_enabled(from.rtcp_mux_enabled),
+      cryptos(from.cryptos),
+      encrypted_header_extension_ids(from.encrypted_header_extension_ids),
+      rtp_abs_sendtime_extn_id(from.rtp_abs_sendtime_extn_id),
+      transport_desc(from.transport_desc) {}
+
+JsepTransportDescription::~JsepTransportDescription() = default;
+
+// Copy assignment; guards against self-assignment.
+JsepTransportDescription& JsepTransportDescription::operator=(
+    const JsepTransportDescription& from) {
+  if (this == &from) {
+    return *this;
+  }
+  rtcp_mux_enabled = from.rtcp_mux_enabled;
+  cryptos = from.cryptos;
+  encrypted_header_extension_ids = from.encrypted_header_extension_ids;
+  rtp_abs_sendtime_extn_id = from.rtp_abs_sendtime_extn_id;
+  transport_desc = from.transport_desc;
+
+  return *this;
+}
+
+// Constructs a JsepTransport for `mid`. Exactly one of
+// `unencrypted_rtp_transport`, `sdes_transport` and `dtls_srtp_transport`
+// may be non-null (DCHECKed below), and `rtcp_ice_transport` must be present
+// iff `rtcp_dtls_transport` is. The current (construction) thread is captured
+// as the network thread for later RTC_DCHECK_RUN_ON checks. Raw DTLS
+// transports are wrapped in ref-counted webrtc::DtlsTransport objects, and
+// any SCTP transport is attached to the RTP DTLS transport.
+JsepTransport::JsepTransport(
+    const std::string& mid,
+    const rtc::scoped_refptr<rtc::RTCCertificate>& local_certificate,
+    rtc::scoped_refptr<webrtc::IceTransportInterface> ice_transport,
+    rtc::scoped_refptr<webrtc::IceTransportInterface> rtcp_ice_transport,
+    std::unique_ptr<webrtc::RtpTransport> unencrypted_rtp_transport,
+    std::unique_ptr<webrtc::SrtpTransport> sdes_transport,
+    std::unique_ptr<webrtc::DtlsSrtpTransport> dtls_srtp_transport,
+    std::unique_ptr<DtlsTransportInternal> rtp_dtls_transport,
+    std::unique_ptr<DtlsTransportInternal> rtcp_dtls_transport,
+    std::unique_ptr<SctpTransportInternal> sctp_transport,
+    std::function<void()> rtcp_mux_active_callback)
+    : network_thread_(rtc::Thread::Current()),
+      mid_(mid),
+      local_certificate_(local_certificate),
+      ice_transport_(std::move(ice_transport)),
+      rtcp_ice_transport_(std::move(rtcp_ice_transport)),
+      unencrypted_rtp_transport_(std::move(unencrypted_rtp_transport)),
+      sdes_transport_(std::move(sdes_transport)),
+      dtls_srtp_transport_(std::move(dtls_srtp_transport)),
+      rtp_dtls_transport_(rtp_dtls_transport
+                              ? rtc::make_ref_counted<webrtc::DtlsTransport>(
+                                    std::move(rtp_dtls_transport))
+                              : nullptr),
+      rtcp_dtls_transport_(rtcp_dtls_transport
+                               ? rtc::make_ref_counted<webrtc::DtlsTransport>(
+                                     std::move(rtcp_dtls_transport))
+                               : nullptr),
+      sctp_transport_(sctp_transport
+                          ? rtc::make_ref_counted<webrtc::SctpTransport>(
+                                std::move(sctp_transport))
+                          : nullptr),
+      rtcp_mux_active_callback_(std::move(rtcp_mux_active_callback)) {
+  TRACE_EVENT0("webrtc", "JsepTransport::JsepTransport");
+  RTC_DCHECK(ice_transport_);
+  RTC_DCHECK(rtp_dtls_transport_);
+  // `rtcp_ice_transport_` must be present iff `rtcp_dtls_transport_` is
+  // present.
+  RTC_DCHECK_EQ((rtcp_ice_transport_ != nullptr),
+                (rtcp_dtls_transport_ != nullptr));
+  // Verify the "only one out of these three can be set" invariant.
+  if (unencrypted_rtp_transport_) {
+    RTC_DCHECK(!sdes_transport);
+    RTC_DCHECK(!dtls_srtp_transport);
+  } else if (sdes_transport_) {
+    RTC_DCHECK(!unencrypted_rtp_transport);
+    RTC_DCHECK(!dtls_srtp_transport);
+  } else {
+    RTC_DCHECK(dtls_srtp_transport_);
+    RTC_DCHECK(!unencrypted_rtp_transport);
+    RTC_DCHECK(!sdes_transport);
+  }
+
+  if (sctp_transport_) {
+    sctp_transport_->SetDtlsTransport(rtp_dtls_transport_);
+  }
+}
+
+// Tears down in dependency order: clear SCTP first, then the DTLS transports
+// (which may still be referenced from elsewhere), leaving ICE for last.
+JsepTransport::~JsepTransport() {
+  TRACE_EVENT0("webrtc", "JsepTransport::~JsepTransport");
+  if (sctp_transport_) {
+    sctp_transport_->Clear();
+  }
+
+  // Clear all DtlsTransports. There may be pointers to these from
+  // other places, so we can't assume they'll be deleted by the destructor.
+  rtp_dtls_transport_->Clear();
+  if (rtcp_dtls_transport_) {
+    rtcp_dtls_transport_->Clear();
+  }
+
+  // ICE will be the last transport to be deleted.
+}
+
+// Applies the local transport description of SDP `type`: validates ICE
+// parameters, negotiates RTCP mux, configures SDES or DTLS-SRTP header
+// extension ids, verifies the local certificate against the SDP fingerprint,
+// pushes ICE parameters into the ICE transport(s), and for (pr)answers
+// negotiates the DTLS parameters. Must run on the network thread.
+webrtc::RTCError JsepTransport::SetLocalJsepTransportDescription(
+    const JsepTransportDescription& jsep_description,
+    SdpType type) {
+  webrtc::RTCError error;
+  TRACE_EVENT0("webrtc", "JsepTransport::SetLocalJsepTransportDescription");
+  RTC_DCHECK_RUN_ON(network_thread_);
+
+  IceParameters ice_parameters =
+      jsep_description.transport_desc.GetIceParameters();
+  webrtc::RTCError ice_parameters_result = ice_parameters.Validate();
+  if (!ice_parameters_result.ok()) {
+    rtc::StringBuilder sb;
+    sb << "Invalid ICE parameters: " << ice_parameters_result.message();
+    return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER,
+                            sb.Release());
+  }
+
+  if (!SetRtcpMux(jsep_description.rtcp_mux_enabled, type,
+                  ContentSource::CS_LOCAL)) {
+    return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER,
+                            "Failed to setup RTCP mux.");
+  }
+
+  // If doing SDES, setup the SDES crypto parameters.
+  if (sdes_transport_) {
+    RTC_DCHECK(!unencrypted_rtp_transport_);
+    RTC_DCHECK(!dtls_srtp_transport_);
+    if (!SetSdes(jsep_description.cryptos,
+                 jsep_description.encrypted_header_extension_ids, type,
+                 ContentSource::CS_LOCAL)) {
+      return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER,
+                              "Failed to setup SDES crypto parameters.");
+    }
+  } else if (dtls_srtp_transport_) {
+    RTC_DCHECK(!unencrypted_rtp_transport_);
+    RTC_DCHECK(!sdes_transport_);
+    dtls_srtp_transport_->UpdateRecvEncryptedHeaderExtensionIds(
+        jsep_description.encrypted_header_extension_ids);
+  }
+  // Detect an ICE restart by comparing the new credentials against the
+  // previous local description, before overwriting it below.
+  bool ice_restarting =
+      local_description_ != nullptr &&
+      IceCredentialsChanged(local_description_->transport_desc.ice_ufrag,
+                            local_description_->transport_desc.ice_pwd,
+                            ice_parameters.ufrag, ice_parameters.pwd);
+  local_description_.reset(new JsepTransportDescription(jsep_description));
+
+  rtc::SSLFingerprint* local_fp =
+      local_description_->transport_desc.identity_fingerprint.get();
+
+  if (!local_fp) {
+    local_certificate_ = nullptr;
+  } else {
+    error = VerifyCertificateFingerprint(local_certificate_.get(), local_fp);
+    if (!error.ok()) {
+      local_description_.reset();
+      return error;
+    }
+  }
+  RTC_DCHECK(rtp_dtls_transport_->internal());
+  rtp_dtls_transport_->internal()->ice_transport()->SetIceParameters(
+      ice_parameters);
+
+  if (rtcp_dtls_transport_) {
+    RTC_DCHECK(rtcp_dtls_transport_->internal());
+    rtcp_dtls_transport_->internal()->ice_transport()->SetIceParameters(
+        ice_parameters);
+  }
+  // If PRANSWER/ANSWER is set, we should decide transport protocol type.
+  if (type == SdpType::kPrAnswer || type == SdpType::kAnswer) {
+    error = NegotiateAndSetDtlsParameters(type);
+  }
+  if (!error.ok()) {
+    local_description_.reset();
+    return error;
+  }
+
+  if (needs_ice_restart_ && ice_restarting) {
+    needs_ice_restart_ = false;
+    RTC_LOG(LS_VERBOSE) << "needs-ice-restart flag cleared for transport "
+                        << mid();
+  }
+
+  return webrtc::RTCError::OK();
+}
+
+// Applies the remote transport description of SDP `type`, mirroring
+// SetLocalJsepTransportDescription for the remote side: validates ICE
+// parameters, negotiates RTCP mux, configures SDES or DTLS-SRTP header
+// extension ids, pushes the remote ICE parameters into the ICE transport(s),
+// and for (pr)answers negotiates DTLS parameters. Must run on the network
+// thread.
+webrtc::RTCError JsepTransport::SetRemoteJsepTransportDescription(
+    const JsepTransportDescription& jsep_description,
+    webrtc::SdpType type) {
+  // Fixed copy/paste defect: the trace label previously named
+  // SetLocalJsepTransportDescription, mislabeling this function in traces.
+  TRACE_EVENT0("webrtc", "JsepTransport::SetRemoteJsepTransportDescription");
+  webrtc::RTCError error;
+
+  RTC_DCHECK_RUN_ON(network_thread_);
+
+  IceParameters ice_parameters =
+      jsep_description.transport_desc.GetIceParameters();
+  webrtc::RTCError ice_parameters_result = ice_parameters.Validate();
+  if (!ice_parameters_result.ok()) {
+    remote_description_.reset();
+    rtc::StringBuilder sb;
+    sb << "Invalid ICE parameters: " << ice_parameters_result.message();
+    return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER,
+                            sb.Release());
+  }
+
+  if (!SetRtcpMux(jsep_description.rtcp_mux_enabled, type,
+                  ContentSource::CS_REMOTE)) {
+    return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER,
+                            "Failed to setup RTCP mux.");
+  }
+
+  // If doing SDES, setup the SDES crypto parameters.
+  if (sdes_transport_) {
+    RTC_DCHECK(!unencrypted_rtp_transport_);
+    RTC_DCHECK(!dtls_srtp_transport_);
+    if (!SetSdes(jsep_description.cryptos,
+                 jsep_description.encrypted_header_extension_ids, type,
+                 ContentSource::CS_REMOTE)) {
+      return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER,
+                              "Failed to setup SDES crypto parameters.");
+    }
+    sdes_transport_->CacheRtpAbsSendTimeHeaderExtension(
+        jsep_description.rtp_abs_sendtime_extn_id);
+  } else if (dtls_srtp_transport_) {
+    RTC_DCHECK(!unencrypted_rtp_transport_);
+    RTC_DCHECK(!sdes_transport_);
+    dtls_srtp_transport_->UpdateSendEncryptedHeaderExtensionIds(
+        jsep_description.encrypted_header_extension_ids);
+    dtls_srtp_transport_->CacheRtpAbsSendTimeHeaderExtension(
+        jsep_description.rtp_abs_sendtime_extn_id);
+  }
+
+  remote_description_.reset(new JsepTransportDescription(jsep_description));
+  RTC_DCHECK(rtp_dtls_transport());
+  SetRemoteIceParameters(ice_parameters, rtp_dtls_transport()->ice_transport());
+
+  if (rtcp_dtls_transport()) {
+    SetRemoteIceParameters(ice_parameters,
+                           rtcp_dtls_transport()->ice_transport());
+  }
+
+  // If PRANSWER/ANSWER is set, we should decide transport protocol type.
+  if (type == SdpType::kPrAnswer || type == SdpType::kAnswer) {
+    error = NegotiateAndSetDtlsParameters(SdpType::kOffer);
+  }
+  if (!error.ok()) {
+    remote_description_.reset();
+    return error;
+  }
+  return webrtc::RTCError::OK();
+}
+
+// Feeds remote ICE candidates to the matching (RTP or RTCP) ICE transport.
+// Fails if no offer/answer has been applied yet, or if a candidate names a
+// component for which no transport exists.
+webrtc::RTCError JsepTransport::AddRemoteCandidates(
+    const Candidates& candidates) {
+  RTC_DCHECK_RUN_ON(network_thread_);
+  // Candidates can only be used once both descriptions are in place.
+  if (!local_description_ || !remote_description_) {
+    return webrtc::RTCError(webrtc::RTCErrorType::INVALID_STATE,
+                            mid() +
+                                " is not ready to use the remote candidate "
+                                "because the local or remote description is "
+                                "not set.");
+  }
+
+  for (const cricket::Candidate& candidate : candidates) {
+    // Route the candidate to the DTLS transport owning its component.
+    rtc::scoped_refptr<webrtc::DtlsTransport> dtls = rtcp_dtls_transport_;
+    if (candidate.component() == cricket::ICE_CANDIDATE_COMPONENT_RTP) {
+      dtls = rtp_dtls_transport_;
+    }
+    if (!dtls) {
+      return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER,
+                              "Candidate has an unknown component: " +
+                                  candidate.ToSensitiveString() + " for mid " +
+                                  mid());
+    }
+    RTC_DCHECK(dtls->internal() && dtls->internal()->ice_transport());
+    dtls->internal()->ice_transport()->AddRemoteCandidate(candidate);
+  }
+  return webrtc::RTCError::OK();
+}
+
+// Marks this transport as needing an ICE restart (JSEP "needs-ice-restart").
+// Idempotent: only the first transition to true is logged.
+void JsepTransport::SetNeedsIceRestartFlag() {
+  RTC_DCHECK_RUN_ON(network_thread_);
+  if (needs_ice_restart_) {
+    return;
+  }
+  needs_ice_restart_ = true;
+  RTC_LOG(LS_VERBOSE) << "needs-ice-restart flag set for transport " << mid();
+}
+
+// Returns the negotiated DTLS role of the RTP DTLS transport, or an empty
+// optional if the underlying transport has not assigned a role yet.
+absl::optional<rtc::SSLRole> JsepTransport::GetDtlsRole() const {
+  RTC_DCHECK_RUN_ON(network_thread_);
+  RTC_DCHECK(rtp_dtls_transport_);
+  RTC_DCHECK(rtp_dtls_transport_->internal());
+  rtc::SSLRole negotiated_role;
+  if (rtp_dtls_transport_->internal()->GetDtlsRole(&negotiated_role)) {
+    return negotiated_role;
+  }
+  return {};
+}
+
+// Collects per-component transport statistics into `stats`. Always queries
+// the RTP component; additionally queries the RTCP component while a
+// dedicated RTCP transport still exists (i.e. RTCP mux is not active).
+bool JsepTransport::GetStats(TransportStats* stats) {
+  TRACE_EVENT0("webrtc", "JsepTransport::GetStats");
+  RTC_DCHECK_RUN_ON(network_thread_);
+  stats->transport_name = mid();
+  stats->channel_stats.clear();
+  RTC_DCHECK(rtp_dtls_transport_->internal());
+  bool success = GetTransportStats(rtp_dtls_transport_->internal(),
+                                   ICE_CANDIDATE_COMPONENT_RTP, stats);
+  if (rtcp_dtls_transport_) {
+    RTC_DCHECK(rtcp_dtls_transport_->internal());
+    // Both components are queried; overall success requires both to succeed.
+    success = GetTransportStats(rtcp_dtls_transport_->internal(),
+                                ICE_CANDIDATE_COMPONENT_RTCP, stats) &&
+              success;
+  }
+  return success;
+}
+
+// Checks that `fingerprint` matches the identity in `certificate` by
+// recomputing the certificate's fingerprint with the same hash algorithm.
+// Returns INVALID_PARAMETER if either input is null or the digests differ.
+webrtc::RTCError JsepTransport::VerifyCertificateFingerprint(
+    const rtc::RTCCertificate* certificate,
+    const rtc::SSLFingerprint* fingerprint) const {
+  TRACE_EVENT0("webrtc", "JsepTransport::VerifyCertificateFingerprint");
+  RTC_DCHECK_RUN_ON(network_thread_);
+  if (!fingerprint) {
+    return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER,
+                            "No fingerprint");
+  }
+  if (!certificate) {
+    return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER,
+                            "Fingerprint provided but no identity available.");
+  }
+  // Derive the expected fingerprint from our identity, using the algorithm
+  // named by the supplied fingerprint so the digests are comparable.
+  std::unique_ptr<rtc::SSLFingerprint> computed =
+      rtc::SSLFingerprint::CreateUnique(fingerprint->algorithm,
+                                        *certificate->identity());
+  RTC_DCHECK(computed.get() != NULL);
+  if (*computed == *fingerprint) {
+    return webrtc::RTCError::OK();
+  }
+  char ss_buf[1024];
+  rtc::SimpleStringBuilder desc(ss_buf);
+  desc << "Local fingerprint does not match identity. Expected: "
+       << computed->ToString() << " Got: " << fingerprint->ToString();
+  return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER,
+                          std::string(desc.str()));
+}
+
+// Forwards the active-reset-srtp-params setting to the DTLS-SRTP transport.
+// No-op when this JsepTransport is not using DTLS-SRTP.
+void JsepTransport::SetActiveResetSrtpParams(bool active_reset_srtp_params) {
+  RTC_DCHECK_RUN_ON(network_thread_);
+  if (!dtls_srtp_transport_) {
+    return;
+  }
+  RTC_LOG(LS_INFO)
+      << "Setting active_reset_srtp_params of DtlsSrtpTransport to: "
+      << active_reset_srtp_params;
+  dtls_srtp_transport_->SetActiveResetSrtpParams(active_reset_srtp_params);
+}
+
+// Pushes the remote ICE credentials and the remote ICE mode down to the
+// given ICE transport. Requires a remote description to already be applied,
+// since the ICE mode is read from it (both preconditions are DCHECKed).
+void JsepTransport::SetRemoteIceParameters(
+    const IceParameters& ice_parameters,
+    IceTransportInternal* ice_transport) {
+  TRACE_EVENT0("webrtc", "JsepTransport::SetRemoteIceParameters");
+  RTC_DCHECK_RUN_ON(network_thread_);
+  RTC_DCHECK(ice_transport);
+  RTC_DCHECK(remote_description_);
+  ice_transport->SetRemoteIceParameters(ice_parameters);
+  ice_transport->SetRemoteIceMode(remote_description_->transport_desc.ice_mode);
+}
+
+// Applies the negotiated DTLS role and remote fingerprint to one DTLS
+// transport. `remote_fingerprint` must be non-null (it is dereferenced
+// unconditionally); an empty fingerprint is used when DTLS is not in use.
+// `dtls_role` may be unset if no role was negotiated.
+webrtc::RTCError JsepTransport::SetNegotiatedDtlsParameters(
+    DtlsTransportInternal* dtls_transport,
+    absl::optional<rtc::SSLRole> dtls_role,
+    rtc::SSLFingerprint* remote_fingerprint) {
+  RTC_DCHECK(dtls_transport);
+  return dtls_transport->SetRemoteParameters(
+      remote_fingerprint->algorithm, remote_fingerprint->digest.cdata(),
+      remote_fingerprint->digest.size(), dtls_role);
+}
+
+// Feeds the rtcp-mux attribute of a local or remote description into the
+// RTCP-mux offer/answer state machine. A final answer that leaves muxing
+// active tears down the dedicated RTCP transport via ActivateRtcpMux().
+// Returns false if the state machine rejects the update.
+bool JsepTransport::SetRtcpMux(bool enable,
+                               webrtc::SdpType type,
+                               ContentSource source) {
+  RTC_DCHECK_RUN_ON(network_thread_);
+  bool ret = false;
+  switch (type) {
+    case SdpType::kOffer:
+      ret = rtcp_mux_negotiator_.SetOffer(enable, source);
+      break;
+    case SdpType::kPrAnswer:
+      // This may activate RTCP muxing, but we don't yet destroy the transport
+      // because the final answer may deactivate it.
+      ret = rtcp_mux_negotiator_.SetProvisionalAnswer(enable, source);
+      break;
+    case SdpType::kAnswer:
+      ret = rtcp_mux_negotiator_.SetAnswer(enable, source);
+      if (ret && rtcp_mux_negotiator_.IsActive()) {
+        ActivateRtcpMux();
+      }
+      break;
+    default:
+      RTC_DCHECK_NOTREACHED();
+  }
+
+  if (!ret) {
+    return false;
+  }
+
+  // Propagate the (possibly changed) mux state to the active RTP transport.
+  auto transport = rtp_transport();
+  transport->SetRtcpMuxEnabled(rtcp_mux_negotiator_.IsActive());
+  return ret;
+}
+
+// Transitions the transport to RTCP-mux mode: detaches the dedicated RTCP
+// packet/DTLS transport from whichever RTP transport is active (exactly one
+// of the three is non-null), drops this object's reference to the RTCP DTLS
+// transport, and notifies the JsepTransportController so it can update its
+// aggregate transport states.
+void JsepTransport::ActivateRtcpMux() {
+  if (unencrypted_rtp_transport_) {
+    RTC_DCHECK(!sdes_transport_);
+    RTC_DCHECK(!dtls_srtp_transport_);
+    unencrypted_rtp_transport_->SetRtcpPacketTransport(nullptr);
+  } else if (sdes_transport_) {
+    RTC_DCHECK(!unencrypted_rtp_transport_);
+    RTC_DCHECK(!dtls_srtp_transport_);
+    sdes_transport_->SetRtcpPacketTransport(nullptr);
+  } else if (dtls_srtp_transport_) {
+    // Note: a tautological RTC_DCHECK(dtls_srtp_transport_) was removed
+    // here; it was guaranteed true by the enclosing `else if` condition.
+    RTC_DCHECK(!unencrypted_rtp_transport_);
+    RTC_DCHECK(!sdes_transport_);
+    dtls_srtp_transport_->SetDtlsTransports(rtp_dtls_transport(),
+                                            /*rtcp_dtls_transport=*/nullptr);
+  }
+  rtcp_dtls_transport_ = nullptr;  // Destroy this reference.
+  // Notify the JsepTransportController to update the aggregate states.
+  rtcp_mux_active_callback_();
+}
+
+// Runs SDES offer/answer negotiation over `cryptos` and, when an answer
+// completes the negotiation, pushes the agreed crypto suites/keys and the
+// cached encrypted header extension IDs into `sdes_transport_`. Returns
+// false if the SDES negotiator rejects the parameters.
+bool JsepTransport::SetSdes(const std::vector<CryptoParams>& cryptos,
+                            const std::vector<int>& encrypted_extension_ids,
+                            webrtc::SdpType type,
+                            ContentSource source) {
+  RTC_DCHECK_RUN_ON(network_thread_);
+  bool ret = false;
+  ret = sdes_negotiator_.Process(cryptos, type, source);
+  if (!ret) {
+    return ret;
+  }
+
+  // A local description defines what we can receive; a remote description
+  // defines what we may send.
+  if (source == ContentSource::CS_LOCAL) {
+    recv_extension_ids_ = encrypted_extension_ids;
+  } else {
+    send_extension_ids_ = encrypted_extension_ids;
+  }
+
+  // If setting an SDES answer succeeded, apply the negotiated parameters
+  // to the SRTP transport.
+  if ((type == SdpType::kPrAnswer || type == SdpType::kAnswer) && ret) {
+    if (sdes_negotiator_.send_crypto_suite() &&
+        sdes_negotiator_.recv_crypto_suite()) {
+      RTC_DCHECK(send_extension_ids_);
+      RTC_DCHECK(recv_extension_ids_);
+      ret = sdes_transport_->SetRtpParams(
+          *(sdes_negotiator_.send_crypto_suite()),
+          sdes_negotiator_.send_key().data(),
+          static_cast<int>(sdes_negotiator_.send_key().size()),
+          *(send_extension_ids_), *(sdes_negotiator_.recv_crypto_suite()),
+          sdes_negotiator_.recv_key().data(),
+          static_cast<int>(sdes_negotiator_.recv_key().size()),
+          *(recv_extension_ids_));
+    } else {
+      RTC_LOG(LS_INFO) << "No crypto keys are provided for SDES.";
+      if (type == SdpType::kAnswer) {
+        // Explicitly reset the `sdes_transport_` if no crypto param is
+        // provided in the answer. No need to call `ResetParams()` for
+        // `sdes_negotiator_` because it resets the params inside `SetAnswer`.
+        sdes_transport_->ResetParams();
+      }
+    }
+  }
+  return ret;
+}
+
+// Negotiates DTLS parameters (role and remote fingerprint) from the current
+// local and remote descriptions and pushes them to the RTP — and, if still
+// present, RTCP — DTLS transports. When neither side supplied a fingerprint,
+// DTLS is not in use and an empty fingerprint is pushed down instead.
+// `local_description_type` is the SDP type our local description played in
+// this offer/answer exchange.
+webrtc::RTCError JsepTransport::NegotiateAndSetDtlsParameters(
+    SdpType local_description_type) {
+  RTC_DCHECK_RUN_ON(network_thread_);
+  if (!local_description_ || !remote_description_) {
+    return webrtc::RTCError(webrtc::RTCErrorType::INVALID_STATE,
+                            "Applying an answer transport description "
+                            "without applying any offer.");
+  }
+  std::unique_ptr<rtc::SSLFingerprint> remote_fingerprint;
+  absl::optional<rtc::SSLRole> negotiated_dtls_role;
+
+  rtc::SSLFingerprint* local_fp =
+      local_description_->transport_desc.identity_fingerprint.get();
+  rtc::SSLFingerprint* remote_fp =
+      remote_description_->transport_desc.identity_fingerprint.get();
+  if (remote_fp && local_fp) {
+    // Both sides are doing DTLS: determine the role from the setup
+    // attributes of the two descriptions.
+    remote_fingerprint = std::make_unique<rtc::SSLFingerprint>(*remote_fp);
+    webrtc::RTCError error =
+        NegotiateDtlsRole(local_description_type,
+                          local_description_->transport_desc.connection_role,
+                          remote_description_->transport_desc.connection_role,
+                          &negotiated_dtls_role);
+    if (!error.ok()) {
+      return error;
+    }
+  } else if (local_fp && (local_description_type == SdpType::kAnswer)) {
+    return webrtc::RTCError(
+        webrtc::RTCErrorType::INVALID_PARAMETER,
+        "Local fingerprint supplied when caller didn't offer DTLS.");
+  } else {
+    // We are not doing DTLS
+    remote_fingerprint = std::make_unique<rtc::SSLFingerprint>(
+        "", rtc::ArrayView<const uint8_t>());
+  }
+  // Now that we have negotiated everything, push it downward.
+  // Note that we cache the result so that if we have race conditions
+  // between future SetRemote/SetLocal invocations and new transport
+  // creation, we have the negotiation state saved until a new
+  // negotiation happens.
+  RTC_DCHECK(rtp_dtls_transport());
+  webrtc::RTCError error = SetNegotiatedDtlsParameters(
+      rtp_dtls_transport(), negotiated_dtls_role, remote_fingerprint.get());
+  if (!error.ok()) {
+    return error;
+  }
+
+  if (rtcp_dtls_transport()) {
+    error = SetNegotiatedDtlsParameters(
+        rtcp_dtls_transport(), negotiated_dtls_role, remote_fingerprint.get());
+  }
+  return error;
+}
+
+// Determines which side acts as DTLS client/server from the offer/answer
+// setup attributes per RFC 4145 / RFC 5763 / RFC 8842, writing the local
+// role into `*negotiated_dtls_role`. Returns INVALID_PARAMETER when the
+// combination of roles is not permitted.
+webrtc::RTCError JsepTransport::NegotiateDtlsRole(
+    SdpType local_description_type,
+    ConnectionRole local_connection_role,
+    ConnectionRole remote_connection_role,
+    absl::optional<rtc::SSLRole>* negotiated_dtls_role) {
+  // From RFC 4145, section-4.1, The following are the values that the
+  // 'setup' attribute can take in an offer/answer exchange:
+  //       Offer      Answer
+  //      ________________
+  //      active     passive / holdconn
+  //      passive    active / holdconn
+  //      actpass    active / passive / holdconn
+  //      holdconn   holdconn
+  //
+  // Set the role that is most conformant with RFC 5763, Section 5, bullet 1
+  // The endpoint MUST use the setup attribute defined in [RFC4145].
+  // The endpoint that is the offerer MUST use the setup attribute
+  // value of setup:actpass and be prepared to receive a client_hello
+  // before it receives the answer.  The answerer MUST use either a
+  // setup attribute value of setup:active or setup:passive.  Note that
+  // if the answerer uses setup:passive, then the DTLS handshake will
+  // not begin until the answerer is received, which adds additional
+  // latency. setup:active allows the answer and the DTLS handshake to
+  // occur in parallel.  Thus, setup:active is RECOMMENDED.  Whichever
+  // party is active MUST initiate a DTLS handshake by sending a
+  // ClientHello over each flow (host/port quartet).
+  // IOW - actpass and passive modes should be treated as server and
+  // active as client.
+  // RFC 8842 section 5.3 updates this text, so that it is mandated
+  // for the responder to handle offers with "active" and "passive"
+  // as well as "actpass"
+  bool is_remote_server = false;
+  if (local_description_type == SdpType::kOffer) {
+    // We are the offerer; our answer's setup attribute decides the roles.
+    if (local_connection_role != CONNECTIONROLE_ACTPASS) {
+      return webrtc::RTCError(
+          webrtc::RTCErrorType::INVALID_PARAMETER,
+          "Offerer must use actpass value for setup attribute.");
+    }
+
+    if (remote_connection_role == CONNECTIONROLE_ACTIVE ||
+        remote_connection_role == CONNECTIONROLE_PASSIVE ||
+        remote_connection_role == CONNECTIONROLE_NONE) {
+      is_remote_server = (remote_connection_role == CONNECTIONROLE_PASSIVE);
+    } else {
+      return webrtc::RTCError(
+          webrtc::RTCErrorType::INVALID_PARAMETER,
+          "Answerer must use either active or passive value "
+          "for setup attribute.");
+    }
+    // If remote is NONE or ACTIVE it will act as client.
+  } else {
+    // We are the answerer.
+    if (remote_connection_role != CONNECTIONROLE_ACTPASS &&
+        remote_connection_role != CONNECTIONROLE_NONE) {
+      // Accept a remote role attribute that's not "actpass", but matches the
+      // current negotiated role. This is allowed by dtls-sdp, though our
+      // implementation will never generate such an offer as it's not
+      // recommended.
+      //
+      // See https://datatracker.ietf.org/doc/html/draft-ietf-mmusic-dtls-sdp,
+      // section 5.5.
+      auto current_dtls_role = GetDtlsRole();
+      if (!current_dtls_role) {
+        // Role not assigned yet. Verify that local role fits with remote role.
+        switch (remote_connection_role) {
+          case CONNECTIONROLE_ACTIVE:
+            if (local_connection_role != CONNECTIONROLE_PASSIVE) {
+              return webrtc::RTCError(
+                  webrtc::RTCErrorType::INVALID_PARAMETER,
+                  "Answerer must be passive when offerer is active");
+            }
+            break;
+          case CONNECTIONROLE_PASSIVE:
+            if (local_connection_role != CONNECTIONROLE_ACTIVE) {
+              return webrtc::RTCError(
+                  webrtc::RTCErrorType::INVALID_PARAMETER,
+                  "Answerer must be active when offerer is passive");
+            }
+            break;
+          default:
+            RTC_DCHECK_NOTREACHED();
+            break;
+        }
+      } else {
+        // A role was already negotiated; a non-actpass offer must not
+        // contradict it.
+        if ((*current_dtls_role == rtc::SSL_CLIENT &&
+             remote_connection_role == CONNECTIONROLE_ACTIVE) ||
+            (*current_dtls_role == rtc::SSL_SERVER &&
+             remote_connection_role == CONNECTIONROLE_PASSIVE)) {
+          return webrtc::RTCError(
+              webrtc::RTCErrorType::INVALID_PARAMETER,
+              "Offerer must use current negotiated role for "
+              "setup attribute.");
+        }
+      }
+    }
+
+    if (local_connection_role == CONNECTIONROLE_ACTIVE ||
+        local_connection_role == CONNECTIONROLE_PASSIVE) {
+      is_remote_server = (local_connection_role == CONNECTIONROLE_ACTIVE);
+    } else {
+      return webrtc::RTCError(
+          webrtc::RTCErrorType::INVALID_PARAMETER,
+          "Answerer must use either active or passive value "
+          "for setup attribute.");
+    }
+
+    // If local is passive, local will act as server.
+  }
+
+  *negotiated_dtls_role =
+      (is_remote_server ? rtc::SSL_CLIENT : rtc::SSL_SERVER);
+  return webrtc::RTCError::OK();
+}
+
+// Fills one TransportChannelStats entry for `component` — DTLS version,
+// cipher/crypto suites, state, role, peer signature algorithm, and the
+// underlying ICE transport stats — and appends it to `stats->channel_stats`.
+// Returns false only if the ICE transport fails to report its stats.
+bool JsepTransport::GetTransportStats(DtlsTransportInternal* dtls_transport,
+                                      int component,
+                                      TransportStats* stats) {
+  RTC_DCHECK_RUN_ON(network_thread_);
+  RTC_DCHECK(dtls_transport);
+  TransportChannelStats substats;
+  substats.component = component;
+  dtls_transport->GetSslVersionBytes(&substats.ssl_version_bytes);
+  dtls_transport->GetSrtpCryptoSuite(&substats.srtp_crypto_suite);
+  dtls_transport->GetSslCipherSuite(&substats.ssl_cipher_suite);
+  substats.dtls_state = dtls_transport->dtls_state();
+  rtc::SSLRole dtls_role;
+  // The role is only recorded once the DTLS transport has assigned one.
+  if (dtls_transport->GetDtlsRole(&dtls_role)) {
+    substats.dtls_role = dtls_role;
+  }
+  if (!dtls_transport->ice_transport()->GetStats(
+          &substats.ice_transport_stats)) {
+    return false;
+  }
+  substats.ssl_peer_signature_algorithm =
+      dtls_transport->GetSslPeerSignatureAlgorithm();
+  stats->channel_stats.push_back(substats);
+  return true;
+}
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/pc/jsep_transport.h b/third_party/libwebrtc/pc/jsep_transport.h
new file mode 100644
index 0000000000..f2643070a1
--- /dev/null
+++ b/third_party/libwebrtc/pc/jsep_transport.h
@@ -0,0 +1,330 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_JSEP_TRANSPORT_H_
+#define PC_JSEP_TRANSPORT_H_
+
+#include <functional>
+#include <map>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/candidate.h"
+#include "api/crypto_params.h"
+#include "api/ice_transport_interface.h"
+#include "api/jsep.h"
+#include "api/rtc_error.h"
+#include "api/scoped_refptr.h"
+#include "api/sequence_checker.h"
+#include "api/transport/data_channel_transport_interface.h"
+#include "media/sctp/sctp_transport_internal.h"
+#include "p2p/base/dtls_transport.h"
+#include "p2p/base/dtls_transport_internal.h"
+#include "p2p/base/ice_transport_internal.h"
+#include "p2p/base/p2p_constants.h"
+#include "p2p/base/transport_description.h"
+#include "p2p/base/transport_info.h"
+#include "pc/dtls_srtp_transport.h"
+#include "pc/dtls_transport.h"
+#include "pc/rtcp_mux_filter.h"
+#include "pc/rtp_transport.h"
+#include "pc/rtp_transport_internal.h"
+#include "pc/sctp_transport.h"
+#include "pc/session_description.h"
+#include "pc/srtp_filter.h"
+#include "pc/srtp_transport.h"
+#include "pc/transport_stats.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/rtc_certificate.h"
+#include "rtc_base/ssl_fingerprint.h"
+#include "rtc_base/ssl_stream_adapter.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/thread_annotations.h"
+
+namespace cricket {
+
+class DtlsTransportInternal;
+
+// Bundle of the transport-level pieces of one SDP m= section that
+// JsepTransport consumes during offer/answer processing.
+struct JsepTransportDescription {
+ public:
+  JsepTransportDescription();
+  JsepTransportDescription(
+      bool rtcp_mux_enabled,
+      const std::vector<CryptoParams>& cryptos,
+      const std::vector<int>& encrypted_header_extension_ids,
+      int rtp_abs_sendtime_extn_id,
+      const TransportDescription& transport_description);
+  JsepTransportDescription(const JsepTransportDescription& from);
+  ~JsepTransportDescription();
+
+  JsepTransportDescription& operator=(const JsepTransportDescription& from);
+
+  // Whether RTCP multiplexing is enabled in this description.
+  bool rtcp_mux_enabled = true;
+  // SDES crypto parameters; consumed by JsepTransport::SetSdes().
+  std::vector<CryptoParams> cryptos;
+  // Header extension IDs that are to be encrypted.
+  std::vector<int> encrypted_header_extension_ids;
+  // -1 means the abs-send-time extension ID is not set.
+  int rtp_abs_sendtime_extn_id = -1;
+  // TODO(zhihuang): Add the ICE and DTLS related variables and methods from
+  // TransportDescription and remove this extra layer of abstraction.
+  TransportDescription transport_desc;
+};
+
+// Helper class used by JsepTransportController that processes
+// TransportDescriptions. A TransportDescription represents the
+// transport-specific properties of an SDP m= section, processed according to
+// JSEP. Each transport consists of DTLS and ICE transport channels for RTP
+// (and possibly RTCP, if rtcp-mux isn't used).
+//
+// On Threading: JsepTransport performs work solely on the network thread, and
+// so its methods should only be called on the network thread.
+class JsepTransport {
+ public:
+  // `mid` is just used for log statements in order to identify the Transport.
+  // Note that `local_certificate` is allowed to be null since a remote
+  // description may be set before a local certificate is generated.
+  JsepTransport(
+      const std::string& mid,
+      const rtc::scoped_refptr<rtc::RTCCertificate>& local_certificate,
+      rtc::scoped_refptr<webrtc::IceTransportInterface> ice_transport,
+      rtc::scoped_refptr<webrtc::IceTransportInterface> rtcp_ice_transport,
+      std::unique_ptr<webrtc::RtpTransport> unencrypted_rtp_transport,
+      std::unique_ptr<webrtc::SrtpTransport> sdes_transport,
+      std::unique_ptr<webrtc::DtlsSrtpTransport> dtls_srtp_transport,
+      std::unique_ptr<DtlsTransportInternal> rtp_dtls_transport,
+      std::unique_ptr<DtlsTransportInternal> rtcp_dtls_transport,
+      std::unique_ptr<SctpTransportInternal> sctp_transport,
+      std::function<void()> rtcp_mux_active_callback);
+
+  ~JsepTransport();
+
+  JsepTransport(const JsepTransport&) = delete;
+  JsepTransport& operator=(const JsepTransport&) = delete;
+
+  // Returns the MID of this transport. This is only used for logging.
+  const std::string& mid() const { return mid_; }
+
+  // Must be called before applying local session description.
+  // Needed in order to verify the local fingerprint.
+  void SetLocalCertificate(
+      const rtc::scoped_refptr<rtc::RTCCertificate>& local_certificate) {
+    RTC_DCHECK_RUN_ON(network_thread_);
+    local_certificate_ = local_certificate;
+  }
+
+  // Return the local certificate provided by SetLocalCertificate.
+  rtc::scoped_refptr<rtc::RTCCertificate> GetLocalCertificate() const {
+    RTC_DCHECK_RUN_ON(network_thread_);
+    return local_certificate_;
+  }
+
+  // Applies a local TransportDescription (offer or answer) to this transport.
+  webrtc::RTCError SetLocalJsepTransportDescription(
+      const JsepTransportDescription& jsep_description,
+      webrtc::SdpType type);
+
+  // Set the remote TransportDescription to be used by DTLS and ICE channels
+  // that are part of this Transport.
+  webrtc::RTCError SetRemoteJsepTransportDescription(
+      const JsepTransportDescription& jsep_description,
+      webrtc::SdpType type);
+  // Routes each candidate to the ICE transport matching its component.
+  // Requires both descriptions to be applied first.
+  webrtc::RTCError AddRemoteCandidates(const Candidates& candidates);
+
+  // Set the "needs-ice-restart" flag as described in JSEP. After the flag is
+  // set, offers should generate new ufrags/passwords until an ICE restart
+  // occurs.
+  //
+  // This and `needs_ice_restart()` must be called on the network thread.
+  void SetNeedsIceRestartFlag();
+
+  // Returns true if the ICE restart flag above was set, and no ICE restart has
+  // occurred yet for this transport (by applying a local description with
+  // changed ufrag/password).
+  bool needs_ice_restart() const {
+    RTC_DCHECK_RUN_ON(network_thread_);
+    return needs_ice_restart_;
+  }
+
+  // Returns role if negotiated, or empty absl::optional if it hasn't been
+  // negotiated yet.
+  absl::optional<rtc::SSLRole> GetDtlsRole() const;
+
+  // TODO(deadbeef): Make this const. See comment in transportcontroller.h.
+  bool GetStats(TransportStats* stats);
+
+  const JsepTransportDescription* local_description() const {
+    RTC_DCHECK_RUN_ON(network_thread_);
+    return local_description_.get();
+  }
+
+  const JsepTransportDescription* remote_description() const {
+    RTC_DCHECK_RUN_ON(network_thread_);
+    return remote_description_.get();
+  }
+
+  // Returns the rtp transport, if any. Preference order: DTLS-SRTP, then
+  // SDES, then unencrypted (only one of the three is ever non-null).
+  webrtc::RtpTransportInternal* rtp_transport() const {
+    if (dtls_srtp_transport_) {
+      return dtls_srtp_transport_.get();
+    }
+    if (sdes_transport_) {
+      return sdes_transport_.get();
+    }
+    if (unencrypted_rtp_transport_) {
+      return unencrypted_rtp_transport_.get();
+    }
+    return nullptr;
+  }
+
+  // Returns the internal DTLS transport carrying RTP, or null.
+  const DtlsTransportInternal* rtp_dtls_transport() const {
+    if (rtp_dtls_transport_) {
+      return rtp_dtls_transport_->internal();
+    }
+    return nullptr;
+  }
+
+  DtlsTransportInternal* rtp_dtls_transport() {
+    if (rtp_dtls_transport_) {
+      return rtp_dtls_transport_->internal();
+    }
+    return nullptr;
+  }
+
+  // Returns the internal DTLS transport carrying RTCP, or null (always null
+  // once RTCP mux has been activated).
+  const DtlsTransportInternal* rtcp_dtls_transport() const {
+    RTC_DCHECK_RUN_ON(network_thread_);
+    if (rtcp_dtls_transport_) {
+      return rtcp_dtls_transport_->internal();
+    }
+    return nullptr;
+  }
+
+  DtlsTransportInternal* rtcp_dtls_transport() {
+    RTC_DCHECK_RUN_ON(network_thread_);
+    if (rtcp_dtls_transport_) {
+      return rtcp_dtls_transport_->internal();
+    }
+    return nullptr;
+  }
+
+  // Returns the owning reference to the RTP DtlsTransport wrapper (may be
+  // null).
+  rtc::scoped_refptr<webrtc::DtlsTransport> RtpDtlsTransport() {
+    return rtp_dtls_transport_;
+  }
+
+  rtc::scoped_refptr<webrtc::SctpTransport> SctpTransport() const {
+    return sctp_transport_;
+  }
+
+  // TODO(bugs.webrtc.org/9719): Delete method, update callers to use
+  // SctpTransport() instead.
+  webrtc::DataChannelTransportInterface* data_channel_transport() const {
+    return sctp_transport_.get();
+  }
+
+  // TODO(deadbeef): The methods below are only public for testing. Should make
+  // them utility functions or objects so they can be tested independently from
+  // this class.
+
+  // Returns an error if the certificate's identity does not match the
+  // fingerprint, or either is NULL.
+  webrtc::RTCError VerifyCertificateFingerprint(
+      const rtc::RTCCertificate* certificate,
+      const rtc::SSLFingerprint* fingerprint) const;
+
+  // Forwards the setting to the DTLS-SRTP transport; no-op otherwise.
+  void SetActiveResetSrtpParams(bool active_reset_srtp_params);
+
+ private:
+  bool SetRtcpMux(bool enable, webrtc::SdpType type, ContentSource source);
+
+  void ActivateRtcpMux() RTC_RUN_ON(network_thread_);
+
+  bool SetSdes(const std::vector<CryptoParams>& cryptos,
+               const std::vector<int>& encrypted_extension_ids,
+               webrtc::SdpType type,
+               ContentSource source);
+
+  // Negotiates and sets the DTLS parameters based on the current local and
+  // remote transport description, such as the DTLS role to use, and whether
+  // DTLS should be activated.
+  //
+  // Called when an answer TransportDescription is applied.
+  webrtc::RTCError NegotiateAndSetDtlsParameters(
+      webrtc::SdpType local_description_type);
+
+  // Negotiates the DTLS role based off the offer and answer as specified by
+  // RFC 4145, section-4.1. Returns an RTCError if role cannot be determined
+  // from the local description and remote description.
+  webrtc::RTCError NegotiateDtlsRole(
+      webrtc::SdpType local_description_type,
+      ConnectionRole local_connection_role,
+      ConnectionRole remote_connection_role,
+      absl::optional<rtc::SSLRole>* negotiated_dtls_role);
+
+  // Pushes down the ICE parameters from the remote description.
+  void SetRemoteIceParameters(const IceParameters& ice_parameters,
+                              IceTransportInternal* ice);
+
+  // Pushes down the DTLS parameters obtained via negotiation.
+  static webrtc::RTCError SetNegotiatedDtlsParameters(
+      DtlsTransportInternal* dtls_transport,
+      absl::optional<rtc::SSLRole> dtls_role,
+      rtc::SSLFingerprint* remote_fingerprint);
+
+  bool GetTransportStats(DtlsTransportInternal* dtls_transport,
+                         int component,
+                         TransportStats* stats);
+
+  // Owning thread, for safety checks
+  const rtc::Thread* const network_thread_;
+  const std::string mid_;
+  // needs-ice-restart bit as described in JSEP.
+  bool needs_ice_restart_ RTC_GUARDED_BY(network_thread_) = false;
+  rtc::scoped_refptr<rtc::RTCCertificate> local_certificate_
+      RTC_GUARDED_BY(network_thread_);
+  std::unique_ptr<JsepTransportDescription> local_description_
+      RTC_GUARDED_BY(network_thread_);
+  std::unique_ptr<JsepTransportDescription> remote_description_
+      RTC_GUARDED_BY(network_thread_);
+
+  // Ice transport which may be used by any of upper-layer transports (below).
+  // Owned by JsepTransport and guaranteed to outlive the transports below.
+  const rtc::scoped_refptr<webrtc::IceTransportInterface> ice_transport_;
+  const rtc::scoped_refptr<webrtc::IceTransportInterface> rtcp_ice_transport_;
+
+  // To avoid downcasting and make it type safe, keep three unique pointers for
+  // different SRTP mode and only one of these is non-nullptr.
+  const std::unique_ptr<webrtc::RtpTransport> unencrypted_rtp_transport_;
+  const std::unique_ptr<webrtc::SrtpTransport> sdes_transport_;
+  const std::unique_ptr<webrtc::DtlsSrtpTransport> dtls_srtp_transport_;
+
+  const rtc::scoped_refptr<webrtc::DtlsTransport> rtp_dtls_transport_;
+  // The RTCP transport is const for all usages, except that it is cleared
+  // when RTCP multiplexing is turned on; this happens on the network thread.
+  rtc::scoped_refptr<webrtc::DtlsTransport> rtcp_dtls_transport_
+      RTC_GUARDED_BY(network_thread_);
+
+  const rtc::scoped_refptr<webrtc::SctpTransport> sctp_transport_;
+
+  SrtpFilter sdes_negotiator_ RTC_GUARDED_BY(network_thread_);
+  RtcpMuxFilter rtcp_mux_negotiator_ RTC_GUARDED_BY(network_thread_);
+
+  // Cache the encrypted header extension IDs for SDES negotiation.
+  absl::optional<std::vector<int>> send_extension_ids_
+      RTC_GUARDED_BY(network_thread_);
+  absl::optional<std::vector<int>> recv_extension_ids_
+      RTC_GUARDED_BY(network_thread_);
+
+  // This is invoked when RTCP-mux becomes active and
+  // `rtcp_dtls_transport_` is destroyed. The JsepTransportController will
+  // receive the callback and update the aggregate transport states.
+  std::function<void()> rtcp_mux_active_callback_;
+};
+
+} // namespace cricket
+
+#endif // PC_JSEP_TRANSPORT_H_
diff --git a/third_party/libwebrtc/pc/jsep_transport_collection.cc b/third_party/libwebrtc/pc/jsep_transport_collection.cc
new file mode 100644
index 0000000000..b50d303d77
--- /dev/null
+++ b/third_party/libwebrtc/pc/jsep_transport_collection.cc
@@ -0,0 +1,390 @@
+/*
+ * Copyright 2021 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/jsep_transport_collection.h"
+
+#include <algorithm>
+#include <map>
+#include <set>
+#include <type_traits>
+#include <utility>
+
+#include "p2p/base/p2p_constants.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+
+void BundleManager::Update(const cricket::SessionDescription* description,
+ SdpType type) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ // Rollbacks should call Rollback, not Update.
+ RTC_DCHECK(type != SdpType::kRollback);
+ bool bundle_groups_changed = false;
+ // TODO(bugs.webrtc.org/3349): Do this for kPrAnswer as well. To make this
+ // work, we also need to make sure PRANSWERs don't call
+ // MaybeDestroyJsepTransport, because the final answer may need the destroyed
+ // transport if it changes the BUNDLE group.
+ if (bundle_policy_ == PeerConnectionInterface::kBundlePolicyMaxBundle ||
+ type == SdpType::kAnswer) {
+ // If our policy is "max-bundle" or this is an answer, update all bundle
+ // groups.
+ bundle_groups_changed = true;
+ bundle_groups_.clear();
+ for (const cricket::ContentGroup* new_bundle_group :
+ description->GetGroupsByName(cricket::GROUP_TYPE_BUNDLE)) {
+ bundle_groups_.push_back(
+ std::make_unique<cricket::ContentGroup>(*new_bundle_group));
+ RTC_DLOG(LS_VERBOSE) << "Establishing bundle group "
+ << new_bundle_group->ToString();
+ }
+ } else if (type == SdpType::kOffer) {
+ // If this is an offer, update existing bundle groups.
+ // We do this because as per RFC 8843, section 7.3.2, the answerer cannot
+ // remove an m= section from an existing BUNDLE group without rejecting it.
+ // Thus any m= sections added to a BUNDLE group in this offer can
+ // preemptively start using the bundled transport, as there is no possible
+ // non-bundled fallback.
+ for (const cricket::ContentGroup* new_bundle_group :
+ description->GetGroupsByName(cricket::GROUP_TYPE_BUNDLE)) {
+ // Attempt to find a matching existing group.
+ for (const std::string& mid : new_bundle_group->content_names()) {
+ auto it = established_bundle_groups_by_mid_.find(mid);
+ if (it != established_bundle_groups_by_mid_.end()) {
+ *it->second = *new_bundle_group;
+ bundle_groups_changed = true;
+ RTC_DLOG(LS_VERBOSE)
+ << "Establishing bundle group " << new_bundle_group->ToString();
+ break;
+ }
+ }
+ }
+ }
+ if (bundle_groups_changed) {
+ RefreshEstablishedBundleGroupsByMid();
+ }
+}
+
+const cricket::ContentGroup* BundleManager::LookupGroupByMid(
+ const std::string& mid) const {
+ auto it = established_bundle_groups_by_mid_.find(mid);
+ return it != established_bundle_groups_by_mid_.end() ? it->second : nullptr;
+}
+bool BundleManager::IsFirstMidInGroup(const std::string& mid) const {
+ auto group = LookupGroupByMid(mid);
+ if (!group) {
+ return true; // Unbundled MIDs are considered group leaders
+ }
+ return mid == *(group->FirstContentName());
+}
+
+cricket::ContentGroup* BundleManager::LookupGroupByMid(const std::string& mid) {
+ auto it = established_bundle_groups_by_mid_.find(mid);
+ return it != established_bundle_groups_by_mid_.end() ? it->second : nullptr;
+}
+
+void BundleManager::DeleteMid(const cricket::ContentGroup* bundle_group,
+ const std::string& mid) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ RTC_LOG(LS_VERBOSE) << "Deleting mid " << mid << " from bundle group "
+ << bundle_group->ToString();
+ // Remove the rejected content from the `bundle_group`.
+ // The const pointer arg is used to identify the group, we verify
+ // it before we use it to make a modification.
+ auto bundle_group_it = std::find_if(
+ bundle_groups_.begin(), bundle_groups_.end(),
+ [bundle_group](std::unique_ptr<cricket::ContentGroup>& group) {
+ return bundle_group == group.get();
+ });
+ RTC_DCHECK(bundle_group_it != bundle_groups_.end());
+ (*bundle_group_it)->RemoveContentName(mid);
+ established_bundle_groups_by_mid_.erase(
+ established_bundle_groups_by_mid_.find(mid));
+}
+
+void BundleManager::DeleteGroup(const cricket::ContentGroup* bundle_group) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ RTC_DLOG(LS_VERBOSE) << "Deleting bundle group " << bundle_group->ToString();
+
+ auto bundle_group_it = std::find_if(
+ bundle_groups_.begin(), bundle_groups_.end(),
+ [bundle_group](std::unique_ptr<cricket::ContentGroup>& group) {
+ return bundle_group == group.get();
+ });
+ RTC_DCHECK(bundle_group_it != bundle_groups_.end());
+ auto mid_list = (*bundle_group_it)->content_names();
+ for (const auto& content_name : mid_list) {
+ DeleteMid(bundle_group, content_name);
+ }
+ bundle_groups_.erase(bundle_group_it);
+}
+
+void BundleManager::Rollback() {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ bundle_groups_.clear();
+ for (const auto& bundle_group : stable_bundle_groups_) {
+ bundle_groups_.push_back(
+ std::make_unique<cricket::ContentGroup>(*bundle_group));
+ }
+ RefreshEstablishedBundleGroupsByMid();
+}
+
+void BundleManager::Commit() {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ stable_bundle_groups_.clear();
+ for (const auto& bundle_group : bundle_groups_) {
+ stable_bundle_groups_.push_back(
+ std::make_unique<cricket::ContentGroup>(*bundle_group));
+ }
+}
+
+void BundleManager::RefreshEstablishedBundleGroupsByMid() {
+ established_bundle_groups_by_mid_.clear();
+ for (const auto& bundle_group : bundle_groups_) {
+ for (const std::string& content_name : bundle_group->content_names()) {
+ established_bundle_groups_by_mid_[content_name] = bundle_group.get();
+ }
+ }
+}
+
+void JsepTransportCollection::RegisterTransport(
+ const std::string& mid,
+ std::unique_ptr<cricket::JsepTransport> transport) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ SetTransportForMid(mid, transport.get());
+ jsep_transports_by_name_[mid] = std::move(transport);
+ RTC_DCHECK(IsConsistent());
+}
+
+std::vector<cricket::JsepTransport*> JsepTransportCollection::Transports() {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ std::vector<cricket::JsepTransport*> result;
+ for (auto& kv : jsep_transports_by_name_) {
+ result.push_back(kv.second.get());
+ }
+ return result;
+}
+
+std::vector<cricket::JsepTransport*>
+JsepTransportCollection::ActiveTransports() {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ std::set<cricket::JsepTransport*> transports;
+ for (const auto& kv : mid_to_transport_) {
+ transports.insert(kv.second);
+ }
+ return std::vector<cricket::JsepTransport*>(transports.begin(),
+ transports.end());
+}
+
+void JsepTransportCollection::DestroyAllTransports() {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ for (const auto& jsep_transport : jsep_transports_by_name_) {
+ map_change_callback_(jsep_transport.first, nullptr);
+ }
+ jsep_transports_by_name_.clear();
+ RTC_DCHECK(IsConsistent());
+}
+
+const cricket::JsepTransport* JsepTransportCollection::GetTransportByName(
+ const std::string& transport_name) const {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ auto it = jsep_transports_by_name_.find(transport_name);
+ return (it == jsep_transports_by_name_.end()) ? nullptr : it->second.get();
+}
+
+cricket::JsepTransport* JsepTransportCollection::GetTransportByName(
+ const std::string& transport_name) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ auto it = jsep_transports_by_name_.find(transport_name);
+ return (it == jsep_transports_by_name_.end()) ? nullptr : it->second.get();
+}
+
+cricket::JsepTransport* JsepTransportCollection::GetTransportForMid(
+ const std::string& mid) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ auto it = mid_to_transport_.find(mid);
+ return it == mid_to_transport_.end() ? nullptr : it->second;
+}
+
+const cricket::JsepTransport* JsepTransportCollection::GetTransportForMid(
+ const std::string& mid) const {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ auto it = mid_to_transport_.find(mid);
+ return it == mid_to_transport_.end() ? nullptr : it->second;
+}
+
+cricket::JsepTransport* JsepTransportCollection::GetTransportForMid(
+ absl::string_view mid) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ // TODO(hta): should be a better way.
+ auto it = mid_to_transport_.find(std::string(mid));
+ return it == mid_to_transport_.end() ? nullptr : it->second;
+}
+
+const cricket::JsepTransport* JsepTransportCollection::GetTransportForMid(
+ absl::string_view mid) const {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ // TODO(hta): Should be a better way
+ auto it = mid_to_transport_.find(std::string(mid));
+ return it == mid_to_transport_.end() ? nullptr : it->second;
+}
+
+bool JsepTransportCollection::SetTransportForMid(
+ const std::string& mid,
+ cricket::JsepTransport* jsep_transport) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ RTC_DCHECK(jsep_transport);
+
+ auto it = mid_to_transport_.find(mid);
+ if (it != mid_to_transport_.end() && it->second == jsep_transport)
+ return true;
+
+ // The map_change_callback must be called before destroying the
+ // transport, because it removes references to the transport
+ // in the RTP demuxer.
+ bool result = map_change_callback_(mid, jsep_transport);
+
+ if (it == mid_to_transport_.end()) {
+ mid_to_transport_.insert(std::make_pair(mid, jsep_transport));
+ } else {
+ auto old_transport = it->second;
+ it->second = jsep_transport;
+ MaybeDestroyJsepTransport(old_transport);
+ }
+ RTC_DCHECK(IsConsistent());
+ return result;
+}
+
+void JsepTransportCollection::RemoveTransportForMid(const std::string& mid) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ RTC_DCHECK(IsConsistent());
+ bool ret = map_change_callback_(mid, nullptr);
+ // Calling OnTransportChanged with nullptr should always succeed, since it is
+ // only expected to fail when adding media to a transport (not removing).
+ RTC_DCHECK(ret);
+
+ auto old_transport = GetTransportForMid(mid);
+ if (old_transport) {
+ mid_to_transport_.erase(mid);
+ MaybeDestroyJsepTransport(old_transport);
+ }
+ RTC_DCHECK(IsConsistent());
+}
+
+bool JsepTransportCollection::RollbackTransports() {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ bool ret = true;
+ // First, remove any new mid->transport mappings.
+ for (const auto& kv : mid_to_transport_) {
+ if (stable_mid_to_transport_.count(kv.first) == 0) {
+ ret = ret && map_change_callback_(kv.first, nullptr);
+ }
+ }
+ // Next, restore old mappings.
+ for (const auto& kv : stable_mid_to_transport_) {
+ auto it = mid_to_transport_.find(kv.first);
+ if (it == mid_to_transport_.end() || it->second != kv.second) {
+ ret = ret && map_change_callback_(kv.first, kv.second);
+ }
+ }
+ mid_to_transport_ = stable_mid_to_transport_;
+ // Moving a transport back to mid_to_transport_ means it's now included in
+ // the aggregate state if it wasn't previously.
+ state_change_callback_();
+ DestroyUnusedTransports();
+ RTC_DCHECK(IsConsistent());
+ return ret;
+}
+
+void JsepTransportCollection::CommitTransports() {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ stable_mid_to_transport_ = mid_to_transport_;
+ DestroyUnusedTransports();
+ RTC_DCHECK(IsConsistent());
+}
+
+bool JsepTransportCollection::TransportInUse(
+ cricket::JsepTransport* jsep_transport) const {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ for (const auto& kv : mid_to_transport_) {
+ if (kv.second == jsep_transport) {
+ return true;
+ }
+ }
+ return false;
+}
+
+bool JsepTransportCollection::TransportNeededForRollback(
+ cricket::JsepTransport* jsep_transport) const {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ for (const auto& kv : stable_mid_to_transport_) {
+ if (kv.second == jsep_transport) {
+ return true;
+ }
+ }
+ return false;
+}
+
+void JsepTransportCollection::MaybeDestroyJsepTransport(
+ cricket::JsepTransport* transport) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ // Don't destroy the JsepTransport if there are still media sections referring
+ // to it, or if it will be needed in case of rollback.
+ if (TransportInUse(transport)) {
+ return;
+ }
+ // If this transport is needed for rollback, don't destroy it yet, but make
+ // sure the aggregate state is updated since this transport is no longer
+ // included in it.
+ if (TransportNeededForRollback(transport)) {
+ state_change_callback_();
+ return;
+ }
+ for (const auto& it : jsep_transports_by_name_) {
+ if (it.second.get() == transport) {
+ jsep_transports_by_name_.erase(it.first);
+ state_change_callback_();
+ break;
+ }
+ }
+ RTC_DCHECK(IsConsistent());
+}
+
+void JsepTransportCollection::DestroyUnusedTransports() {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ bool need_state_change_callback = false;
+ auto it = jsep_transports_by_name_.begin();
+ while (it != jsep_transports_by_name_.end()) {
+ if (TransportInUse(it->second.get()) ||
+ TransportNeededForRollback(it->second.get())) {
+ ++it;
+ } else {
+ it = jsep_transports_by_name_.erase(it);
+ need_state_change_callback = true;
+ }
+ }
+ if (need_state_change_callback) {
+ state_change_callback_();
+ }
+}
+
+bool JsepTransportCollection::IsConsistent() {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ for (const auto& it : jsep_transports_by_name_) {
+ if (!TransportInUse(it.second.get()) &&
+ !TransportNeededForRollback(it.second.get())) {
+ RTC_LOG(LS_ERROR) << "Transport registered with mid " << it.first
+ << " is not in use, transport " << it.second.get();
+ return false;
+ }
+ }
+ return true;
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/jsep_transport_collection.h b/third_party/libwebrtc/pc/jsep_transport_collection.h
new file mode 100644
index 0000000000..f5eba64e96
--- /dev/null
+++ b/third_party/libwebrtc/pc/jsep_transport_collection.h
@@ -0,0 +1,173 @@
+/*
+ * Copyright 2021 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_JSEP_TRANSPORT_COLLECTION_H_
+#define PC_JSEP_TRANSPORT_COLLECTION_H_
+
+#include <functional>
+#include <map>
+#include <memory>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "api/jsep.h"
+#include "api/peer_connection_interface.h"
+#include "api/sequence_checker.h"
+#include "pc/jsep_transport.h"
+#include "pc/session_description.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/system/no_unique_address.h"
+#include "rtc_base/thread_annotations.h"
+
+namespace webrtc {
+
+// This class manages information about RFC 8843 BUNDLE bundles
+// in SDP descriptions.
+
+// This is a work-in-progress. Planned steps:
+// 1) Move all Bundle-related data structures from JsepTransport
+// into this class.
+// 2) Move all Bundle-related functions into this class.
+// 3) Move remaining Bundle-related logic into this class.
+// Make data members private.
+// 4) Refine interface to have comprehensible semantics.
+// 5) Add unit tests.
+// 6) Change the logic to do what's right.
+class BundleManager {
+ public:
+ explicit BundleManager(PeerConnectionInterface::BundlePolicy bundle_policy)
+ : bundle_policy_(bundle_policy) {}
+ const std::vector<std::unique_ptr<cricket::ContentGroup>>& bundle_groups()
+ const {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ return bundle_groups_;
+ }
+ // Lookup a bundle group by a member mid name.
+ const cricket::ContentGroup* LookupGroupByMid(const std::string& mid) const;
+ cricket::ContentGroup* LookupGroupByMid(const std::string& mid);
+ // Returns true if the MID is the first item of a group, or if
+ // the MID is not a member of a group.
+ bool IsFirstMidInGroup(const std::string& mid) const;
+ // Update the groups description. This completely replaces the group
+ // description with the one from the SessionDescription.
+ void Update(const cricket::SessionDescription* description, SdpType type);
+ // Delete a MID from the group that contains it.
+ void DeleteMid(const cricket::ContentGroup* bundle_group,
+ const std::string& mid);
+ // Delete a group.
+ void DeleteGroup(const cricket::ContentGroup* bundle_group);
+ // Roll back to previous stable state.
+ void Rollback();
+ // Commit current bundle groups.
+ void Commit();
+
+ private:
+ // Recalculate established_bundle_groups_by_mid_ from bundle_groups_.
+ void RefreshEstablishedBundleGroupsByMid() RTC_RUN_ON(sequence_checker_);
+
+ RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_{
+ SequenceChecker::kDetached};
+ PeerConnectionInterface::BundlePolicy bundle_policy_;
+ std::vector<std::unique_ptr<cricket::ContentGroup>> bundle_groups_
+ RTC_GUARDED_BY(sequence_checker_);
+ std::vector<std::unique_ptr<cricket::ContentGroup>> stable_bundle_groups_
+ RTC_GUARDED_BY(sequence_checker_);
+ std::map<std::string, cricket::ContentGroup*>
+ established_bundle_groups_by_mid_;
+};
+
+// This class keeps the mapping of MIDs to transports.
+// It is pulled out here because a lot of the code that deals with
+// bundles ends up modifying this map, and the two need to be consistent;
+// the managers may merge.
+class JsepTransportCollection {
+ public:
+ JsepTransportCollection(std::function<bool(const std::string& mid,
+ cricket::JsepTransport* transport)>
+ map_change_callback,
+ std::function<void()> state_change_callback)
+ : map_change_callback_(map_change_callback),
+ state_change_callback_(state_change_callback) {}
+
+ void RegisterTransport(const std::string& mid,
+ std::unique_ptr<cricket::JsepTransport> transport);
+ // Returns all transports, including those not currently mapped to any MID
+ // because they're being kept alive in case of rollback.
+ std::vector<cricket::JsepTransport*> Transports();
+ // Only returns transports currently mapped to a MID.
+ std::vector<cricket::JsepTransport*> ActiveTransports();
+ void DestroyAllTransports();
+ // Lookup a JsepTransport by the MID that was used to register it.
+ cricket::JsepTransport* GetTransportByName(const std::string& mid);
+ const cricket::JsepTransport* GetTransportByName(
+ const std::string& mid) const;
+ // Lookup a JsepTransport by any MID that refers to it.
+ cricket::JsepTransport* GetTransportForMid(const std::string& mid);
+ const cricket::JsepTransport* GetTransportForMid(
+ const std::string& mid) const;
+ cricket::JsepTransport* GetTransportForMid(absl::string_view mid);
+ const cricket::JsepTransport* GetTransportForMid(absl::string_view mid) const;
+ // Set transport for a MID. This may destroy a transport if it is no
+ // longer in use.
+ bool SetTransportForMid(const std::string& mid,
+ cricket::JsepTransport* jsep_transport);
+ // Remove a transport for a MID. This may destroy a transport if it is
+ // no longer in use.
+ void RemoveTransportForMid(const std::string& mid);
+ // Roll back to previous stable mid-to-transport mappings.
+ bool RollbackTransports();
+ // Commit pending mid-transport mappings (rollback is no longer possible),
+ // and destroy unused transports because we know now we'll never need them
+ // again.
+ void CommitTransports();
+
+ private:
+ // Returns true if any mid currently maps to this transport.
+ bool TransportInUse(cricket::JsepTransport* jsep_transport) const;
+
+ // Returns true if any mid in the last stable mapping maps to this transport,
+ // meaning it should be kept alive in case of rollback.
+ bool TransportNeededForRollback(cricket::JsepTransport* jsep_transport) const;
+
+ // Destroy a transport if it's no longer in use. This includes whether it
+ // will be needed in case of rollback.
+ void MaybeDestroyJsepTransport(cricket::JsepTransport* transport);
+
+ // Destroys all transports that are no longer in use.
+ void DestroyUnusedTransports();
+
+ bool IsConsistent(); // For testing only: Verify internal structure.
+
+ RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_{
+ SequenceChecker::kDetached};
+ // This member owns the JSEP transports.
+ std::map<std::string, std::unique_ptr<cricket::JsepTransport>>
+ jsep_transports_by_name_ RTC_GUARDED_BY(sequence_checker_);
+
+ // This keeps track of the mapping between media section
+ // (BaseChannel/SctpTransport) and the JsepTransport underneath.
+ std::map<std::string, cricket::JsepTransport*> mid_to_transport_
+ RTC_GUARDED_BY(sequence_checker_);
+ // A snapshot of mid_to_transport_ at the last stable state. Used for
+ // rollback.
+ std::map<std::string, cricket::JsepTransport*> stable_mid_to_transport_
+ RTC_GUARDED_BY(sequence_checker_);
+ // Callback used to inform subscribers of altered transports.
+ const std::function<bool(const std::string& mid,
+ cricket::JsepTransport* transport)>
+ map_change_callback_;
+ // Callback used to inform subscribers of possibly altered state.
+ const std::function<void()> state_change_callback_;
+};
+
+} // namespace webrtc
+
+#endif // PC_JSEP_TRANSPORT_COLLECTION_H_
diff --git a/third_party/libwebrtc/pc/jsep_transport_controller.cc b/third_party/libwebrtc/pc/jsep_transport_controller.cc
new file mode 100644
index 0000000000..7c669a5ae3
--- /dev/null
+++ b/third_party/libwebrtc/pc/jsep_transport_controller.cc
@@ -0,0 +1,1451 @@
+/*
+ * Copyright 2017 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/jsep_transport_controller.h"
+
+#include <stddef.h>
+
+#include <functional>
+#include <memory>
+#include <string>
+#include <type_traits>
+#include <utility>
+
+#include "absl/algorithm/container.h"
+#include "api/dtls_transport_interface.h"
+#include "api/rtp_parameters.h"
+#include "api/sequence_checker.h"
+#include "api/transport/enums.h"
+#include "media/sctp/sctp_transport_internal.h"
+#include "p2p/base/dtls_transport.h"
+#include "p2p/base/ice_transport_internal.h"
+#include "p2p/base/p2p_constants.h"
+#include "p2p/base/port.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/trace_event.h"
+
+using webrtc::SdpType;
+
+namespace webrtc {
+
+JsepTransportController::JsepTransportController(
+ rtc::Thread* network_thread,
+ cricket::PortAllocator* port_allocator,
+ AsyncDnsResolverFactoryInterface* async_dns_resolver_factory,
+ Config config)
+ : network_thread_(network_thread),
+ port_allocator_(port_allocator),
+ async_dns_resolver_factory_(async_dns_resolver_factory),
+ transports_(
+ [this](const std::string& mid, cricket::JsepTransport* transport) {
+ return OnTransportChanged(mid, transport);
+ },
+ [this]() {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ UpdateAggregateStates_n();
+ }),
+ config_(std::move(config)),
+ active_reset_srtp_params_(config.active_reset_srtp_params),
+ bundles_(config.bundle_policy) {
+ // The `transport_observer` is assumed to be non-null.
+ RTC_DCHECK(config_.transport_observer);
+ RTC_DCHECK(config_.rtcp_handler);
+ RTC_DCHECK(config_.ice_transport_factory);
+ RTC_DCHECK(config_.on_dtls_handshake_error_);
+ RTC_DCHECK(config_.field_trials);
+ if (port_allocator_) {
+ port_allocator_->SetIceTiebreaker(ice_tiebreaker_);
+ }
+}
+
+JsepTransportController::~JsepTransportController() {
+ // Channel destructors may try to send packets, so this needs to happen on
+ // the network thread.
+ RTC_DCHECK_RUN_ON(network_thread_);
+ DestroyAllJsepTransports_n();
+}
+
+RTCError JsepTransportController::SetLocalDescription(
+ SdpType type,
+ const cricket::SessionDescription* description) {
+ TRACE_EVENT0("webrtc", "JsepTransportController::SetLocalDescription");
+ if (!network_thread_->IsCurrent()) {
+ return network_thread_->BlockingCall(
+ [=] { return SetLocalDescription(type, description); });
+ }
+
+ RTC_DCHECK_RUN_ON(network_thread_);
+ if (!initial_offerer_.has_value()) {
+ initial_offerer_.emplace(type == SdpType::kOffer);
+ if (*initial_offerer_) {
+ SetIceRole_n(cricket::ICEROLE_CONTROLLING);
+ } else {
+ SetIceRole_n(cricket::ICEROLE_CONTROLLED);
+ }
+ }
+ return ApplyDescription_n(/*local=*/true, type, description);
+}
+
+RTCError JsepTransportController::SetRemoteDescription(
+ SdpType type,
+ const cricket::SessionDescription* description) {
+ TRACE_EVENT0("webrtc", "JsepTransportController::SetRemoteDescription");
+ if (!network_thread_->IsCurrent()) {
+ return network_thread_->BlockingCall(
+ [=] { return SetRemoteDescription(type, description); });
+ }
+
+ RTC_DCHECK_RUN_ON(network_thread_);
+ return ApplyDescription_n(/*local=*/false, type, description);
+}
+
+RtpTransportInternal* JsepTransportController::GetRtpTransport(
+ absl::string_view mid) const {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ auto jsep_transport = GetJsepTransportForMid(mid);
+ if (!jsep_transport) {
+ return nullptr;
+ }
+ return jsep_transport->rtp_transport();
+}
+
+DataChannelTransportInterface* JsepTransportController::GetDataChannelTransport(
+ const std::string& mid) const {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ auto jsep_transport = GetJsepTransportForMid(mid);
+ if (!jsep_transport) {
+ return nullptr;
+ }
+ return jsep_transport->data_channel_transport();
+}
+
+cricket::DtlsTransportInternal* JsepTransportController::GetDtlsTransport(
+ const std::string& mid) {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ auto jsep_transport = GetJsepTransportForMid(mid);
+ if (!jsep_transport) {
+ return nullptr;
+ }
+ return jsep_transport->rtp_dtls_transport();
+}
+
+const cricket::DtlsTransportInternal*
+JsepTransportController::GetRtcpDtlsTransport(const std::string& mid) const {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ auto jsep_transport = GetJsepTransportForMid(mid);
+ if (!jsep_transport) {
+ return nullptr;
+ }
+ return jsep_transport->rtcp_dtls_transport();
+}
+
+rtc::scoped_refptr<webrtc::DtlsTransport>
+JsepTransportController::LookupDtlsTransportByMid(const std::string& mid) {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ auto jsep_transport = GetJsepTransportForMid(mid);
+ if (!jsep_transport) {
+ return nullptr;
+ }
+ return jsep_transport->RtpDtlsTransport();
+}
+
+rtc::scoped_refptr<SctpTransport> JsepTransportController::GetSctpTransport(
+ const std::string& mid) const {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ auto jsep_transport = GetJsepTransportForMid(mid);
+ if (!jsep_transport) {
+ return nullptr;
+ }
+ return jsep_transport->SctpTransport();
+}
+
+void JsepTransportController::SetIceConfig(const cricket::IceConfig& config) {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ ice_config_ = config;
+ for (auto& dtls : GetDtlsTransports()) {
+ dtls->ice_transport()->SetIceConfig(ice_config_);
+ }
+}
+
+void JsepTransportController::SetNeedsIceRestartFlag() {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ for (auto& transport : transports_.Transports()) {
+ transport->SetNeedsIceRestartFlag();
+ }
+}
+
+bool JsepTransportController::NeedsIceRestart(
+ const std::string& transport_name) const {
+ RTC_DCHECK_RUN_ON(network_thread_);
+
+ const cricket::JsepTransport* transport =
+ GetJsepTransportByName(transport_name);
+ if (!transport) {
+ return false;
+ }
+ return transport->needs_ice_restart();
+}
+
+absl::optional<rtc::SSLRole> JsepTransportController::GetDtlsRole(
+ const std::string& mid) const {
+ // TODO(tommi): Remove this hop. Currently it's called from the signaling
+ // thread during negotiations, potentially multiple times.
+ // WebRtcSessionDescriptionFactory::InternalCreateAnswer is one example.
+ if (!network_thread_->IsCurrent()) {
+ return network_thread_->BlockingCall([&] { return GetDtlsRole(mid); });
+ }
+
+ RTC_DCHECK_RUN_ON(network_thread_);
+
+ const cricket::JsepTransport* t = GetJsepTransportForMid(mid);
+ if (!t) {
+ return absl::optional<rtc::SSLRole>();
+ }
+ return t->GetDtlsRole();
+}
+
+bool JsepTransportController::SetLocalCertificate(
+ const rtc::scoped_refptr<rtc::RTCCertificate>& certificate) {
+ if (!network_thread_->IsCurrent()) {
+ return network_thread_->BlockingCall(
+ [&] { return SetLocalCertificate(certificate); });
+ }
+
+ RTC_DCHECK_RUN_ON(network_thread_);
+
+ // Can't change a certificate, or set a null certificate.
+ if (certificate_ || !certificate) {
+ return false;
+ }
+ certificate_ = certificate;
+
+ // Set certificate for JsepTransport, which verifies it matches the
+ // fingerprint in SDP, and DTLS transport.
+ // Fallback from DTLS to SDES is not supported.
+ for (auto& transport : transports_.Transports()) {
+ transport->SetLocalCertificate(certificate_);
+ }
+ for (auto& dtls : GetDtlsTransports()) {
+ bool set_cert_success = dtls->SetLocalCertificate(certificate_);
+ RTC_DCHECK(set_cert_success);
+ }
+ return true;
+}
+
+rtc::scoped_refptr<rtc::RTCCertificate>
+JsepTransportController::GetLocalCertificate(
+ const std::string& transport_name) const {
+ RTC_DCHECK_RUN_ON(network_thread_);
+
+ const cricket::JsepTransport* t = GetJsepTransportByName(transport_name);
+ if (!t) {
+ return nullptr;
+ }
+ return t->GetLocalCertificate();
+}
+
+std::unique_ptr<rtc::SSLCertChain>
+JsepTransportController::GetRemoteSSLCertChain(
+ const std::string& transport_name) const {
+ RTC_DCHECK_RUN_ON(network_thread_);
+
+ // Get the certificate from the RTP transport's DTLS handshake. Should be
+ // identical to the RTCP transport's, since they were given the same remote
+ // fingerprint.
+ auto jsep_transport = GetJsepTransportByName(transport_name);
+ if (!jsep_transport) {
+ return nullptr;
+ }
+ auto dtls = jsep_transport->rtp_dtls_transport();
+ if (!dtls) {
+ return nullptr;
+ }
+
+ return dtls->GetRemoteSSLCertChain();
+}
+
+void JsepTransportController::MaybeStartGathering() {
+ if (!network_thread_->IsCurrent()) {
+ network_thread_->BlockingCall([&] { MaybeStartGathering(); });
+ return;
+ }
+
+ for (auto& dtls : GetDtlsTransports()) {
+ dtls->ice_transport()->MaybeStartGathering();
+ }
+}
+
+RTCError JsepTransportController::AddRemoteCandidates(
+ const std::string& transport_name,
+ const cricket::Candidates& candidates) {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ RTC_DCHECK(VerifyCandidates(candidates).ok());
+ auto jsep_transport = GetJsepTransportByName(transport_name);
+ if (!jsep_transport) {
+ RTC_LOG(LS_WARNING) << "Not adding candidate because the JsepTransport "
+ "doesn't exist. Ignore it.";
+ return RTCError::OK();
+ }
+ return jsep_transport->AddRemoteCandidates(candidates);
+}
+
+// Removes previously-added remote ICE candidates. May be called from any
+// thread; hops to the network thread via BlockingCall if needed. Candidates
+// are grouped per transport name, then removed from the matching RTP/RTCP
+// DTLS transport's ICE layer. Candidates without a transport name, or whose
+// transport no longer exists, are skipped with a log message.
+RTCError JsepTransportController::RemoveRemoteCandidates(
+    const cricket::Candidates& candidates) {
+  if (!network_thread_->IsCurrent()) {
+    return network_thread_->BlockingCall(
+        [&] { return RemoveRemoteCandidates(candidates); });
+  }
+
+  RTC_DCHECK_RUN_ON(network_thread_);
+
+  // Verify each candidate before passing down to the transport layer.
+  RTCError error = VerifyCandidates(candidates);
+  if (!error.ok()) {
+    return error;
+  }
+
+  std::map<std::string, cricket::Candidates> candidates_by_transport_name;
+  for (const cricket::Candidate& cand : candidates) {
+    if (!cand.transport_name().empty()) {
+      candidates_by_transport_name[cand.transport_name()].push_back(cand);
+    } else {
+      RTC_LOG(LS_ERROR) << "Not removing candidate because it does not have a "
+                           "transport name set: "
+                        << cand.ToSensitiveString();
+    }
+  }
+
+  for (const auto& kv : candidates_by_transport_name) {
+    const std::string& transport_name = kv.first;
+    const cricket::Candidates& candidates = kv.second;
+    cricket::JsepTransport* jsep_transport =
+        GetJsepTransportByName(transport_name);
+    if (!jsep_transport) {
+      RTC_LOG(LS_WARNING)
+          << "Not removing candidate because the JsepTransport doesn't exist.";
+      continue;
+    }
+    for (const cricket::Candidate& candidate : candidates) {
+      // Route the removal to the RTP or RTCP component based on the
+      // candidate's component id.
+      cricket::DtlsTransportInternal* dtls =
+          candidate.component() == cricket::ICE_CANDIDATE_COMPONENT_RTP
+              ? jsep_transport->rtp_dtls_transport()
+              : jsep_transport->rtcp_dtls_transport();
+      if (dtls) {
+        dtls->ice_transport()->RemoveRemoteCandidate(candidate);
+      }
+    }
+  }
+  return RTCError::OK();
+}
+
+// Fills `stats` for the transport named `transport_name`. Returns false if
+// no such transport exists. Network thread only.
+bool JsepTransportController::GetStats(const std::string& transport_name,
+                                       cricket::TransportStats* stats) {
+  RTC_DCHECK_RUN_ON(network_thread_);
+
+  cricket::JsepTransport* transport = GetJsepTransportByName(transport_name);
+  if (!transport) {
+    return false;
+  }
+  return transport->GetStats(stats);
+}
+
+// Records the active_reset_srtp_params flag and propagates it to every
+// existing transport; the cached value is also applied to transports
+// created later (see CreateDtlsSrtpTransport). Network thread only.
+void JsepTransportController::SetActiveResetSrtpParams(
+    bool active_reset_srtp_params) {
+  RTC_DCHECK_RUN_ON(network_thread_);
+  RTC_LOG(LS_INFO)
+      << "Updating the active_reset_srtp_params for JsepTransportController: "
+      << active_reset_srtp_params;
+  active_reset_srtp_params_ = active_reset_srtp_params;
+  for (auto& transport : transports_.Transports()) {
+    transport->SetActiveResetSrtpParams(active_reset_srtp_params);
+  }
+}
+
+// Rolls back uncommitted transport and BUNDLE-group state (i.e. state staged
+// by an offer that was never answered). May be called from any thread; hops
+// to the network thread if needed.
+RTCError JsepTransportController::RollbackTransports() {
+  if (!network_thread_->IsCurrent()) {
+    return network_thread_->BlockingCall([=] { return RollbackTransports(); });
+  }
+  RTC_DCHECK_RUN_ON(network_thread_);
+  bundles_.Rollback();
+  if (!transports_.RollbackTransports()) {
+    LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR,
+                         "Failed to roll back transport state.");
+  }
+  return RTCError::OK();
+}
+
+// Creates an ICE transport for the RTP (rtcp == false) or RTCP
+// (rtcp == true) component via the configured factory, then seeds it with
+// the controller's current ICE role, tiebreaker and config.
+rtc::scoped_refptr<webrtc::IceTransportInterface>
+JsepTransportController::CreateIceTransport(const std::string& transport_name,
+                                            bool rtcp) {
+  int component = rtcp ? cricket::ICE_CANDIDATE_COMPONENT_RTCP
+                       : cricket::ICE_CANDIDATE_COMPONENT_RTP;
+
+  IceTransportInit init;
+  init.set_port_allocator(port_allocator_);
+  init.set_async_dns_resolver_factory(async_dns_resolver_factory_);
+  init.set_event_log(config_.event_log);
+  init.set_field_trials(config_.field_trials);
+  auto transport = config_.ice_transport_factory->CreateIceTransport(
+      transport_name, component, std::move(init));
+  RTC_DCHECK(transport);
+  transport->internal()->SetIceRole(ice_role_);
+  transport->internal()->SetIceTiebreaker(ice_tiebreaker_);
+  transport->internal()->SetIceConfig(ice_config_);
+  return transport;
+}
+
+// Creates a DTLS transport on top of `ice`, preferring the injected
+// dtls_transport_factory when one is configured. Installs the local
+// certificate (if already set) and subscribes this controller to the DTLS
+// and ICE transport signals that feed the aggregate-state machinery.
+// Network thread only.
+std::unique_ptr<cricket::DtlsTransportInternal>
+JsepTransportController::CreateDtlsTransport(
+    const cricket::ContentInfo& content_info,
+    cricket::IceTransportInternal* ice) {
+  RTC_DCHECK_RUN_ON(network_thread_);
+
+  std::unique_ptr<cricket::DtlsTransportInternal> dtls;
+
+  if (config_.dtls_transport_factory) {
+    dtls = config_.dtls_transport_factory->CreateDtlsTransport(
+        ice, config_.crypto_options, config_.ssl_max_version);
+  } else {
+    dtls = std::make_unique<cricket::DtlsTransport>(ice, config_.crypto_options,
+                                                    config_.event_log,
+                                                    config_.ssl_max_version);
+  }
+
+  RTC_DCHECK(dtls);
+  // The DTLS transport must wrap exactly the ICE transport it was given.
+  RTC_DCHECK_EQ(ice, dtls->ice_transport());
+
+  if (certificate_) {
+    bool set_cert_success = dtls->SetLocalCertificate(certificate_);
+    RTC_DCHECK(set_cert_success);
+  }
+
+  // Connect to signals offered by the DTLS and ICE transport.
+  dtls->SignalWritableState.connect(
+      this, &JsepTransportController::OnTransportWritableState_n);
+  dtls->SignalReceivingState.connect(
+      this, &JsepTransportController::OnTransportReceivingState_n);
+  dtls->ice_transport()->SignalGatheringState.connect(
+      this, &JsepTransportController::OnTransportGatheringState_n);
+  dtls->ice_transport()->SignalCandidateGathered.connect(
+      this, &JsepTransportController::OnTransportCandidateGathered_n);
+  dtls->ice_transport()->SignalCandidateError.connect(
+      this, &JsepTransportController::OnTransportCandidateError_n);
+  dtls->ice_transport()->SignalCandidatesRemoved.connect(
+      this, &JsepTransportController::OnTransportCandidatesRemoved_n);
+  dtls->ice_transport()->SignalRoleConflict.connect(
+      this, &JsepTransportController::OnTransportRoleConflict_n);
+  dtls->ice_transport()->SignalStateChanged.connect(
+      this, &JsepTransportController::OnTransportStateChanged_n);
+  dtls->ice_transport()->SignalIceTransportStateChanged.connect(
+      this, &JsepTransportController::OnTransportStateChanged_n);
+  dtls->ice_transport()->SignalCandidatePairChanged.connect(
+      this, &JsepTransportController::OnTransportCandidatePairChanged_n);
+
+  dtls->SubscribeDtlsHandshakeError(
+      [this](rtc::SSLHandshakeError error) { OnDtlsHandshakeError(error); });
+  return dtls;
+}
+
+// Creates a plain (unencrypted) RTP transport over the given packet
+// transports; a null `rtcp_packet_transport` means RTCP muxing is in effect.
+// Network thread only. NOTE(review): `transport_name` is currently unused
+// here — kept for signature symmetry with the other Create*Transport helpers.
+std::unique_ptr<webrtc::RtpTransport>
+JsepTransportController::CreateUnencryptedRtpTransport(
+    const std::string& transport_name,
+    rtc::PacketTransportInternal* rtp_packet_transport,
+    rtc::PacketTransportInternal* rtcp_packet_transport) {
+  RTC_DCHECK_RUN_ON(network_thread_);
+  auto unencrypted_rtp_transport =
+      std::make_unique<RtpTransport>(rtcp_packet_transport == nullptr);
+  unencrypted_rtp_transport->SetRtpPacketTransport(rtp_packet_transport);
+  if (rtcp_packet_transport) {
+    unencrypted_rtp_transport->SetRtcpPacketTransport(rtcp_packet_transport);
+  }
+  return unencrypted_rtp_transport;
+}
+
+// Creates an SDES-keyed SRTP transport over the given DTLS transports; a
+// null `rtcp_dtls_transport` means RTCP muxing is in effect. External auth
+// is enabled when the config requests it. Network thread only.
+std::unique_ptr<webrtc::SrtpTransport>
+JsepTransportController::CreateSdesTransport(
+    const std::string& transport_name,
+    cricket::DtlsTransportInternal* rtp_dtls_transport,
+    cricket::DtlsTransportInternal* rtcp_dtls_transport) {
+  RTC_DCHECK_RUN_ON(network_thread_);
+  auto srtp_transport = std::make_unique<webrtc::SrtpTransport>(
+      rtcp_dtls_transport == nullptr, *config_.field_trials);
+  RTC_DCHECK(rtp_dtls_transport);
+  srtp_transport->SetRtpPacketTransport(rtp_dtls_transport);
+  if (rtcp_dtls_transport) {
+    srtp_transport->SetRtcpPacketTransport(rtcp_dtls_transport);
+  }
+  if (config_.enable_external_auth) {
+    srtp_transport->EnableExternalAuth();
+  }
+  return srtp_transport;
+}
+
+// Creates a DTLS-SRTP transport (keys derived from the DTLS handshake) over
+// the given DTLS transports; a null `rtcp_dtls_transport` means RTCP muxing
+// is in effect. Applies the cached active_reset_srtp_params flag and hooks
+// DTLS state changes into the aggregate-state update. Network thread only.
+std::unique_ptr<webrtc::DtlsSrtpTransport>
+JsepTransportController::CreateDtlsSrtpTransport(
+    const std::string& transport_name,
+    cricket::DtlsTransportInternal* rtp_dtls_transport,
+    cricket::DtlsTransportInternal* rtcp_dtls_transport) {
+  RTC_DCHECK_RUN_ON(network_thread_);
+  auto dtls_srtp_transport = std::make_unique<webrtc::DtlsSrtpTransport>(
+      rtcp_dtls_transport == nullptr, *config_.field_trials);
+  if (config_.enable_external_auth) {
+    dtls_srtp_transport->EnableExternalAuth();
+  }
+
+  dtls_srtp_transport->SetDtlsTransports(rtp_dtls_transport,
+                                         rtcp_dtls_transport);
+  dtls_srtp_transport->SetActiveResetSrtpParams(active_reset_srtp_params_);
+  // Capturing this in the callback because JsepTransportController will always
+  // outlive the DtlsSrtpTransport.
+  dtls_srtp_transport->SetOnDtlsStateChange([this]() {
+    RTC_DCHECK_RUN_ON(this->network_thread_);
+    this->UpdateAggregateStates_n();
+  });
+  return dtls_srtp_transport;
+}
+
+// Returns raw pointers to the RTP (and, when present, RTCP) DTLS transports
+// of every registered JsepTransport. Pointers are non-owning and valid only
+// while the transports live. Network thread only.
+std::vector<cricket::DtlsTransportInternal*>
+JsepTransportController::GetDtlsTransports() {
+  RTC_DCHECK_RUN_ON(network_thread_);
+  std::vector<cricket::DtlsTransportInternal*> dtls_transports;
+  for (auto jsep_transport : transports_.Transports()) {
+    RTC_DCHECK(jsep_transport);
+    if (jsep_transport->rtp_dtls_transport()) {
+      dtls_transports.push_back(jsep_transport->rtp_dtls_transport());
+    }
+
+    if (jsep_transport->rtcp_dtls_transport()) {
+      dtls_transports.push_back(jsep_transport->rtcp_dtls_transport());
+    }
+  }
+  return dtls_transports;
+}
+
+// Same as GetDtlsTransports(), but restricted to the active (in-use)
+// JsepTransports — the set used for aggregate state computation.
+// Network thread only.
+std::vector<cricket::DtlsTransportInternal*>
+JsepTransportController::GetActiveDtlsTransports() {
+  RTC_DCHECK_RUN_ON(network_thread_);
+  std::vector<cricket::DtlsTransportInternal*> dtls_transports;
+  for (auto jsep_transport : transports_.ActiveTransports()) {
+    RTC_DCHECK(jsep_transport);
+    if (jsep_transport->rtp_dtls_transport()) {
+      dtls_transports.push_back(jsep_transport->rtp_dtls_transport());
+    }
+
+    if (jsep_transport->rtcp_dtls_transport()) {
+      dtls_transports.push_back(jsep_transport->rtcp_dtls_transport());
+    }
+  }
+  return dtls_transports;
+}
+
+// Applies a local or remote session description of the given SDP `type`:
+// validates/updates BUNDLE groups, creates any missing JsepTransports,
+// routes bundled m= sections onto the BUNDLE-tagged transport, pushes the
+// per-section transport descriptions down, and commits staged state on an
+// answer. Returns the first error encountered; on error, earlier
+// per-section changes in this pass are NOT undone here (rollback is the
+// caller's concern). Network-thread-only (suffix _n).
+RTCError JsepTransportController::ApplyDescription_n(
+    bool local,
+    SdpType type,
+    const cricket::SessionDescription* description) {
+  TRACE_EVENT0("webrtc", "JsepTransportController::ApplyDescription_n");
+  RTC_DCHECK(description);
+
+  if (local) {
+    local_desc_ = description;
+  } else {
+    remote_desc_ = description;
+  }
+
+  RTCError error;
+  error = ValidateAndMaybeUpdateBundleGroups(local, type, description);
+  if (!error.ok()) {
+    return error;
+  }
+
+  // Per BUNDLE group, the union of encrypted header extension IDs of all its
+  // members (computed lazily, only when groups exist).
+  std::map<const cricket::ContentGroup*, std::vector<int>>
+      merged_encrypted_extension_ids_by_bundle;
+  if (!bundles_.bundle_groups().empty()) {
+    merged_encrypted_extension_ids_by_bundle =
+        MergeEncryptedHeaderExtensionIdsForBundles(description);
+  }
+
+  for (const cricket::ContentInfo& content_info : description->contents()) {
+    // Don't create transports for rejected m-lines and bundled m-lines.
+    if (content_info.rejected ||
+        !bundles_.IsFirstMidInGroup(content_info.name)) {
+      continue;
+    }
+    error = MaybeCreateJsepTransport(local, content_info, *description);
+    if (!error.ok()) {
+      return error;
+    }
+  }
+
+  // contents() and transport_infos() are parallel arrays indexed by m= line.
+  RTC_DCHECK(description->contents().size() ==
+             description->transport_infos().size());
+  for (size_t i = 0; i < description->contents().size(); ++i) {
+    const cricket::ContentInfo& content_info = description->contents()[i];
+    const cricket::TransportInfo& transport_info =
+        description->transport_infos()[i];
+
+    if (content_info.rejected) {
+      // This may cause groups to be removed from |bundles_.bundle_groups()|.
+      HandleRejectedContent(content_info);
+      continue;
+    }
+
+    const cricket::ContentGroup* established_bundle_group =
+        bundles_.LookupGroupByMid(content_info.name);
+
+    // For bundle members that are not BUNDLE-tagged (not first in the group),
+    // configure their transport to be the same as the BUNDLE-tagged transport.
+    if (established_bundle_group &&
+        content_info.name != *established_bundle_group->FirstContentName()) {
+      if (!HandleBundledContent(content_info, *established_bundle_group)) {
+        return RTCError(RTCErrorType::INVALID_PARAMETER,
+                        "Failed to process the bundled m= section with "
+                        "mid='" +
+                            content_info.name + "'.");
+      }
+      continue;
+    }
+
+    error = ValidateContent(content_info);
+    if (!error.ok()) {
+      return error;
+    }
+
+    std::vector<int> extension_ids;
+    // Is BUNDLE-tagged (first in the group)?
+    if (established_bundle_group &&
+        content_info.name == *established_bundle_group->FirstContentName()) {
+      auto it = merged_encrypted_extension_ids_by_bundle.find(
+          established_bundle_group);
+      RTC_DCHECK(it != merged_encrypted_extension_ids_by_bundle.end());
+      extension_ids = it->second;
+    } else {
+      extension_ids = GetEncryptedHeaderExtensionIds(content_info);
+    }
+
+    int rtp_abs_sendtime_extn_id =
+        GetRtpAbsSendTimeHeaderExtensionId(content_info);
+
+    cricket::JsepTransport* transport =
+        GetJsepTransportForMid(content_info.name);
+    if (!transport) {
+      LOG_AND_RETURN_ERROR(
+          RTCErrorType::INVALID_PARAMETER,
+          "Could not find transport for m= section with mid='" +
+              content_info.name + "'");
+    }
+
+    // ICE role may flip based on ice-lite negotiation; see DetermineIceRole.
+    SetIceRole_n(DetermineIceRole(transport, transport_info, type, local));
+
+    cricket::JsepTransportDescription jsep_description =
+        CreateJsepTransportDescription(content_info, transport_info,
+                                       extension_ids, rtp_abs_sendtime_extn_id);
+    if (local) {
+      error =
+          transport->SetLocalJsepTransportDescription(jsep_description, type);
+    } else {
+      error =
+          transport->SetRemoteJsepTransportDescription(jsep_description, type);
+    }
+
+    if (!error.ok()) {
+      LOG_AND_RETURN_ERROR(
+          RTCErrorType::INVALID_PARAMETER,
+          "Failed to apply the description for m= section with mid='" +
+              content_info.name + "': " + error.message());
+    }
+  }
+  // An answer finalizes the negotiation: commit staged transports/groups so
+  // they can no longer be rolled back.
+  if (type == SdpType::kAnswer) {
+    transports_.CommitTransports();
+    bundles_.Commit();
+  }
+  return RTCError::OK();
+}
+
+// Validates the BUNDLE groups in `description` against RFC 8843 rules and,
+// if valid, updates `bundles_` from it. Checks performed:
+//  - no MID appears in more than one group, and every MID has an m= section;
+//  - for offers: no MID silently migrates between pre-existing groups;
+//  - for answers: each group must be a subset of an offered group, and MIDs
+//    may not be dropped from an established group without rejecting them;
+//  - max-bundle policy requires a BUNDLE group when there are >1 contents;
+//  - a rejected BUNDLE-tagged m= section implies the whole group is rejected.
+RTCError JsepTransportController::ValidateAndMaybeUpdateBundleGroups(
+    bool local,
+    SdpType type,
+    const cricket::SessionDescription* description) {
+  RTC_DCHECK(description);
+
+  std::vector<const cricket::ContentGroup*> new_bundle_groups =
+      description->GetGroupsByName(cricket::GROUP_TYPE_BUNDLE);
+  // Verify `new_bundle_groups`.
+  std::map<std::string, const cricket::ContentGroup*> new_bundle_groups_by_mid;
+  for (const cricket::ContentGroup* new_bundle_group : new_bundle_groups) {
+    for (const std::string& content_name : new_bundle_group->content_names()) {
+      // The BUNDLE group must not contain a MID that is a member of a different
+      // BUNDLE group, or that contains the same MID multiple times.
+      if (new_bundle_groups_by_mid.find(content_name) !=
+          new_bundle_groups_by_mid.end()) {
+        return RTCError(RTCErrorType::INVALID_PARAMETER,
+                        "A BUNDLE group contains a MID='" + content_name +
+                            "' that is already in a BUNDLE group.");
+      }
+      new_bundle_groups_by_mid.insert(
+          std::make_pair(content_name, new_bundle_group));
+      // The BUNDLE group must not contain a MID that no m= section has.
+      if (!description->GetContentByName(content_name)) {
+        return RTCError(RTCErrorType::INVALID_PARAMETER,
+                        "A BUNDLE group contains a MID='" + content_name +
+                            "' matching no m= section.");
+      }
+    }
+  }
+
+  if (type == SdpType::kOffer) {
+    // For an offer, we need to verify that there is not a conflicting mapping
+    // between existing and new bundle groups. For example, if the existing
+    // groups are [[1,2],[3,4]] and new are [[1,3],[2,4]] or [[1,2,3,4]], or
+    // vice versa. Switching things around like this requires a separate offer
+    // that removes the relevant sections from their group, as per RFC 8843,
+    // section 7.5.2.
+    std::map<const cricket::ContentGroup*, const cricket::ContentGroup*>
+        new_bundle_groups_by_existing_bundle_groups;
+    std::map<const cricket::ContentGroup*, const cricket::ContentGroup*>
+        existing_bundle_groups_by_new_bundle_groups;
+    for (const cricket::ContentGroup* new_bundle_group : new_bundle_groups) {
+      for (const std::string& mid : new_bundle_group->content_names()) {
+        cricket::ContentGroup* existing_bundle_group =
+            bundles_.LookupGroupByMid(mid);
+        if (!existing_bundle_group) {
+          continue;
+        }
+        // Each existing group must map to at most one new group and
+        // vice versa (a bijection on the overlapping groups).
+        auto it = new_bundle_groups_by_existing_bundle_groups.find(
+            existing_bundle_group);
+        if (it != new_bundle_groups_by_existing_bundle_groups.end() &&
+            it->second != new_bundle_group) {
+          return RTCError(RTCErrorType::INVALID_PARAMETER,
+                          "MID " + mid + " in the offer has changed group.");
+        }
+        new_bundle_groups_by_existing_bundle_groups.insert(
+            std::make_pair(existing_bundle_group, new_bundle_group));
+        it = existing_bundle_groups_by_new_bundle_groups.find(new_bundle_group);
+        if (it != existing_bundle_groups_by_new_bundle_groups.end() &&
+            it->second != existing_bundle_group) {
+          return RTCError(RTCErrorType::INVALID_PARAMETER,
+                          "MID " + mid + " in the offer has changed group.");
+        }
+        existing_bundle_groups_by_new_bundle_groups.insert(
+            std::make_pair(new_bundle_group, existing_bundle_group));
+      }
+    }
+  } else if (type == SdpType::kAnswer) {
+    // Compare the answer's groups against the groups of the offer that this
+    // answer responds to (the remote offer when applying a local answer, and
+    // vice versa).
+    std::vector<const cricket::ContentGroup*> offered_bundle_groups =
+        local ? remote_desc_->GetGroupsByName(cricket::GROUP_TYPE_BUNDLE)
+              : local_desc_->GetGroupsByName(cricket::GROUP_TYPE_BUNDLE);
+
+    std::map<std::string, const cricket::ContentGroup*>
+        offered_bundle_groups_by_mid;
+    for (const cricket::ContentGroup* offered_bundle_group :
+         offered_bundle_groups) {
+      for (const std::string& content_name :
+           offered_bundle_group->content_names()) {
+        offered_bundle_groups_by_mid[content_name] = offered_bundle_group;
+      }
+    }
+
+    std::map<const cricket::ContentGroup*, const cricket::ContentGroup*>
+        new_bundle_groups_by_offered_bundle_groups;
+    for (const cricket::ContentGroup* new_bundle_group : new_bundle_groups) {
+      if (!new_bundle_group->FirstContentName()) {
+        // Empty groups could be a subset of any group.
+        continue;
+      }
+      // The group in the answer (new_bundle_group) must have a corresponding
+      // group in the offer (original_group), because the answer groups may only
+      // be subsets of the offer groups.
+      auto it = offered_bundle_groups_by_mid.find(
+          *new_bundle_group->FirstContentName());
+      if (it == offered_bundle_groups_by_mid.end()) {
+        return RTCError(RTCErrorType::INVALID_PARAMETER,
+                        "A BUNDLE group was added in the answer that did not "
+                        "exist in the offer.");
+      }
+      const cricket::ContentGroup* offered_bundle_group = it->second;
+      if (new_bundle_groups_by_offered_bundle_groups.find(
+              offered_bundle_group) !=
+          new_bundle_groups_by_offered_bundle_groups.end()) {
+        return RTCError(RTCErrorType::INVALID_PARAMETER,
+                        "A MID in the answer has changed group.");
+      }
+      new_bundle_groups_by_offered_bundle_groups.insert(
+          std::make_pair(offered_bundle_group, new_bundle_group));
+      for (const std::string& content_name :
+           new_bundle_group->content_names()) {
+        it = offered_bundle_groups_by_mid.find(content_name);
+        // The BUNDLE group in answer should be a subset of offered group.
+        if (it == offered_bundle_groups_by_mid.end() ||
+            it->second != offered_bundle_group) {
+          return RTCError(RTCErrorType::INVALID_PARAMETER,
+                          "A BUNDLE group in answer contains a MID='" +
+                              content_name +
+                              "' that was not in the offered group.");
+        }
+      }
+    }
+
+    for (const auto& bundle_group : bundles_.bundle_groups()) {
+      for (const std::string& content_name : bundle_group->content_names()) {
+        // An answer that removes m= sections from pre-negotiated BUNDLE group
+        // without rejecting it, is invalid.
+        auto it = new_bundle_groups_by_mid.find(content_name);
+        if (it == new_bundle_groups_by_mid.end()) {
+          auto* content_info = description->GetContentByName(content_name);
+          if (!content_info || !content_info->rejected) {
+            return RTCError(RTCErrorType::INVALID_PARAMETER,
+                            "Answer cannot remove m= section with mid='" +
+                                content_name +
+                                "' from already-established BUNDLE group.");
+          }
+        }
+      }
+    }
+  }
+
+  if (config_.bundle_policy ==
+          PeerConnectionInterface::kBundlePolicyMaxBundle &&
+      !description->HasGroup(cricket::GROUP_TYPE_BUNDLE) &&
+      description->contents().size() > 1) {
+    return RTCError(RTCErrorType::INVALID_PARAMETER,
+                    "max-bundle is used but no bundle group found.");
+  }
+
+  // All checks passed: adopt the description's groups.
+  bundles_.Update(description, type);
+
+  for (const auto& bundle_group : bundles_.bundle_groups()) {
+    if (!bundle_group->FirstContentName())
+      continue;
+
+    // The first MID in a BUNDLE group is BUNDLE-tagged.
+    auto bundled_content =
+        description->GetContentByName(*bundle_group->FirstContentName());
+    if (!bundled_content) {
+      return RTCError(
+          RTCErrorType::INVALID_PARAMETER,
+          "An m= section associated with the BUNDLE-tag doesn't exist.");
+    }
+
+    // If the `bundled_content` is rejected, other contents in the bundle group
+    // must also be rejected.
+    if (bundled_content->rejected) {
+      for (const auto& content_name : bundle_group->content_names()) {
+        auto other_content = description->GetContentByName(content_name);
+        if (!other_content->rejected) {
+          return RTCError(RTCErrorType::INVALID_PARAMETER,
+                          "The m= section with mid='" + content_name +
+                              "' should be rejected.");
+        }
+      }
+    }
+  }
+  return RTCError::OK();
+}
+
+// Rejects an RTP m= section that does not negotiate rtcp-mux while the
+// rtcp_mux_policy is "require" (bundle-only sections are exempt since they
+// carry no transport of their own).
+RTCError JsepTransportController::ValidateContent(
+    const cricket::ContentInfo& content_info) {
+  if (config_.rtcp_mux_policy ==
+          PeerConnectionInterface::kRtcpMuxPolicyRequire &&
+      content_info.type == cricket::MediaProtocolType::kRtp &&
+      !content_info.bundle_only &&
+      !content_info.media_description()->rtcp_mux()) {
+    return RTCError(RTCErrorType::INVALID_PARAMETER,
+                    "The m= section with mid='" + content_info.name +
+                        "' is invalid. RTCP-MUX is not "
+                        "enabled when it is required.");
+  }
+  return RTCError::OK();
+}
+
+// Tears down transport state for a rejected m= section. Rejecting the
+// BUNDLE-tagged (first) MID of a group rejects the whole group; otherwise
+// only this MID's mapping is removed (and it is dropped from its group).
+void JsepTransportController::HandleRejectedContent(
+    const cricket::ContentInfo& content_info) {
+  // If the content is rejected, let the
+  // BaseChannel/SctpTransport change the RtpTransport/DtlsTransport first,
+  // then destroy the cricket::JsepTransport.
+  cricket::ContentGroup* bundle_group =
+      bundles_.LookupGroupByMid(content_info.name);
+  if (bundle_group && !bundle_group->content_names().empty() &&
+      content_info.name == *bundle_group->FirstContentName()) {
+    // Rejecting a BUNDLE group's first mid means we are rejecting the entire
+    // group.
+    for (const auto& content_name : bundle_group->content_names()) {
+      transports_.RemoveTransportForMid(content_name);
+    }
+    // Delete the BUNDLE group.
+    bundles_.DeleteGroup(bundle_group);
+  } else {
+    transports_.RemoveTransportForMid(content_info.name);
+    if (bundle_group) {
+      // Remove the rejected content from the `bundle_group`.
+      bundles_.DeleteMid(bundle_group, content_info.name);
+    }
+  }
+}
+
+// Points a non-BUNDLE-tagged m= section at the transport of its group's
+// first (BUNDLE-tagged) MID. Returns false if the remapping fails.
+bool JsepTransportController::HandleBundledContent(
+    const cricket::ContentInfo& content_info,
+    const cricket::ContentGroup& bundle_group) {
+  TRACE_EVENT0("webrtc", "JsepTransportController::HandleBundledContent");
+  RTC_DCHECK(bundle_group.FirstContentName());
+  auto jsep_transport =
+      GetJsepTransportByName(*bundle_group.FirstContentName());
+  RTC_DCHECK(jsep_transport);
+  // If the content is bundled, let the
+  // BaseChannel/SctpTransport change the RtpTransport/DtlsTransport first,
+  // then destroy the cricket::JsepTransport.
+  // TODO(bugs.webrtc.org/9719) For media transport this is far from ideal,
+  // because it means that we first create media transport and start
+  // connecting it, and then we destroy it. We will need to address it before
+  // video path is enabled.
+  return transports_.SetTransportForMid(content_info.name, jsep_transport);
+}
+
+// Assembles a JsepTransportDescription from the m= section and its transport
+// info. SCTP sections are always treated as rtcp-mux-enabled since they
+// carry no RTCP.
+cricket::JsepTransportDescription
+JsepTransportController::CreateJsepTransportDescription(
+    const cricket::ContentInfo& content_info,
+    const cricket::TransportInfo& transport_info,
+    const std::vector<int>& encrypted_extension_ids,
+    int rtp_abs_sendtime_extn_id) {
+  TRACE_EVENT0("webrtc",
+               "JsepTransportController::CreateJsepTransportDescription");
+  const cricket::MediaContentDescription* content_desc =
+      content_info.media_description();
+  RTC_DCHECK(content_desc);
+  bool rtcp_mux_enabled = content_info.type == cricket::MediaProtocolType::kSctp
+                              ? true
+                              : content_desc->rtcp_mux();
+
+  return cricket::JsepTransportDescription(
+      rtcp_mux_enabled, content_desc->cryptos(), encrypted_extension_ids,
+      rtp_abs_sendtime_extn_id, transport_info.description);
+}
+
+// Returns the de-duplicated IDs of RTP header extensions marked for
+// encryption in this m= section, or an empty list when encrypted header
+// extensions are disabled in crypto_options.
+std::vector<int> JsepTransportController::GetEncryptedHeaderExtensionIds(
+    const cricket::ContentInfo& content_info) {
+  const cricket::MediaContentDescription* content_desc =
+      content_info.media_description();
+
+  if (!config_.crypto_options.srtp.enable_encrypted_rtp_header_extensions) {
+    return std::vector<int>();
+  }
+
+  std::vector<int> encrypted_header_extension_ids;
+  for (const auto& extension : content_desc->rtp_header_extensions()) {
+    if (!extension.encrypt) {
+      continue;
+    }
+    // De-duplicate; the list is small so linear search is fine.
+    if (!absl::c_linear_search(encrypted_header_extension_ids, extension.id)) {
+      encrypted_header_extension_ids.push_back(extension.id);
+    }
+  }
+  return encrypted_header_extension_ids;
+}
+
+// For every BUNDLE group, computes the union of the encrypted header
+// extension IDs of all member m= sections, keyed by group. Only meaningful
+// when at least one group exists (DCHECKed).
+std::map<const cricket::ContentGroup*, std::vector<int>>
+JsepTransportController::MergeEncryptedHeaderExtensionIdsForBundles(
+    const cricket::SessionDescription* description) {
+  RTC_DCHECK(description);
+  RTC_DCHECK(!bundles_.bundle_groups().empty());
+  std::map<const cricket::ContentGroup*, std::vector<int>>
+      merged_encrypted_extension_ids_by_bundle;
+  // Union the encrypted header IDs in the group when bundle is enabled.
+  for (const cricket::ContentInfo& content_info : description->contents()) {
+    auto group = bundles_.LookupGroupByMid(content_info.name);
+    if (!group)
+      continue;
+    // Get or create list of IDs for the BUNDLE group.
+    std::vector<int>& merged_ids =
+        merged_encrypted_extension_ids_by_bundle[group];
+    // Add IDs not already in the list.
+    std::vector<int> extension_ids =
+        GetEncryptedHeaderExtensionIds(content_info);
+    for (int id : extension_ids) {
+      if (!absl::c_linear_search(merged_ids, id)) {
+        merged_ids.push_back(id);
+      }
+    }
+  }
+  return merged_encrypted_extension_ids_by_bundle;
+}
+
+// Returns the negotiated abs-send-time header extension ID for this m=
+// section, or -1 if external auth is disabled or the extension is absent.
+// Extension encryption preference follows crypto_options.
+int JsepTransportController::GetRtpAbsSendTimeHeaderExtensionId(
+    const cricket::ContentInfo& content_info) {
+  if (!config_.enable_external_auth) {
+    return -1;
+  }
+
+  const cricket::MediaContentDescription* content_desc =
+      content_info.media_description();
+
+  const webrtc::RtpExtension* send_time_extension =
+      webrtc::RtpExtension::FindHeaderExtensionByUri(
+          content_desc->rtp_header_extensions(),
+          webrtc::RtpExtension::kAbsSendTimeUri,
+          config_.crypto_options.srtp.enable_encrypted_rtp_header_extensions
+              ? webrtc::RtpExtension::kPreferEncryptedExtension
+              : webrtc::RtpExtension::kDiscardEncryptedExtension);
+  return send_time_extension ? send_time_extension->id : -1;
+}
+
+// Const/non-const and std::string/string_view overloads resolving a MID to
+// its (possibly bundled) JsepTransport; all are thin forwards to the
+// transport map and return nullptr for unknown MIDs.
+const cricket::JsepTransport* JsepTransportController::GetJsepTransportForMid(
+    const std::string& mid) const {
+  return transports_.GetTransportForMid(mid);
+}
+
+cricket::JsepTransport* JsepTransportController::GetJsepTransportForMid(
+    const std::string& mid) {
+  return transports_.GetTransportForMid(mid);
+}
+const cricket::JsepTransport* JsepTransportController::GetJsepTransportForMid(
+    absl::string_view mid) const {
+  return transports_.GetTransportForMid(mid);
+}
+
+cricket::JsepTransport* JsepTransportController::GetJsepTransportForMid(
+    absl::string_view mid) {
+  return transports_.GetTransportForMid(mid);
+}
+
+// Const and non-const lookups of a JsepTransport by its own name (the MID it
+// was created for, independent of bundling); nullptr if not found.
+const cricket::JsepTransport* JsepTransportController::GetJsepTransportByName(
+    const std::string& transport_name) const {
+  return transports_.GetTransportByName(transport_name);
+}
+
+cricket::JsepTransport* JsepTransportController::GetJsepTransportByName(
+    const std::string& transport_name) {
+  return transports_.GetTransportByName(transport_name);
+}
+
+// Ensures a cricket::JsepTransport exists for `content_info`; a no-op when a
+// transport with this MID is already registered. Otherwise builds the full
+// stack: ICE (RTP and, unless rtcp-mux is required, RTCP), DTLS, exactly one
+// RTP transport flavor (unencrypted when encryption is disabled, SDES when
+// the description carries cryptos, DTLS-SRTP otherwise), and optionally
+// SCTP. SDES and a DTLS certificate are mutually exclusive. The new
+// transport is registered under the MID and aggregate states are refreshed.
+RTCError JsepTransportController::MaybeCreateJsepTransport(
+    bool local,
+    const cricket::ContentInfo& content_info,
+    const cricket::SessionDescription& description) {
+  cricket::JsepTransport* transport = GetJsepTransportByName(content_info.name);
+  if (transport) {
+    return RTCError::OK();
+  }
+  const cricket::MediaContentDescription* content_desc =
+      content_info.media_description();
+  if (certificate_ && !content_desc->cryptos().empty()) {
+    return RTCError(RTCErrorType::INVALID_PARAMETER,
+                    "SDES and DTLS-SRTP cannot be enabled at the same time.");
+  }
+
+  rtc::scoped_refptr<webrtc::IceTransportInterface> ice =
+      CreateIceTransport(content_info.name, /*rtcp=*/false);
+
+  std::unique_ptr<cricket::DtlsTransportInternal> rtp_dtls_transport =
+      CreateDtlsTransport(content_info, ice->internal());
+
+  std::unique_ptr<cricket::DtlsTransportInternal> rtcp_dtls_transport;
+  std::unique_ptr<RtpTransport> unencrypted_rtp_transport;
+  std::unique_ptr<SrtpTransport> sdes_transport;
+  std::unique_ptr<DtlsSrtpTransport> dtls_srtp_transport;
+
+  // A dedicated RTCP component is only created when rtcp-mux is not required
+  // and this content actually carries RTP.
+  rtc::scoped_refptr<webrtc::IceTransportInterface> rtcp_ice;
+  if (config_.rtcp_mux_policy !=
+          PeerConnectionInterface::kRtcpMuxPolicyRequire &&
+      content_info.type == cricket::MediaProtocolType::kRtp) {
+    rtcp_ice = CreateIceTransport(content_info.name, /*rtcp=*/true);
+    rtcp_dtls_transport =
+        CreateDtlsTransport(content_info, rtcp_ice->internal());
+  }
+
+  if (config_.disable_encryption) {
+    RTC_LOG(LS_INFO)
+        << "Creating UnencryptedRtpTransport, because encryption is disabled.";
+    unencrypted_rtp_transport = CreateUnencryptedRtpTransport(
+        content_info.name, rtp_dtls_transport.get(), rtcp_dtls_transport.get());
+  } else if (!content_desc->cryptos().empty()) {
+    sdes_transport = CreateSdesTransport(
+        content_info.name, rtp_dtls_transport.get(), rtcp_dtls_transport.get());
+    RTC_LOG(LS_INFO) << "Creating SdesTransport.";
+  } else {
+    RTC_LOG(LS_INFO) << "Creating DtlsSrtpTransport.";
+    dtls_srtp_transport = CreateDtlsSrtpTransport(
+        content_info.name, rtp_dtls_transport.get(), rtcp_dtls_transport.get());
+  }
+
+  std::unique_ptr<cricket::SctpTransportInternal> sctp_transport;
+  if (config_.sctp_factory) {
+    sctp_transport =
+        config_.sctp_factory->CreateSctpTransport(rtp_dtls_transport.get());
+  }
+
+  std::unique_ptr<cricket::JsepTransport> jsep_transport =
+      std::make_unique<cricket::JsepTransport>(
+          content_info.name, certificate_, std::move(ice), std::move(rtcp_ice),
+          std::move(unencrypted_rtp_transport), std::move(sdes_transport),
+          std::move(dtls_srtp_transport), std::move(rtp_dtls_transport),
+          std::move(rtcp_dtls_transport), std::move(sctp_transport), [&]() {
+            RTC_DCHECK_RUN_ON(network_thread_);
+            UpdateAggregateStates_n();
+          });
+
+  // Route RTCP and un-demuxable RTP packets back to this controller on the
+  // network thread.
+  jsep_transport->rtp_transport()->SubscribeRtcpPacketReceived(
+      this, [this](rtc::CopyOnWriteBuffer* buffer, int64_t packet_time_ms) {
+        RTC_DCHECK_RUN_ON(network_thread_);
+        OnRtcpPacketReceived_n(buffer, packet_time_ms);
+      });
+  jsep_transport->rtp_transport()->SetUnDemuxableRtpPacketReceivedHandler(
+      [this](webrtc::RtpPacketReceived& packet) {
+        RTC_DCHECK_RUN_ON(network_thread_);
+        OnUnDemuxableRtpPacketReceived_n(packet);
+      });
+
+  transports_.RegisterTransport(content_info.name, std::move(jsep_transport));
+  UpdateAggregateStates_n();
+  return RTCError::OK();
+}
+
+// Destroys every registered JsepTransport. Network-thread-only (suffix _n).
+void JsepTransportController::DestroyAllJsepTransports_n() {
+  transports_.DestroyAllTransports();
+}
+
+// Caches the new ICE role and pushes it to every DTLS transport's ICE layer.
+// Network-thread-only (suffix _n).
+void JsepTransportController::SetIceRole_n(cricket::IceRole ice_role) {
+  ice_role_ = ice_role;
+  auto dtls_transports = GetDtlsTransports();
+  for (auto& dtls : dtls_transports) {
+    dtls->ice_transport()->SetIceRole(ice_role_);
+  }
+}
+
+// Computes the ICE role to use for `jsep_transport`, starting from the
+// current role and flipping it when RFC 5245 ice-lite rules apply: a full
+// implementation talking to an ice-lite peer must be CONTROLLING, and an
+// ice-lite endpoint facing a full peer must be CONTROLLED. Pure function of
+// the arguments plus `ice_role_`; does not mutate state.
+cricket::IceRole JsepTransportController::DetermineIceRole(
+    cricket::JsepTransport* jsep_transport,
+    const cricket::TransportInfo& transport_info,
+    SdpType type,
+    bool local) {
+  cricket::IceRole ice_role = ice_role_;
+  auto tdesc = transport_info.description;
+  if (local) {
+    // The initial offer side may use ICE Lite, in which case, per RFC5245
+    // Section 5.1.1, the answer side should take the controlling role if it is
+    // in the full ICE mode.
+    //
+    // When both sides use ICE Lite, the initial offer side must take the
+    // controlling role, and this is the default logic implemented in
+    // SetLocalDescription in JsepTransportController.
+    if (jsep_transport->remote_description() &&
+        jsep_transport->remote_description()->transport_desc.ice_mode ==
+            cricket::ICEMODE_LITE &&
+        ice_role_ == cricket::ICEROLE_CONTROLLED &&
+        tdesc.ice_mode == cricket::ICEMODE_FULL) {
+      ice_role = cricket::ICEROLE_CONTROLLING;
+    }
+  } else {
+    // If our role is cricket::ICEROLE_CONTROLLED and the remote endpoint
+    // supports only ice_lite, this local endpoint should take the CONTROLLING
+    // role.
+    // TODO(deadbeef): This is a session-level attribute, so it really shouldn't
+    // be in a TransportDescription in the first place...
+    if (ice_role_ == cricket::ICEROLE_CONTROLLED &&
+        tdesc.ice_mode == cricket::ICEMODE_LITE) {
+      ice_role = cricket::ICEROLE_CONTROLLING;
+    }
+
+    // If we use ICE Lite and the remote endpoint uses the full implementation
+    // of ICE, the local endpoint must take the controlled role, and the other
+    // side must be the controlling role.
+    if (jsep_transport->local_description() &&
+        jsep_transport->local_description()->transport_desc.ice_mode ==
+            cricket::ICEMODE_LITE &&
+        ice_role_ == cricket::ICEROLE_CONTROLLING &&
+        tdesc.ice_mode == cricket::ICEMODE_FULL) {
+      ice_role = cricket::ICEROLE_CONTROLLED;
+    }
+  }
+
+  return ice_role;
+}
+
+// Signal handlers for writability, receiving and gathering state changes on
+// individual transports; each simply recomputes the aggregate connection and
+// gathering states. Network-thread-only (suffix _n).
+void JsepTransportController::OnTransportWritableState_n(
+    rtc::PacketTransportInternal* transport) {
+  RTC_LOG(LS_INFO) << " Transport " << transport->transport_name()
+                   << " writability changed to " << transport->writable()
+                   << ".";
+  UpdateAggregateStates_n();
+}
+
+void JsepTransportController::OnTransportReceivingState_n(
+    rtc::PacketTransportInternal* transport) {
+  UpdateAggregateStates_n();
+}
+
+void JsepTransportController::OnTransportGatheringState_n(
+    cricket::IceTransportInternal* transport) {
+  UpdateAggregateStates_n();
+}
+
+// Forwards a newly gathered local candidate to observers, tagged with its
+// transport name. Peer-reflexive candidates must never be signaled (they are
+// learned, not gathered), hence the DCHECK-and-drop.
+void JsepTransportController::OnTransportCandidateGathered_n(
+    cricket::IceTransportInternal* transport,
+    const cricket::Candidate& candidate) {
+  // We should never signal peer-reflexive candidates.
+  if (candidate.type() == cricket::PRFLX_PORT_TYPE) {
+    RTC_DCHECK_NOTREACHED();
+    return;
+  }
+
+  signal_ice_candidates_gathered_.Send(
+      transport->transport_name(), std::vector<cricket::Candidate>{candidate});
+}
+
+// Thin forwarders relaying candidate errors, candidate removals and selected
+// candidate-pair changes from the ICE layer to this controller's observers.
+void JsepTransportController::OnTransportCandidateError_n(
+    cricket::IceTransportInternal* transport,
+    const cricket::IceCandidateErrorEvent& event) {
+  signal_ice_candidate_error_.Send(event);
+}
+void JsepTransportController::OnTransportCandidatesRemoved_n(
+    cricket::IceTransportInternal* transport,
+    const cricket::Candidates& candidates) {
+  signal_ice_candidates_removed_.Send(candidates);
+}
+void JsepTransportController::OnTransportCandidatePairChanged_n(
+    const cricket::CandidatePairChangeEvent& event) {
+  signal_ice_candidate_pair_changed_.Send(event);
+}
+
+void JsepTransportController::OnTransportRoleConflict_n(
+ cricket::IceTransportInternal* transport) {
+ // Note: since the role conflict is handled entirely on the network thread,
+ // we don't need to worry about role conflicts occurring on two ports at
+ // once. The first one encountered should immediately reverse the role.
+ cricket::IceRole reversed_role = (ice_role_ == cricket::ICEROLE_CONTROLLING)
+ ? cricket::ICEROLE_CONTROLLED
+ : cricket::ICEROLE_CONTROLLING;
+ RTC_LOG(LS_INFO) << "Got role conflict; switching to "
+ << (reversed_role == cricket::ICEROLE_CONTROLLING
+ ? "controlling"
+ : "controlled")
+ << " role.";
+ SetIceRole_n(reversed_role);
+}
+
+void JsepTransportController::OnTransportStateChanged_n(
+ cricket::IceTransportInternal* transport) {
+ RTC_LOG(LS_INFO) << transport->transport_name() << " Transport "
+ << transport->component()
+ << " state changed. Check if state is complete.";
+ UpdateAggregateStates_n();
+}
+
+void JsepTransportController::UpdateAggregateStates_n() {
+ TRACE_EVENT0("webrtc", "JsepTransportController::UpdateAggregateStates_n");
+ auto dtls_transports = GetActiveDtlsTransports();
+ cricket::IceConnectionState new_connection_state =
+ cricket::kIceConnectionConnecting;
+ PeerConnectionInterface::IceConnectionState new_ice_connection_state =
+ PeerConnectionInterface::IceConnectionState::kIceConnectionNew;
+ PeerConnectionInterface::PeerConnectionState new_combined_state =
+ PeerConnectionInterface::PeerConnectionState::kNew;
+ cricket::IceGatheringState new_gathering_state = cricket::kIceGatheringNew;
+ bool any_failed = false;
+ bool all_connected = !dtls_transports.empty();
+ bool all_completed = !dtls_transports.empty();
+ bool any_gathering = false;
+ bool all_done_gathering = !dtls_transports.empty();
+
+ std::map<IceTransportState, int> ice_state_counts;
+ std::map<DtlsTransportState, int> dtls_state_counts;
+
+ for (const auto& dtls : dtls_transports) {
+ any_failed = any_failed || dtls->ice_transport()->GetState() ==
+ cricket::IceTransportState::STATE_FAILED;
+ all_connected = all_connected && dtls->writable();
+ all_completed =
+ all_completed && dtls->writable() &&
+ dtls->ice_transport()->GetState() ==
+ cricket::IceTransportState::STATE_COMPLETED &&
+ dtls->ice_transport()->GetIceRole() == cricket::ICEROLE_CONTROLLING &&
+ dtls->ice_transport()->gathering_state() ==
+ cricket::kIceGatheringComplete;
+ any_gathering = any_gathering || dtls->ice_transport()->gathering_state() !=
+ cricket::kIceGatheringNew;
+ all_done_gathering =
+ all_done_gathering && dtls->ice_transport()->gathering_state() ==
+ cricket::kIceGatheringComplete;
+
+ dtls_state_counts[dtls->dtls_state()]++;
+ ice_state_counts[dtls->ice_transport()->GetIceTransportState()]++;
+ }
+
+ if (any_failed) {
+ new_connection_state = cricket::kIceConnectionFailed;
+ } else if (all_completed) {
+ new_connection_state = cricket::kIceConnectionCompleted;
+ } else if (all_connected) {
+ new_connection_state = cricket::kIceConnectionConnected;
+ }
+ if (ice_connection_state_ != new_connection_state) {
+ ice_connection_state_ = new_connection_state;
+
+ signal_ice_connection_state_.Send(new_connection_state);
+ }
+
+ // Compute the current RTCIceConnectionState as described in
+ // https://www.w3.org/TR/webrtc/#dom-rtciceconnectionstate.
+ // The PeerConnection is responsible for handling the "closed" state.
+ int total_ice_checking = ice_state_counts[IceTransportState::kChecking];
+ int total_ice_connected = ice_state_counts[IceTransportState::kConnected];
+ int total_ice_completed = ice_state_counts[IceTransportState::kCompleted];
+ int total_ice_failed = ice_state_counts[IceTransportState::kFailed];
+ int total_ice_disconnected =
+ ice_state_counts[IceTransportState::kDisconnected];
+ int total_ice_closed = ice_state_counts[IceTransportState::kClosed];
+ int total_ice_new = ice_state_counts[IceTransportState::kNew];
+ int total_ice = dtls_transports.size();
+
+ if (total_ice_failed > 0) {
+ // Any RTCIceTransports are in the "failed" state.
+ new_ice_connection_state = PeerConnectionInterface::kIceConnectionFailed;
+ } else if (total_ice_disconnected > 0) {
+ // None of the previous states apply and any RTCIceTransports are in the
+ // "disconnected" state.
+ new_ice_connection_state =
+ PeerConnectionInterface::kIceConnectionDisconnected;
+ } else if (total_ice_new + total_ice_closed == total_ice) {
+ // None of the previous states apply and all RTCIceTransports are in the
+ // "new" or "closed" state, or there are no transports.
+ new_ice_connection_state = PeerConnectionInterface::kIceConnectionNew;
+ } else if (total_ice_new + total_ice_checking > 0) {
+ // None of the previous states apply and any RTCIceTransports are in the
+ // "new" or "checking" state.
+ new_ice_connection_state = PeerConnectionInterface::kIceConnectionChecking;
+ } else if (total_ice_completed + total_ice_closed == total_ice ||
+ all_completed) {
+ // None of the previous states apply and all RTCIceTransports are in the
+ // "completed" or "closed" state.
+ //
+ // TODO(https://bugs.webrtc.org/10356): The all_completed condition is added
+ // to mimic the behavior of the old ICE connection state, and should be
+ // removed once we get end-of-candidates signaling in place.
+ new_ice_connection_state = PeerConnectionInterface::kIceConnectionCompleted;
+ } else if (total_ice_connected + total_ice_completed + total_ice_closed ==
+ total_ice) {
+ // None of the previous states apply and all RTCIceTransports are in the
+ // "connected", "completed" or "closed" state.
+ new_ice_connection_state = PeerConnectionInterface::kIceConnectionConnected;
+ } else {
+ RTC_DCHECK_NOTREACHED();
+ }
+
+ if (standardized_ice_connection_state_ != new_ice_connection_state) {
+ if (standardized_ice_connection_state_ ==
+ PeerConnectionInterface::kIceConnectionChecking &&
+ new_ice_connection_state ==
+ PeerConnectionInterface::kIceConnectionCompleted) {
+ // Ensure that we never skip over the "connected" state.
+ signal_standardized_ice_connection_state_.Send(
+ PeerConnectionInterface::kIceConnectionConnected);
+ }
+ standardized_ice_connection_state_ = new_ice_connection_state;
+ signal_standardized_ice_connection_state_.Send(new_ice_connection_state);
+ }
+
+ // Compute the current RTCPeerConnectionState as described in
+ // https://www.w3.org/TR/webrtc/#dom-rtcpeerconnectionstate.
+ // The PeerConnection is responsible for handling the "closed" state.
+ // Note that "connecting" is only a valid state for DTLS transports while
+ // "checking", "completed" and "disconnected" are only valid for ICE
+ // transports.
+ int total_connected =
+ total_ice_connected + dtls_state_counts[DtlsTransportState::kConnected];
+ int total_dtls_connecting =
+ dtls_state_counts[DtlsTransportState::kConnecting];
+ int total_failed =
+ total_ice_failed + dtls_state_counts[DtlsTransportState::kFailed];
+ int total_closed =
+ total_ice_closed + dtls_state_counts[DtlsTransportState::kClosed];
+ int total_new = total_ice_new + dtls_state_counts[DtlsTransportState::kNew];
+ int total_transports = total_ice * 2;
+
+ if (total_failed > 0) {
+ // Any of the RTCIceTransports or RTCDtlsTransports are in a "failed" state.
+ new_combined_state = PeerConnectionInterface::PeerConnectionState::kFailed;
+ } else if (total_ice_disconnected > 0) {
+ // None of the previous states apply and any RTCIceTransports or
+ // RTCDtlsTransports are in the "disconnected" state.
+ new_combined_state =
+ PeerConnectionInterface::PeerConnectionState::kDisconnected;
+ } else if (total_new + total_closed == total_transports) {
+ // None of the previous states apply and all RTCIceTransports and
+ // RTCDtlsTransports are in the "new" or "closed" state, or there are no
+ // transports.
+ new_combined_state = PeerConnectionInterface::PeerConnectionState::kNew;
+ } else if (total_new + total_dtls_connecting + total_ice_checking > 0) {
+ // None of the previous states apply and all RTCIceTransports or
+ // RTCDtlsTransports are in the "new", "connecting" or "checking" state.
+ new_combined_state =
+ PeerConnectionInterface::PeerConnectionState::kConnecting;
+ } else if (total_connected + total_ice_completed + total_closed ==
+ total_transports) {
+ // None of the previous states apply and all RTCIceTransports and
+ // RTCDtlsTransports are in the "connected", "completed" or "closed" state.
+ new_combined_state =
+ PeerConnectionInterface::PeerConnectionState::kConnected;
+ } else {
+ RTC_DCHECK_NOTREACHED();
+ }
+
+ if (combined_connection_state_ != new_combined_state) {
+ combined_connection_state_ = new_combined_state;
+ signal_connection_state_.Send(new_combined_state);
+ }
+
+ // Compute the gathering state.
+ if (dtls_transports.empty()) {
+ new_gathering_state = cricket::kIceGatheringNew;
+ } else if (all_done_gathering) {
+ new_gathering_state = cricket::kIceGatheringComplete;
+ } else if (any_gathering) {
+ new_gathering_state = cricket::kIceGatheringGathering;
+ }
+ if (ice_gathering_state_ != new_gathering_state) {
+ ice_gathering_state_ = new_gathering_state;
+ signal_ice_gathering_state_.Send(new_gathering_state);
+ }
+}
+
+void JsepTransportController::OnRtcpPacketReceived_n(
+ rtc::CopyOnWriteBuffer* packet,
+ int64_t packet_time_us) {
+ RTC_DCHECK(config_.rtcp_handler);
+ config_.rtcp_handler(*packet, packet_time_us);
+}
+
+void JsepTransportController::OnUnDemuxableRtpPacketReceived_n(
+ const webrtc::RtpPacketReceived& packet) {
+ RTC_DCHECK(config_.un_demuxable_packet_handler);
+ config_.un_demuxable_packet_handler(packet);
+}
+
+void JsepTransportController::OnDtlsHandshakeError(
+ rtc::SSLHandshakeError error) {
+ config_.on_dtls_handshake_error_(error);
+}
+
+bool JsepTransportController::OnTransportChanged(
+ const std::string& mid,
+ cricket::JsepTransport* jsep_transport) {
+ if (config_.transport_observer) {
+ if (jsep_transport) {
+ return config_.transport_observer->OnTransportChanged(
+ mid, jsep_transport->rtp_transport(),
+ jsep_transport->RtpDtlsTransport(),
+ jsep_transport->data_channel_transport());
+ } else {
+ return config_.transport_observer->OnTransportChanged(mid, nullptr,
+ nullptr, nullptr);
+ }
+ }
+ return false;
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/jsep_transport_controller.h b/third_party/libwebrtc/pc/jsep_transport_controller.h
new file mode 100644
index 0000000000..5880e346cd
--- /dev/null
+++ b/third_party/libwebrtc/pc/jsep_transport_controller.h
@@ -0,0 +1,498 @@
+/*
+ * Copyright 2017 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_JSEP_TRANSPORT_CONTROLLER_H_
+#define PC_JSEP_TRANSPORT_CONTROLLER_H_
+
+#include <stdint.h>
+
+#include <functional>
+#include <map>
+#include <memory>
+#include <string>
+#include <type_traits>
+#include <utility>
+#include <vector>
+
+#include "absl/functional/any_invocable.h"
+#include "absl/types/optional.h"
+#include "api/async_dns_resolver.h"
+#include "api/candidate.h"
+#include "api/crypto/crypto_options.h"
+#include "api/ice_transport_factory.h"
+#include "api/ice_transport_interface.h"
+#include "api/jsep.h"
+#include "api/peer_connection_interface.h"
+#include "api/rtc_error.h"
+#include "api/rtc_event_log/rtc_event_log.h"
+#include "api/scoped_refptr.h"
+#include "api/sequence_checker.h"
+#include "api/transport/data_channel_transport_interface.h"
+#include "api/transport/sctp_transport_factory_interface.h"
+#include "media/sctp/sctp_transport_internal.h"
+#include "p2p/base/dtls_transport.h"
+#include "p2p/base/dtls_transport_factory.h"
+#include "p2p/base/dtls_transport_internal.h"
+#include "p2p/base/ice_transport_internal.h"
+#include "p2p/base/p2p_transport_channel.h"
+#include "p2p/base/packet_transport_internal.h"
+#include "p2p/base/port.h"
+#include "p2p/base/port_allocator.h"
+#include "p2p/base/transport_description.h"
+#include "p2p/base/transport_info.h"
+#include "pc/dtls_srtp_transport.h"
+#include "pc/dtls_transport.h"
+#include "pc/jsep_transport.h"
+#include "pc/jsep_transport_collection.h"
+#include "pc/rtp_transport.h"
+#include "pc/rtp_transport_internal.h"
+#include "pc/sctp_transport.h"
+#include "pc/session_description.h"
+#include "pc/srtp_transport.h"
+#include "pc/transport_stats.h"
+#include "rtc_base/callback_list.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/copy_on_write_buffer.h"
+#include "rtc_base/helpers.h"
+#include "rtc_base/rtc_certificate.h"
+#include "rtc_base/ssl_certificate.h"
+#include "rtc_base/ssl_stream_adapter.h"
+#include "rtc_base/third_party/sigslot/sigslot.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/thread_annotations.h"
+
+namespace rtc {
+class Thread;
+class PacketTransportInternal;
+} // namespace rtc
+
+namespace webrtc {
+
+class JsepTransportController : public sigslot::has_slots<> {
+ public:
+ // Used when the RtpTransport/DtlsTransport of the m= section is changed
+ // because the section is rejected or BUNDLE is enabled.
+ class Observer {
+ public:
+ virtual ~Observer() {}
+
+ // Returns true if media associated with `mid` was successfully set up to be
+ // demultiplexed on `rtp_transport`. Could return false if two bundled m=
+ // sections use the same SSRC, for example.
+ //
+ // If a data channel transport must be negotiated, `data_channel_transport`
+ // and `negotiation_state` indicate negotiation status. If
+ // `data_channel_transport` is null, the data channel transport should not
+ // be used. Otherwise, the value is a pointer to the transport to be used
+ // for data channels on `mid`, if any.
+ //
+ // The observer should not send data on `data_channel_transport` until
+ // `negotiation_state` is provisional or final. It should not delete
+ // `data_channel_transport` or any fallback transport until
+ // `negotiation_state` is final.
+ virtual bool OnTransportChanged(
+ const std::string& mid,
+ RtpTransportInternal* rtp_transport,
+ rtc::scoped_refptr<DtlsTransport> dtls_transport,
+ DataChannelTransportInterface* data_channel_transport) = 0;
+ };
+
+ struct Config {
+ // If `redetermine_role_on_ice_restart` is true, ICE role is redetermined
+ // upon setting a local transport description that indicates an ICE
+ // restart.
+ bool redetermine_role_on_ice_restart = true;
+ rtc::SSLProtocolVersion ssl_max_version = rtc::SSL_PROTOCOL_DTLS_12;
+ // `crypto_options` is used to determine if created DTLS transports
+ // negotiate GCM crypto suites or not.
+ webrtc::CryptoOptions crypto_options;
+ PeerConnectionInterface::BundlePolicy bundle_policy =
+ PeerConnectionInterface::kBundlePolicyBalanced;
+ PeerConnectionInterface::RtcpMuxPolicy rtcp_mux_policy =
+ PeerConnectionInterface::kRtcpMuxPolicyRequire;
+ bool disable_encryption = false;
+ bool enable_external_auth = false;
+ // Used to inject the ICE/DTLS transports created externally.
+ webrtc::IceTransportFactory* ice_transport_factory = nullptr;
+ cricket::DtlsTransportFactory* dtls_transport_factory = nullptr;
+ Observer* transport_observer = nullptr;
+ // Must be provided and valid for the lifetime of the
+ // JsepTransportController instance.
+ absl::AnyInvocable<void(const rtc::CopyOnWriteBuffer& packet,
+ int64_t packet_time_us) const>
+ rtcp_handler;
+ absl::AnyInvocable<void(const RtpPacketReceived& parsed_packet) const>
+ un_demuxable_packet_handler;
+ // Initial value for whether DtlsTransport reset causes a reset
+ // of SRTP parameters.
+ bool active_reset_srtp_params = false;
+ RtcEventLog* event_log = nullptr;
+
+ // Factory for SCTP transports.
+ SctpTransportFactoryInterface* sctp_factory = nullptr;
+ std::function<void(rtc::SSLHandshakeError)> on_dtls_handshake_error_;
+
+ // Field trials.
+ const webrtc::FieldTrialsView* field_trials;
+ };
+
+ // The ICE related events are fired on the `network_thread`.
+ // All the transport related methods are called on the `network_thread`
+ // and destruction of the JsepTransportController must occur on the
+ // `network_thread`.
+ JsepTransportController(
+ rtc::Thread* network_thread,
+ cricket::PortAllocator* port_allocator,
+ AsyncDnsResolverFactoryInterface* async_dns_resolver_factory,
+ Config config);
+ virtual ~JsepTransportController();
+
+ JsepTransportController(const JsepTransportController&) = delete;
+ JsepTransportController& operator=(const JsepTransportController&) = delete;
+
+ // The main method to be called; applies a description at the transport
+ // level, creating/destroying transport objects as needed and updating their
+ // properties. This includes RTP, DTLS, and ICE (but not SCTP). At least not
+ // yet? May make sense to in the future.
+ RTCError SetLocalDescription(SdpType type,
+ const cricket::SessionDescription* description);
+
+ RTCError SetRemoteDescription(SdpType type,
+ const cricket::SessionDescription* description);
+
+ // Get transports to be used for the provided `mid`. If bundling is enabled,
+ // calling GetRtpTransport for multiple MIDs may yield the same object.
+ RtpTransportInternal* GetRtpTransport(absl::string_view mid) const;
+ cricket::DtlsTransportInternal* GetDtlsTransport(const std::string& mid);
+ const cricket::DtlsTransportInternal* GetRtcpDtlsTransport(
+ const std::string& mid) const;
+ // Gets the externally sharable version of the DtlsTransport.
+ rtc::scoped_refptr<webrtc::DtlsTransport> LookupDtlsTransportByMid(
+ const std::string& mid);
+ rtc::scoped_refptr<SctpTransport> GetSctpTransport(
+ const std::string& mid) const;
+
+ DataChannelTransportInterface* GetDataChannelTransport(
+ const std::string& mid) const;
+
+ /*********************
+ * ICE-related methods
+ ********************/
+ // This method is public to allow PeerConnection to update it from
+ // SetConfiguration.
+ void SetIceConfig(const cricket::IceConfig& config);
+ // Set the "needs-ice-restart" flag as described in JSEP. After the flag is
+ // set, offers should generate new ufrags/passwords until an ICE restart
+ // occurs.
+ void SetNeedsIceRestartFlag();
+ // Returns true if the ICE restart flag above was set, and no ICE restart has
+ // occurred yet for this transport (by applying a local description with
+ // changed ufrag/password). If the transport has been deleted as a result of
+ // bundling, returns false.
+ bool NeedsIceRestart(const std::string& mid) const;
+ // Start gathering candidates for any new transports, or transports doing an
+ // ICE restart.
+ void MaybeStartGathering();
+ RTCError AddRemoteCandidates(
+ const std::string& mid,
+ const std::vector<cricket::Candidate>& candidates);
+ RTCError RemoveRemoteCandidates(
+ const std::vector<cricket::Candidate>& candidates);
+
+ /**********************
+ * DTLS-related methods
+ *********************/
+ // Specifies the identity to use in this session.
+ // Can only be called once.
+ bool SetLocalCertificate(
+ const rtc::scoped_refptr<rtc::RTCCertificate>& certificate);
+ rtc::scoped_refptr<rtc::RTCCertificate> GetLocalCertificate(
+ const std::string& mid) const;
+ // Caller owns returned certificate chain. This method mainly exists for
+ // stats reporting.
+ std::unique_ptr<rtc::SSLCertChain> GetRemoteSSLCertChain(
+ const std::string& mid) const;
+ // Get negotiated role, if one has been negotiated.
+ absl::optional<rtc::SSLRole> GetDtlsRole(const std::string& mid) const;
+
+ // TODO(deadbeef): GetStats isn't const because all the way down to
+ // OpenSSLStreamAdapter, GetSslCipherSuite and GetDtlsSrtpCryptoSuite are not
+ // const. Fix this.
+ bool GetStats(const std::string& mid, cricket::TransportStats* stats);
+
+ bool initial_offerer() const { return initial_offerer_ && *initial_offerer_; }
+
+ void SetActiveResetSrtpParams(bool active_reset_srtp_params);
+
+ RTCError RollbackTransports();
+
+ // F: void(const std::string&, const std::vector<cricket::Candidate>&)
+ template <typename F>
+ void SubscribeIceCandidateGathered(F&& callback) {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ signal_ice_candidates_gathered_.AddReceiver(std::forward<F>(callback));
+ }
+
+ // F: void(cricket::IceConnectionState)
+ template <typename F>
+ void SubscribeIceConnectionState(F&& callback) {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ signal_ice_connection_state_.AddReceiver(std::forward<F>(callback));
+ }
+
+ // F: void(PeerConnectionInterface::PeerConnectionState)
+ template <typename F>
+ void SubscribeConnectionState(F&& callback) {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ signal_connection_state_.AddReceiver(std::forward<F>(callback));
+ }
+
+ // F: void(PeerConnectionInterface::IceConnectionState)
+ template <typename F>
+ void SubscribeStandardizedIceConnectionState(F&& callback) {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ signal_standardized_ice_connection_state_.AddReceiver(
+ std::forward<F>(callback));
+ }
+
+ // F: void(cricket::IceGatheringState)
+ template <typename F>
+ void SubscribeIceGatheringState(F&& callback) {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ signal_ice_gathering_state_.AddReceiver(std::forward<F>(callback));
+ }
+
+ // F: void(const cricket::IceCandidateErrorEvent&)
+ template <typename F>
+ void SubscribeIceCandidateError(F&& callback) {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ signal_ice_candidate_error_.AddReceiver(std::forward<F>(callback));
+ }
+
+ // F: void(const std::vector<cricket::Candidate>&)
+ template <typename F>
+ void SubscribeIceCandidatesRemoved(F&& callback) {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ signal_ice_candidates_removed_.AddReceiver(std::forward<F>(callback));
+ }
+
+ // F: void(const cricket::CandidatePairChangeEvent&)
+ template <typename F>
+ void SubscribeIceCandidatePairChanged(F&& callback) {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ signal_ice_candidate_pair_changed_.AddReceiver(std::forward<F>(callback));
+ }
+
+ private:
+ // All of these callbacks are fired on the network thread.
+
+ // If any transport failed => failed,
+ // Else if all completed => completed,
+ // Else if all connected => connected,
+ // Else => connecting
+ CallbackList<cricket::IceConnectionState> signal_ice_connection_state_
+ RTC_GUARDED_BY(network_thread_);
+
+ CallbackList<PeerConnectionInterface::PeerConnectionState>
+ signal_connection_state_ RTC_GUARDED_BY(network_thread_);
+
+ CallbackList<PeerConnectionInterface::IceConnectionState>
+ signal_standardized_ice_connection_state_ RTC_GUARDED_BY(network_thread_);
+
+ // If all transports done gathering => complete,
+ // Else if any are gathering => gathering,
+ // Else => new
+ CallbackList<cricket::IceGatheringState> signal_ice_gathering_state_
+ RTC_GUARDED_BY(network_thread_);
+
+ // [mid, candidates]
+ CallbackList<const std::string&, const std::vector<cricket::Candidate>&>
+ signal_ice_candidates_gathered_ RTC_GUARDED_BY(network_thread_);
+
+ CallbackList<const cricket::IceCandidateErrorEvent&>
+ signal_ice_candidate_error_ RTC_GUARDED_BY(network_thread_);
+
+ CallbackList<const std::vector<cricket::Candidate>&>
+ signal_ice_candidates_removed_ RTC_GUARDED_BY(network_thread_);
+
+ CallbackList<const cricket::CandidatePairChangeEvent&>
+ signal_ice_candidate_pair_changed_ RTC_GUARDED_BY(network_thread_);
+
+ RTCError ApplyDescription_n(bool local,
+ SdpType type,
+ const cricket::SessionDescription* description)
+ RTC_RUN_ON(network_thread_);
+ RTCError ValidateAndMaybeUpdateBundleGroups(
+ bool local,
+ SdpType type,
+ const cricket::SessionDescription* description);
+ RTCError ValidateContent(const cricket::ContentInfo& content_info);
+
+ void HandleRejectedContent(const cricket::ContentInfo& content_info)
+ RTC_RUN_ON(network_thread_);
+ bool HandleBundledContent(const cricket::ContentInfo& content_info,
+ const cricket::ContentGroup& bundle_group)
+ RTC_RUN_ON(network_thread_);
+
+ cricket::JsepTransportDescription CreateJsepTransportDescription(
+ const cricket::ContentInfo& content_info,
+ const cricket::TransportInfo& transport_info,
+ const std::vector<int>& encrypted_extension_ids,
+ int rtp_abs_sendtime_extn_id);
+
+ std::map<const cricket::ContentGroup*, std::vector<int>>
+ MergeEncryptedHeaderExtensionIdsForBundles(
+ const cricket::SessionDescription* description);
+ std::vector<int> GetEncryptedHeaderExtensionIds(
+ const cricket::ContentInfo& content_info);
+
+ int GetRtpAbsSendTimeHeaderExtensionId(
+ const cricket::ContentInfo& content_info);
+
+ // This method takes the BUNDLE group into account. If the JsepTransport is
+ // destroyed because of BUNDLE, it would return the transport which other
+ // transports are bundled on (In current implementation, it is the first
+ // content in the BUNDLE group).
+ const cricket::JsepTransport* GetJsepTransportForMid(
+ const std::string& mid) const RTC_RUN_ON(network_thread_);
+ cricket::JsepTransport* GetJsepTransportForMid(const std::string& mid)
+ RTC_RUN_ON(network_thread_);
+ const cricket::JsepTransport* GetJsepTransportForMid(
+ absl::string_view mid) const RTC_RUN_ON(network_thread_);
+ cricket::JsepTransport* GetJsepTransportForMid(absl::string_view mid)
+ RTC_RUN_ON(network_thread_);
+
+ // Get the JsepTransport without considering the BUNDLE group. Return nullptr
+ // if the JsepTransport is destroyed.
+ const cricket::JsepTransport* GetJsepTransportByName(
+ const std::string& transport_name) const RTC_RUN_ON(network_thread_);
+ cricket::JsepTransport* GetJsepTransportByName(
+ const std::string& transport_name) RTC_RUN_ON(network_thread_);
+
+ // Creates jsep transport. Noop if transport is already created.
+ // Transport is created either during SetLocalDescription (`local` == true) or
+ // during SetRemoteDescription (`local` == false). Passing `local` helps to
+ // differentiate initiator (caller) from answerer (callee).
+ RTCError MaybeCreateJsepTransport(
+ bool local,
+ const cricket::ContentInfo& content_info,
+ const cricket::SessionDescription& description)
+ RTC_RUN_ON(network_thread_);
+
+ void DestroyAllJsepTransports_n() RTC_RUN_ON(network_thread_);
+
+ void SetIceRole_n(cricket::IceRole ice_role) RTC_RUN_ON(network_thread_);
+
+ cricket::IceRole DetermineIceRole(
+ cricket::JsepTransport* jsep_transport,
+ const cricket::TransportInfo& transport_info,
+ SdpType type,
+ bool local);
+
+ std::unique_ptr<cricket::DtlsTransportInternal> CreateDtlsTransport(
+ const cricket::ContentInfo& content_info,
+ cricket::IceTransportInternal* ice);
+ rtc::scoped_refptr<webrtc::IceTransportInterface> CreateIceTransport(
+ const std::string& transport_name,
+ bool rtcp);
+
+ std::unique_ptr<webrtc::RtpTransport> CreateUnencryptedRtpTransport(
+ const std::string& transport_name,
+ rtc::PacketTransportInternal* rtp_packet_transport,
+ rtc::PacketTransportInternal* rtcp_packet_transport);
+ std::unique_ptr<webrtc::SrtpTransport> CreateSdesTransport(
+ const std::string& transport_name,
+ cricket::DtlsTransportInternal* rtp_dtls_transport,
+ cricket::DtlsTransportInternal* rtcp_dtls_transport);
+ std::unique_ptr<webrtc::DtlsSrtpTransport> CreateDtlsSrtpTransport(
+ const std::string& transport_name,
+ cricket::DtlsTransportInternal* rtp_dtls_transport,
+ cricket::DtlsTransportInternal* rtcp_dtls_transport);
+
+ // Collect all the DtlsTransports, including RTP and RTCP, from the
+ // JsepTransports, including those not mapped to a MID because they are being
+ // kept alive in case of rollback.
+ std::vector<cricket::DtlsTransportInternal*> GetDtlsTransports();
+ // Same as the above, but doesn't include rollback transports.
+ // JsepTransportController can iterate all the DtlsTransports and update the
+ // aggregate states.
+ std::vector<cricket::DtlsTransportInternal*> GetActiveDtlsTransports();
+
+ // Handlers for signals from Transport.
+ void OnTransportWritableState_n(rtc::PacketTransportInternal* transport)
+ RTC_RUN_ON(network_thread_);
+ void OnTransportReceivingState_n(rtc::PacketTransportInternal* transport)
+ RTC_RUN_ON(network_thread_);
+ void OnTransportGatheringState_n(cricket::IceTransportInternal* transport)
+ RTC_RUN_ON(network_thread_);
+ void OnTransportCandidateGathered_n(cricket::IceTransportInternal* transport,
+ const cricket::Candidate& candidate)
+ RTC_RUN_ON(network_thread_);
+ void OnTransportCandidateError_n(cricket::IceTransportInternal* transport,
+ const cricket::IceCandidateErrorEvent& event)
+ RTC_RUN_ON(network_thread_);
+ void OnTransportCandidatesRemoved_n(cricket::IceTransportInternal* transport,
+ const cricket::Candidates& candidates)
+ RTC_RUN_ON(network_thread_);
+ void OnTransportRoleConflict_n(cricket::IceTransportInternal* transport)
+ RTC_RUN_ON(network_thread_);
+ void OnTransportStateChanged_n(cricket::IceTransportInternal* transport)
+ RTC_RUN_ON(network_thread_);
+ void OnTransportCandidatePairChanged_n(
+ const cricket::CandidatePairChangeEvent& event)
+ RTC_RUN_ON(network_thread_);
+ void UpdateAggregateStates_n() RTC_RUN_ON(network_thread_);
+
+ void OnRtcpPacketReceived_n(rtc::CopyOnWriteBuffer* packet,
+ int64_t packet_time_us)
+ RTC_RUN_ON(network_thread_);
+ void OnUnDemuxableRtpPacketReceived_n(const webrtc::RtpPacketReceived& packet)
+ RTC_RUN_ON(network_thread_);
+
+ void OnDtlsHandshakeError(rtc::SSLHandshakeError error);
+
+ bool OnTransportChanged(const std::string& mid,
+ cricket::JsepTransport* transport);
+
+ rtc::Thread* const network_thread_ = nullptr;
+ cricket::PortAllocator* const port_allocator_ = nullptr;
+ AsyncDnsResolverFactoryInterface* const async_dns_resolver_factory_ = nullptr;
+
+ JsepTransportCollection transports_ RTC_GUARDED_BY(network_thread_);
+ // Aggregate states for Transports.
+ // standardized_ice_connection_state_ is intended to replace
+ // ice_connection_state, see bugs.webrtc.org/9308
+ cricket::IceConnectionState ice_connection_state_ =
+ cricket::kIceConnectionConnecting;
+ PeerConnectionInterface::IceConnectionState
+ standardized_ice_connection_state_ =
+ PeerConnectionInterface::kIceConnectionNew;
+ PeerConnectionInterface::PeerConnectionState combined_connection_state_ =
+ PeerConnectionInterface::PeerConnectionState::kNew;
+ cricket::IceGatheringState ice_gathering_state_ = cricket::kIceGatheringNew;
+
+ const Config config_;
+ bool active_reset_srtp_params_ RTC_GUARDED_BY(network_thread_);
+
+ const cricket::SessionDescription* local_desc_ = nullptr;
+ const cricket::SessionDescription* remote_desc_ = nullptr;
+ absl::optional<bool> initial_offerer_;
+
+ cricket::IceConfig ice_config_;
+ cricket::IceRole ice_role_ = cricket::ICEROLE_CONTROLLING;
+ uint64_t ice_tiebreaker_ = rtc::CreateRandomId64();
+ rtc::scoped_refptr<rtc::RTCCertificate> certificate_;
+
+ BundleManager bundles_;
+};
+
+} // namespace webrtc
+
+#endif // PC_JSEP_TRANSPORT_CONTROLLER_H_
diff --git a/third_party/libwebrtc/pc/jsep_transport_controller_unittest.cc b/third_party/libwebrtc/pc/jsep_transport_controller_unittest.cc
new file mode 100644
index 0000000000..faa8842e35
--- /dev/null
+++ b/third_party/libwebrtc/pc/jsep_transport_controller_unittest.cc
@@ -0,0 +1,2746 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/jsep_transport_controller.h"
+
+#include <map>
+#include <string>
+#include <utility>
+
+#include "api/dtls_transport_interface.h"
+#include "api/transport/enums.h"
+#include "p2p/base/candidate_pair_interface.h"
+#include "p2p/base/dtls_transport_factory.h"
+#include "p2p/base/fake_dtls_transport.h"
+#include "p2p/base/fake_ice_transport.h"
+#include "p2p/base/p2p_constants.h"
+#include "p2p/base/transport_info.h"
+#include "rtc_base/fake_ssl_identity.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/net_helper.h"
+#include "rtc_base/socket_address.h"
+#include "rtc_base/ssl_fingerprint.h"
+#include "rtc_base/ssl_identity.h"
+#include "rtc_base/task_queue_for_test.h"
+#include "rtc_base/thread.h"
+#include "test/gtest.h"
+#include "test/scoped_key_value_config.h"
+
+using cricket::Candidate;
+using cricket::Candidates;
+using cricket::FakeDtlsTransport;
+using webrtc::SdpType;
+
+static const int kTimeout = 100;
+static const char kIceUfrag1[] = "u0001";
+static const char kIcePwd1[] = "TESTICEPWD00000000000001";
+static const char kIceUfrag2[] = "u0002";
+static const char kIcePwd2[] = "TESTICEPWD00000000000002";
+static const char kIceUfrag3[] = "u0003";
+static const char kIcePwd3[] = "TESTICEPWD00000000000003";
+static const char kIceUfrag4[] = "u0004";
+static const char kIcePwd4[] = "TESTICEPWD00000000000004";
+static const char kAudioMid1[] = "audio1";
+static const char kAudioMid2[] = "audio2";
+static const char kVideoMid1[] = "video1";
+static const char kVideoMid2[] = "video2";
+static const char kDataMid1[] = "data1";
+
+namespace webrtc {
+
+class FakeIceTransportFactory : public webrtc::IceTransportFactory {
+ public:
+ ~FakeIceTransportFactory() override = default;
+ rtc::scoped_refptr<IceTransportInterface> CreateIceTransport(
+ const std::string& transport_name,
+ int component,
+ IceTransportInit init) override {
+ return rtc::make_ref_counted<cricket::FakeIceTransportWrapper>(
+ std::make_unique<cricket::FakeIceTransport>(transport_name, component));
+ }
+};
+
+class FakeDtlsTransportFactory : public cricket::DtlsTransportFactory {
+ public:
+ std::unique_ptr<cricket::DtlsTransportInternal> CreateDtlsTransport(
+ cricket::IceTransportInternal* ice,
+ const webrtc::CryptoOptions& crypto_options,
+ rtc::SSLProtocolVersion max_version) override {
+ return std::make_unique<FakeDtlsTransport>(
+ static_cast<cricket::FakeIceTransport*>(ice));
+ }
+};
+
+class JsepTransportControllerTest : public JsepTransportController::Observer,
+ public ::testing::Test,
+ public sigslot::has_slots<> {
+ public:
+ JsepTransportControllerTest() : signaling_thread_(rtc::Thread::Current()) {
+ fake_ice_transport_factory_ = std::make_unique<FakeIceTransportFactory>();
+ fake_dtls_transport_factory_ = std::make_unique<FakeDtlsTransportFactory>();
+ }
+
+ void CreateJsepTransportController(
+ JsepTransportController::Config config,
+ rtc::Thread* network_thread = rtc::Thread::Current(),
+ cricket::PortAllocator* port_allocator = nullptr) {
+ config.transport_observer = this;
+ config.rtcp_handler = [](const rtc::CopyOnWriteBuffer& packet,
+ int64_t packet_time_us) {
+ RTC_DCHECK_NOTREACHED();
+ };
+ config.ice_transport_factory = fake_ice_transport_factory_.get();
+ config.dtls_transport_factory = fake_dtls_transport_factory_.get();
+ config.on_dtls_handshake_error_ = [](rtc::SSLHandshakeError s) {};
+ config.field_trials = &field_trials_;
+ transport_controller_ = std::make_unique<JsepTransportController>(
+ network_thread, port_allocator, nullptr /* async_resolver_factory */,
+ std::move(config));
+ SendTask(network_thread, [&] { ConnectTransportControllerSignals(); });
+ }
+
+ void ConnectTransportControllerSignals() {
+ transport_controller_->SubscribeIceConnectionState(
+ [this](cricket::IceConnectionState s) {
+ JsepTransportControllerTest::OnConnectionState(s);
+ });
+ transport_controller_->SubscribeConnectionState(
+ [this](PeerConnectionInterface::PeerConnectionState s) {
+ JsepTransportControllerTest::OnCombinedConnectionState(s);
+ });
+ transport_controller_->SubscribeStandardizedIceConnectionState(
+ [this](PeerConnectionInterface::IceConnectionState s) {
+ JsepTransportControllerTest::OnStandardizedIceConnectionState(s);
+ });
+ transport_controller_->SubscribeIceGatheringState(
+ [this](cricket::IceGatheringState s) {
+ JsepTransportControllerTest::OnGatheringState(s);
+ });
+ transport_controller_->SubscribeIceCandidateGathered(
+ [this](const std::string& transport,
+ const std::vector<cricket::Candidate>& candidates) {
+ JsepTransportControllerTest::OnCandidatesGathered(transport,
+ candidates);
+ });
+ }
+
+ std::unique_ptr<cricket::SessionDescription>
+ CreateSessionDescriptionWithoutBundle() {
+ auto description = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(description.get(), kAudioMid1, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(description.get(), kVideoMid1, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ return description;
+ }
+
+ std::unique_ptr<cricket::SessionDescription>
+ CreateSessionDescriptionWithBundleGroup() {
+ auto description = CreateSessionDescriptionWithoutBundle();
+ cricket::ContentGroup bundle_group(cricket::GROUP_TYPE_BUNDLE);
+ bundle_group.AddContentName(kAudioMid1);
+ bundle_group.AddContentName(kVideoMid1);
+ description->AddGroup(bundle_group);
+
+ return description;
+ }
+
+ std::unique_ptr<cricket::SessionDescription>
+ CreateSessionDescriptionWithBundledData() {
+ auto description = CreateSessionDescriptionWithoutBundle();
+ AddDataSection(description.get(), kDataMid1,
+ cricket::MediaProtocolType::kSctp, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ cricket::ContentGroup bundle_group(cricket::GROUP_TYPE_BUNDLE);
+ bundle_group.AddContentName(kAudioMid1);
+ bundle_group.AddContentName(kVideoMid1);
+ bundle_group.AddContentName(kDataMid1);
+ description->AddGroup(bundle_group);
+ return description;
+ }
+
+ void AddAudioSection(cricket::SessionDescription* description,
+ const std::string& mid,
+ const std::string& ufrag,
+ const std::string& pwd,
+ cricket::IceMode ice_mode,
+ cricket::ConnectionRole conn_role,
+ rtc::scoped_refptr<rtc::RTCCertificate> cert) {
+ std::unique_ptr<cricket::AudioContentDescription> audio(
+ new cricket::AudioContentDescription());
+ // Set RTCP-mux to be true because the default policy is "mux required".
+ audio->set_rtcp_mux(true);
+ description->AddContent(mid, cricket::MediaProtocolType::kRtp,
+ /*rejected=*/false, std::move(audio));
+ AddTransportInfo(description, mid, ufrag, pwd, ice_mode, conn_role, cert);
+ }
+
+ void AddVideoSection(cricket::SessionDescription* description,
+ const std::string& mid,
+ const std::string& ufrag,
+ const std::string& pwd,
+ cricket::IceMode ice_mode,
+ cricket::ConnectionRole conn_role,
+ rtc::scoped_refptr<rtc::RTCCertificate> cert) {
+ std::unique_ptr<cricket::VideoContentDescription> video(
+ new cricket::VideoContentDescription());
+ // Set RTCP-mux to be true because the default policy is "mux required".
+ video->set_rtcp_mux(true);
+ description->AddContent(mid, cricket::MediaProtocolType::kRtp,
+ /*rejected=*/false, std::move(video));
+ AddTransportInfo(description, mid, ufrag, pwd, ice_mode, conn_role, cert);
+ }
+
+ void AddDataSection(cricket::SessionDescription* description,
+ const std::string& mid,
+ cricket::MediaProtocolType protocol_type,
+ const std::string& ufrag,
+ const std::string& pwd,
+ cricket::IceMode ice_mode,
+ cricket::ConnectionRole conn_role,
+ rtc::scoped_refptr<rtc::RTCCertificate> cert) {
+ RTC_CHECK(protocol_type == cricket::MediaProtocolType::kSctp);
+ std::unique_ptr<cricket::SctpDataContentDescription> data(
+ new cricket::SctpDataContentDescription());
+ data->set_rtcp_mux(true);
+ description->AddContent(mid, protocol_type,
+ /*rejected=*/false, std::move(data));
+ AddTransportInfo(description, mid, ufrag, pwd, ice_mode, conn_role, cert);
+ }
+
+ void AddTransportInfo(cricket::SessionDescription* description,
+ const std::string& mid,
+ const std::string& ufrag,
+ const std::string& pwd,
+ cricket::IceMode ice_mode,
+ cricket::ConnectionRole conn_role,
+ rtc::scoped_refptr<rtc::RTCCertificate> cert) {
+ std::unique_ptr<rtc::SSLFingerprint> fingerprint;
+ if (cert) {
+ fingerprint = rtc::SSLFingerprint::CreateFromCertificate(*cert);
+ }
+
+ cricket::TransportDescription transport_desc(std::vector<std::string>(),
+ ufrag, pwd, ice_mode,
+ conn_role, fingerprint.get());
+ description->AddTransportInfo(cricket::TransportInfo(mid, transport_desc));
+ }
+
+ cricket::IceConfig CreateIceConfig(
+ int receiving_timeout,
+ cricket::ContinualGatheringPolicy continual_gathering_policy) {
+ cricket::IceConfig config;
+ config.receiving_timeout = receiving_timeout;
+ config.continual_gathering_policy = continual_gathering_policy;
+ return config;
+ }
+
+ Candidate CreateCandidate(const std::string& transport_name, int component) {
+ Candidate c;
+ c.set_transport_name(transport_name);
+ c.set_address(rtc::SocketAddress("192.168.1.1", 8000));
+ c.set_component(component);
+ c.set_protocol(cricket::UDP_PROTOCOL_NAME);
+ c.set_priority(1);
+ return c;
+ }
+
+ void CreateLocalDescriptionAndCompleteConnectionOnNetworkThread() {
+ if (!network_thread_->IsCurrent()) {
+ SendTask(network_thread_.get(), [&] {
+ CreateLocalDescriptionAndCompleteConnectionOnNetworkThread();
+ });
+ return;
+ }
+
+ auto description = CreateSessionDescriptionWithBundleGroup();
+ EXPECT_TRUE(transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, description.get())
+ .ok());
+
+ transport_controller_->MaybeStartGathering();
+ auto fake_audio_dtls = static_cast<FakeDtlsTransport*>(
+ transport_controller_->GetDtlsTransport(kAudioMid1));
+ auto fake_video_dtls = static_cast<FakeDtlsTransport*>(
+ transport_controller_->GetDtlsTransport(kVideoMid1));
+ fake_audio_dtls->fake_ice_transport()->SignalCandidateGathered(
+ fake_audio_dtls->fake_ice_transport(),
+ CreateCandidate(kAudioMid1, /*component=*/1));
+ fake_video_dtls->fake_ice_transport()->SignalCandidateGathered(
+ fake_video_dtls->fake_ice_transport(),
+ CreateCandidate(kVideoMid1, /*component=*/1));
+ fake_audio_dtls->fake_ice_transport()->SetCandidatesGatheringComplete();
+ fake_video_dtls->fake_ice_transport()->SetCandidatesGatheringComplete();
+ fake_audio_dtls->fake_ice_transport()->SetConnectionCount(2);
+ fake_video_dtls->fake_ice_transport()->SetConnectionCount(2);
+ fake_audio_dtls->SetReceiving(true);
+ fake_video_dtls->SetReceiving(true);
+ fake_audio_dtls->SetWritable(true);
+ fake_video_dtls->SetWritable(true);
+ fake_audio_dtls->fake_ice_transport()->SetConnectionCount(1);
+ fake_video_dtls->fake_ice_transport()->SetConnectionCount(1);
+ }
+
+ protected:
+ void OnConnectionState(cricket::IceConnectionState state) {
+ ice_signaled_on_thread_ = rtc::Thread::Current();
+ connection_state_ = state;
+ ++connection_state_signal_count_;
+ }
+
+ void OnStandardizedIceConnectionState(
+ PeerConnectionInterface::IceConnectionState state) {
+ ice_signaled_on_thread_ = rtc::Thread::Current();
+ ice_connection_state_ = state;
+ ++ice_connection_state_signal_count_;
+ }
+
+ void OnCombinedConnectionState(
+ PeerConnectionInterface::PeerConnectionState state) {
+ RTC_LOG(LS_INFO) << "OnCombinedConnectionState: "
+ << static_cast<int>(state);
+ ice_signaled_on_thread_ = rtc::Thread::Current();
+ combined_connection_state_ = state;
+ ++combined_connection_state_signal_count_;
+ }
+
+ void OnGatheringState(cricket::IceGatheringState state) {
+ ice_signaled_on_thread_ = rtc::Thread::Current();
+ gathering_state_ = state;
+ ++gathering_state_signal_count_;
+ }
+
+ void OnCandidatesGathered(const std::string& transport_name,
+ const Candidates& candidates) {
+ ice_signaled_on_thread_ = rtc::Thread::Current();
+ candidates_[transport_name].insert(candidates_[transport_name].end(),
+ candidates.begin(), candidates.end());
+ ++candidates_signal_count_;
+ }
+
+ // JsepTransportController::Observer overrides.
+ bool OnTransportChanged(
+ const std::string& mid,
+ RtpTransportInternal* rtp_transport,
+ rtc::scoped_refptr<DtlsTransport> dtls_transport,
+ DataChannelTransportInterface* data_channel_transport) override {
+ changed_rtp_transport_by_mid_[mid] = rtp_transport;
+ if (dtls_transport) {
+ changed_dtls_transport_by_mid_[mid] = dtls_transport->internal();
+ } else {
+ changed_dtls_transport_by_mid_[mid] = nullptr;
+ }
+ return true;
+ }
+
+ rtc::AutoThread main_thread_;
+ // Information received from signals from transport controller.
+ cricket::IceConnectionState connection_state_ =
+ cricket::kIceConnectionConnecting;
+ PeerConnectionInterface::IceConnectionState ice_connection_state_ =
+ PeerConnectionInterface::kIceConnectionNew;
+ PeerConnectionInterface::PeerConnectionState combined_connection_state_ =
+ PeerConnectionInterface::PeerConnectionState::kNew;
+ bool receiving_ = false;
+ cricket::IceGatheringState gathering_state_ = cricket::kIceGatheringNew;
+ // transport_name => candidates
+ std::map<std::string, Candidates> candidates_;
+ // Counts of each signal emitted.
+ int connection_state_signal_count_ = 0;
+ int ice_connection_state_signal_count_ = 0;
+ int combined_connection_state_signal_count_ = 0;
+ int receiving_signal_count_ = 0;
+ int gathering_state_signal_count_ = 0;
+ int candidates_signal_count_ = 0;
+
+ // `network_thread_` should be destroyed after `transport_controller_`
+ std::unique_ptr<rtc::Thread> network_thread_;
+ std::unique_ptr<FakeIceTransportFactory> fake_ice_transport_factory_;
+ std::unique_ptr<FakeDtlsTransportFactory> fake_dtls_transport_factory_;
+ rtc::Thread* const signaling_thread_ = nullptr;
+ rtc::Thread* ice_signaled_on_thread_ = nullptr;
+ // Used to verify the SignalRtpTransportChanged/SignalDtlsTransportChanged are
+ // signaled correctly.
+ std::map<std::string, RtpTransportInternal*> changed_rtp_transport_by_mid_;
+ std::map<std::string, cricket::DtlsTransportInternal*>
+ changed_dtls_transport_by_mid_;
+
+ // Transport controller needs to be destroyed first, because it may issue
+ // callbacks that modify the changed_*_by_mid in the destructor.
+ std::unique_ptr<JsepTransportController> transport_controller_;
+ webrtc::test::ScopedKeyValueConfig field_trials_;
+};
+
+TEST_F(JsepTransportControllerTest, GetRtpTransport) {
+ CreateJsepTransportController(JsepTransportController::Config());
+ auto description = CreateSessionDescriptionWithoutBundle();
+ EXPECT_TRUE(transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, description.get())
+ .ok());
+ auto audio_rtp_transport = transport_controller_->GetRtpTransport(kAudioMid1);
+ auto video_rtp_transport = transport_controller_->GetRtpTransport(kVideoMid1);
+ EXPECT_NE(nullptr, audio_rtp_transport);
+ EXPECT_NE(nullptr, video_rtp_transport);
+ EXPECT_NE(audio_rtp_transport, video_rtp_transport);
+ // Return nullptr for non-existing ones.
+ EXPECT_EQ(nullptr, transport_controller_->GetRtpTransport(kAudioMid2));
+}
+
+TEST_F(JsepTransportControllerTest, GetDtlsTransport) {
+ JsepTransportController::Config config;
+ config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyNegotiate;
+ CreateJsepTransportController(std::move(config));
+ auto description = CreateSessionDescriptionWithoutBundle();
+ EXPECT_TRUE(transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, description.get())
+ .ok());
+ EXPECT_NE(nullptr, transport_controller_->GetDtlsTransport(kAudioMid1));
+ EXPECT_NE(nullptr, transport_controller_->GetRtcpDtlsTransport(kAudioMid1));
+ EXPECT_NE(nullptr,
+ transport_controller_->LookupDtlsTransportByMid(kAudioMid1));
+ EXPECT_NE(nullptr, transport_controller_->GetDtlsTransport(kVideoMid1));
+ EXPECT_NE(nullptr, transport_controller_->GetRtcpDtlsTransport(kVideoMid1));
+ EXPECT_NE(nullptr,
+ transport_controller_->LookupDtlsTransportByMid(kVideoMid1));
+ // Lookup for all MIDs should return different transports (no bundle)
+ EXPECT_NE(transport_controller_->LookupDtlsTransportByMid(kAudioMid1),
+ transport_controller_->LookupDtlsTransportByMid(kVideoMid1));
+ // Return nullptr for non-existing ones.
+ EXPECT_EQ(nullptr, transport_controller_->GetDtlsTransport(kVideoMid2));
+ EXPECT_EQ(nullptr, transport_controller_->GetRtcpDtlsTransport(kVideoMid2));
+ EXPECT_EQ(nullptr,
+ transport_controller_->LookupDtlsTransportByMid(kVideoMid2));
+ // Take a pointer to a transport, shut down the transport controller,
+ // and verify that the resulting container is empty.
+ auto dtls_transport =
+ transport_controller_->LookupDtlsTransportByMid(kVideoMid1);
+ webrtc::DtlsTransport* my_transport =
+ static_cast<DtlsTransport*>(dtls_transport.get());
+ EXPECT_NE(nullptr, my_transport->internal());
+ transport_controller_.reset();
+ EXPECT_EQ(nullptr, my_transport->internal());
+}
+
+TEST_F(JsepTransportControllerTest, GetDtlsTransportWithRtcpMux) {
+ JsepTransportController::Config config;
+ config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyRequire;
+ CreateJsepTransportController(std::move(config));
+ auto description = CreateSessionDescriptionWithoutBundle();
+ EXPECT_TRUE(transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, description.get())
+ .ok());
+ EXPECT_NE(nullptr, transport_controller_->GetDtlsTransport(kAudioMid1));
+ EXPECT_EQ(nullptr, transport_controller_->GetRtcpDtlsTransport(kAudioMid1));
+ EXPECT_NE(nullptr, transport_controller_->GetDtlsTransport(kVideoMid1));
+ EXPECT_EQ(nullptr, transport_controller_->GetRtcpDtlsTransport(kVideoMid1));
+}
+
+TEST_F(JsepTransportControllerTest, SetIceConfig) {
+ CreateJsepTransportController(JsepTransportController::Config());
+ auto description = CreateSessionDescriptionWithoutBundle();
+ EXPECT_TRUE(transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, description.get())
+ .ok());
+
+ transport_controller_->SetIceConfig(
+ CreateIceConfig(kTimeout, cricket::GATHER_CONTINUALLY));
+ FakeDtlsTransport* fake_audio_dtls = static_cast<FakeDtlsTransport*>(
+ transport_controller_->GetDtlsTransport(kAudioMid1));
+ ASSERT_NE(nullptr, fake_audio_dtls);
+ EXPECT_EQ(kTimeout,
+ fake_audio_dtls->fake_ice_transport()->receiving_timeout());
+ EXPECT_TRUE(fake_audio_dtls->fake_ice_transport()->gather_continually());
+
+ // Test that value stored in controller is applied to new transports.
+ AddAudioSection(description.get(), kAudioMid2, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+
+ EXPECT_TRUE(transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, description.get())
+ .ok());
+ fake_audio_dtls = static_cast<FakeDtlsTransport*>(
+ transport_controller_->GetDtlsTransport(kAudioMid2));
+ ASSERT_NE(nullptr, fake_audio_dtls);
+ EXPECT_EQ(kTimeout,
+ fake_audio_dtls->fake_ice_transport()->receiving_timeout());
+ EXPECT_TRUE(fake_audio_dtls->fake_ice_transport()->gather_continually());
+}
+
+// Tests the getter and setter of the ICE restart flag.
+TEST_F(JsepTransportControllerTest, NeedIceRestart) {
+ CreateJsepTransportController(JsepTransportController::Config());
+ auto description = CreateSessionDescriptionWithoutBundle();
+ EXPECT_TRUE(transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, description.get())
+ .ok());
+ EXPECT_TRUE(transport_controller_
+ ->SetRemoteDescription(SdpType::kAnswer, description.get())
+ .ok());
+
+ // Initially NeedsIceRestart should return false.
+ EXPECT_FALSE(transport_controller_->NeedsIceRestart(kAudioMid1));
+ EXPECT_FALSE(transport_controller_->NeedsIceRestart(kVideoMid1));
+ // Set the needs-ice-restart flag and verify NeedsIceRestart starts returning
+ // true.
+ transport_controller_->SetNeedsIceRestartFlag();
+ EXPECT_TRUE(transport_controller_->NeedsIceRestart(kAudioMid1));
+ EXPECT_TRUE(transport_controller_->NeedsIceRestart(kVideoMid1));
+ // For a nonexistent transport, false should be returned.
+ EXPECT_FALSE(transport_controller_->NeedsIceRestart(kVideoMid2));
+
+ // Reset the ice_ufrag/ice_pwd for audio.
+ auto audio_transport_info = description->GetTransportInfoByName(kAudioMid1);
+ audio_transport_info->description.ice_ufrag = kIceUfrag2;
+ audio_transport_info->description.ice_pwd = kIcePwd2;
+ EXPECT_TRUE(transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, description.get())
+ .ok());
+ // Because the ICE is only restarted for audio, NeedsIceRestart is expected to
+ // return false for audio and true for video.
+ EXPECT_FALSE(transport_controller_->NeedsIceRestart(kAudioMid1));
+ EXPECT_TRUE(transport_controller_->NeedsIceRestart(kVideoMid1));
+}
+
+TEST_F(JsepTransportControllerTest, MaybeStartGathering) {
+ CreateJsepTransportController(JsepTransportController::Config());
+ auto description = CreateSessionDescriptionWithBundleGroup();
+ EXPECT_TRUE(transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, description.get())
+ .ok());
+ // After setting the local description, we should be able to start gathering
+ // candidates.
+ transport_controller_->MaybeStartGathering();
+ EXPECT_EQ_WAIT(cricket::kIceGatheringGathering, gathering_state_, kTimeout);
+ EXPECT_EQ(1, gathering_state_signal_count_);
+}
+
+TEST_F(JsepTransportControllerTest, AddRemoveRemoteCandidates) {
+ CreateJsepTransportController(JsepTransportController::Config());
+ auto description = CreateSessionDescriptionWithoutBundle();
+ transport_controller_->SetLocalDescription(SdpType::kOffer,
+ description.get());
+ transport_controller_->SetRemoteDescription(SdpType::kAnswer,
+ description.get());
+ auto fake_audio_dtls = static_cast<FakeDtlsTransport*>(
+ transport_controller_->GetDtlsTransport(kAudioMid1));
+ ASSERT_NE(nullptr, fake_audio_dtls);
+ Candidates candidates;
+ candidates.push_back(
+ CreateCandidate(kAudioMid1, cricket::ICE_CANDIDATE_COMPONENT_RTP));
+ EXPECT_TRUE(
+ transport_controller_->AddRemoteCandidates(kAudioMid1, candidates).ok());
+ EXPECT_EQ(1U,
+ fake_audio_dtls->fake_ice_transport()->remote_candidates().size());
+
+ EXPECT_TRUE(transport_controller_->RemoveRemoteCandidates(candidates).ok());
+ EXPECT_EQ(0U,
+ fake_audio_dtls->fake_ice_transport()->remote_candidates().size());
+}
+
+TEST_F(JsepTransportControllerTest, SetAndGetLocalCertificate) {
+ CreateJsepTransportController(JsepTransportController::Config());
+
+ rtc::scoped_refptr<rtc::RTCCertificate> certificate1 =
+ rtc::RTCCertificate::Create(
+ rtc::SSLIdentity::Create("session1", rtc::KT_DEFAULT));
+ rtc::scoped_refptr<rtc::RTCCertificate> returned_certificate;
+
+ auto description = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(description.get(), kAudioMid1, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ certificate1);
+
+ // Apply the local certificate.
+ EXPECT_TRUE(transport_controller_->SetLocalCertificate(certificate1));
+ // Apply the local description.
+ EXPECT_TRUE(transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, description.get())
+ .ok());
+ returned_certificate = transport_controller_->GetLocalCertificate(kAudioMid1);
+ EXPECT_TRUE(returned_certificate);
+ EXPECT_EQ(certificate1->identity()->certificate().ToPEMString(),
+ returned_certificate->identity()->certificate().ToPEMString());
+
+  // Should fail if called for a nonexistent transport.
+ EXPECT_EQ(nullptr, transport_controller_->GetLocalCertificate(kVideoMid1));
+
+ // Shouldn't be able to change the identity once set.
+ rtc::scoped_refptr<rtc::RTCCertificate> certificate2 =
+ rtc::RTCCertificate::Create(
+ rtc::SSLIdentity::Create("session2", rtc::KT_DEFAULT));
+ EXPECT_FALSE(transport_controller_->SetLocalCertificate(certificate2));
+}
+
+TEST_F(JsepTransportControllerTest, GetRemoteSSLCertChain) {
+ CreateJsepTransportController(JsepTransportController::Config());
+ auto description = CreateSessionDescriptionWithBundleGroup();
+ EXPECT_TRUE(transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, description.get())
+ .ok());
+ rtc::FakeSSLCertificate fake_certificate("fake_data");
+
+ auto fake_audio_dtls = static_cast<FakeDtlsTransport*>(
+ transport_controller_->GetDtlsTransport(kAudioMid1));
+ fake_audio_dtls->SetRemoteSSLCertificate(&fake_certificate);
+ std::unique_ptr<rtc::SSLCertChain> returned_cert_chain =
+ transport_controller_->GetRemoteSSLCertChain(kAudioMid1);
+ ASSERT_TRUE(returned_cert_chain);
+ ASSERT_EQ(1u, returned_cert_chain->GetSize());
+ EXPECT_EQ(fake_certificate.ToPEMString(),
+ returned_cert_chain->Get(0).ToPEMString());
+
+  // Should fail if called for a nonexistent transport.
+ EXPECT_FALSE(transport_controller_->GetRemoteSSLCertChain(kAudioMid2));
+}
+
+TEST_F(JsepTransportControllerTest, GetDtlsRole) {
+ CreateJsepTransportController(JsepTransportController::Config());
+ auto offer_certificate = rtc::RTCCertificate::Create(
+ rtc::SSLIdentity::Create("offer", rtc::KT_DEFAULT));
+ auto answer_certificate = rtc::RTCCertificate::Create(
+ rtc::SSLIdentity::Create("answer", rtc::KT_DEFAULT));
+ transport_controller_->SetLocalCertificate(offer_certificate);
+
+ auto offer_desc = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(offer_desc.get(), kAudioMid1, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ offer_certificate);
+ auto answer_desc = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(answer_desc.get(), kAudioMid1, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_PASSIVE,
+ answer_certificate);
+
+ EXPECT_TRUE(transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, offer_desc.get())
+ .ok());
+
+ absl::optional<rtc::SSLRole> role =
+ transport_controller_->GetDtlsRole(kAudioMid1);
+ // The DTLS role is not decided yet.
+ EXPECT_FALSE(role);
+ EXPECT_TRUE(transport_controller_
+ ->SetRemoteDescription(SdpType::kAnswer, answer_desc.get())
+ .ok());
+ role = transport_controller_->GetDtlsRole(kAudioMid1);
+
+ ASSERT_TRUE(role);
+ EXPECT_EQ(rtc::SSL_CLIENT, *role);
+}
+
+TEST_F(JsepTransportControllerTest, GetStats) {
+ CreateJsepTransportController(JsepTransportController::Config());
+ auto description = CreateSessionDescriptionWithBundleGroup();
+ EXPECT_TRUE(transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, description.get())
+ .ok());
+
+ cricket::TransportStats stats;
+ EXPECT_TRUE(transport_controller_->GetStats(kAudioMid1, &stats));
+ EXPECT_EQ(kAudioMid1, stats.transport_name);
+ EXPECT_EQ(1u, stats.channel_stats.size());
+ // Return false for non-existing transport.
+ EXPECT_FALSE(transport_controller_->GetStats(kAudioMid2, &stats));
+}
+
+TEST_F(JsepTransportControllerTest, SignalConnectionStateFailed) {
+ CreateJsepTransportController(JsepTransportController::Config());
+ auto description = CreateSessionDescriptionWithoutBundle();
+ EXPECT_TRUE(transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, description.get())
+ .ok());
+
+ auto fake_ice = static_cast<cricket::FakeIceTransport*>(
+ transport_controller_->GetDtlsTransport(kAudioMid1)->ice_transport());
+ fake_ice->SetCandidatesGatheringComplete();
+ fake_ice->SetConnectionCount(1);
+  // The connection state will be failed if there is no active connection.
+ fake_ice->SetConnectionCount(0);
+ EXPECT_EQ_WAIT(cricket::kIceConnectionFailed, connection_state_, kTimeout);
+ EXPECT_EQ(1, connection_state_signal_count_);
+ EXPECT_EQ_WAIT(PeerConnectionInterface::kIceConnectionFailed,
+ ice_connection_state_, kTimeout);
+ EXPECT_EQ(1, ice_connection_state_signal_count_);
+ EXPECT_EQ_WAIT(PeerConnectionInterface::PeerConnectionState::kFailed,
+ combined_connection_state_, kTimeout);
+ EXPECT_EQ(1, combined_connection_state_signal_count_);
+}
+
+TEST_F(JsepTransportControllerTest,
+ SignalConnectionStateConnectedNoMediaTransport) {
+ CreateJsepTransportController(JsepTransportController::Config());
+ auto description = CreateSessionDescriptionWithoutBundle();
+ EXPECT_TRUE(transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, description.get())
+ .ok());
+
+ auto fake_audio_dtls = static_cast<FakeDtlsTransport*>(
+ transport_controller_->GetDtlsTransport(kAudioMid1));
+ auto fake_video_dtls = static_cast<FakeDtlsTransport*>(
+ transport_controller_->GetDtlsTransport(kVideoMid1));
+
+ // First, have one transport connect, and another fail, to ensure that
+ // the first transport connecting didn't trigger a "connected" state signal.
+ // We should only get a signal when all are connected.
+ fake_audio_dtls->fake_ice_transport()->SetConnectionCount(1);
+ fake_audio_dtls->SetWritable(true);
+ fake_audio_dtls->fake_ice_transport()->SetCandidatesGatheringComplete();
+  // Drop the connection count to zero to trigger the failed-state signal.
+ fake_video_dtls->fake_ice_transport()->SetConnectionCount(1);
+ fake_video_dtls->fake_ice_transport()->SetConnectionCount(0);
+ fake_video_dtls->fake_ice_transport()->SetCandidatesGatheringComplete();
+
+ EXPECT_EQ_WAIT(cricket::kIceConnectionFailed, connection_state_, kTimeout);
+ EXPECT_EQ(1, connection_state_signal_count_);
+ EXPECT_EQ_WAIT(PeerConnectionInterface::kIceConnectionFailed,
+ ice_connection_state_, kTimeout);
+ EXPECT_EQ(2, ice_connection_state_signal_count_);
+ EXPECT_EQ_WAIT(PeerConnectionInterface::PeerConnectionState::kFailed,
+ combined_connection_state_, kTimeout);
+ EXPECT_EQ(2, combined_connection_state_signal_count_);
+
+ fake_audio_dtls->SetDtlsState(DtlsTransportState::kConnected);
+ fake_video_dtls->SetDtlsState(DtlsTransportState::kConnected);
+ // Set the connection count to be 2 and the cricket::FakeIceTransport will set
+ // the transport state to be STATE_CONNECTING.
+ fake_video_dtls->fake_ice_transport()->SetConnectionCount(2);
+ fake_video_dtls->SetWritable(true);
+ EXPECT_EQ_WAIT(cricket::kIceConnectionConnected, connection_state_, kTimeout);
+ EXPECT_EQ(2, connection_state_signal_count_);
+ EXPECT_EQ_WAIT(PeerConnectionInterface::kIceConnectionConnected,
+ ice_connection_state_, kTimeout);
+ EXPECT_EQ(3, ice_connection_state_signal_count_);
+ EXPECT_EQ_WAIT(PeerConnectionInterface::PeerConnectionState::kConnected,
+ combined_connection_state_, kTimeout);
+ EXPECT_EQ(3, combined_connection_state_signal_count_);
+}
+
+TEST_F(JsepTransportControllerTest, SignalConnectionStateComplete) {
+ CreateJsepTransportController(JsepTransportController::Config());
+ auto description = CreateSessionDescriptionWithoutBundle();
+ EXPECT_TRUE(transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, description.get())
+ .ok());
+
+ auto fake_audio_dtls = static_cast<FakeDtlsTransport*>(
+ transport_controller_->GetDtlsTransport(kAudioMid1));
+ auto fake_video_dtls = static_cast<FakeDtlsTransport*>(
+ transport_controller_->GetDtlsTransport(kVideoMid1));
+
+ // First, have one transport connect, and another fail, to ensure that
+ // the first transport connecting didn't trigger a "connected" state signal.
+ // We should only get a signal when all are connected.
+ fake_audio_dtls->fake_ice_transport()->SetTransportState(
+ IceTransportState::kCompleted,
+ cricket::IceTransportState::STATE_COMPLETED);
+ fake_audio_dtls->SetWritable(true);
+ fake_audio_dtls->fake_ice_transport()->SetCandidatesGatheringComplete();
+
+ EXPECT_EQ_WAIT(PeerConnectionInterface::kIceConnectionChecking,
+ ice_connection_state_, kTimeout);
+ EXPECT_EQ(1, ice_connection_state_signal_count_);
+ EXPECT_EQ_WAIT(PeerConnectionInterface::PeerConnectionState::kConnecting,
+ combined_connection_state_, kTimeout);
+ EXPECT_EQ(1, combined_connection_state_signal_count_);
+
+ fake_video_dtls->fake_ice_transport()->SetTransportState(
+ IceTransportState::kFailed, cricket::IceTransportState::STATE_FAILED);
+ fake_video_dtls->fake_ice_transport()->SetCandidatesGatheringComplete();
+
+ EXPECT_EQ_WAIT(cricket::kIceConnectionFailed, connection_state_, kTimeout);
+ EXPECT_EQ(1, connection_state_signal_count_);
+ EXPECT_EQ_WAIT(PeerConnectionInterface::kIceConnectionFailed,
+ ice_connection_state_, kTimeout);
+ EXPECT_EQ(2, ice_connection_state_signal_count_);
+ EXPECT_EQ_WAIT(PeerConnectionInterface::PeerConnectionState::kFailed,
+ combined_connection_state_, kTimeout);
+ EXPECT_EQ(2, combined_connection_state_signal_count_);
+
+ fake_audio_dtls->SetDtlsState(DtlsTransportState::kConnected);
+ fake_video_dtls->SetDtlsState(DtlsTransportState::kConnected);
+ // Set the connection count to be 1 and the cricket::FakeIceTransport will set
+ // the transport state to be STATE_COMPLETED.
+ fake_video_dtls->fake_ice_transport()->SetTransportState(
+ IceTransportState::kCompleted,
+ cricket::IceTransportState::STATE_COMPLETED);
+ fake_video_dtls->SetWritable(true);
+ EXPECT_EQ_WAIT(cricket::kIceConnectionCompleted, connection_state_, kTimeout);
+ EXPECT_EQ(3, connection_state_signal_count_);
+ EXPECT_EQ_WAIT(PeerConnectionInterface::kIceConnectionCompleted,
+ ice_connection_state_, kTimeout);
+ EXPECT_EQ(3, ice_connection_state_signal_count_);
+ EXPECT_EQ_WAIT(PeerConnectionInterface::PeerConnectionState::kConnected,
+ combined_connection_state_, kTimeout);
+ EXPECT_EQ(3, combined_connection_state_signal_count_);
+}
+
+TEST_F(JsepTransportControllerTest, SignalIceGatheringStateGathering) {
+ CreateJsepTransportController(JsepTransportController::Config());
+ auto description = CreateSessionDescriptionWithoutBundle();
+ EXPECT_TRUE(transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, description.get())
+ .ok());
+
+ auto fake_audio_dtls = static_cast<FakeDtlsTransport*>(
+ transport_controller_->GetDtlsTransport(kAudioMid1));
+ fake_audio_dtls->fake_ice_transport()->MaybeStartGathering();
+ // Should be in the gathering state as soon as any transport starts gathering.
+ EXPECT_EQ_WAIT(cricket::kIceGatheringGathering, gathering_state_, kTimeout);
+ EXPECT_EQ(1, gathering_state_signal_count_);
+}
+
+TEST_F(JsepTransportControllerTest, SignalIceGatheringStateComplete) {
+ CreateJsepTransportController(JsepTransportController::Config());
+ auto description = CreateSessionDescriptionWithoutBundle();
+ EXPECT_TRUE(transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, description.get())
+ .ok());
+
+ auto fake_audio_dtls = static_cast<FakeDtlsTransport*>(
+ transport_controller_->GetDtlsTransport(kAudioMid1));
+ auto fake_video_dtls = static_cast<FakeDtlsTransport*>(
+ transport_controller_->GetDtlsTransport(kVideoMid1));
+
+ fake_audio_dtls->fake_ice_transport()->MaybeStartGathering();
+ EXPECT_EQ_WAIT(cricket::kIceGatheringGathering, gathering_state_, kTimeout);
+ EXPECT_EQ(1, gathering_state_signal_count_);
+
+ // Have one transport finish gathering, to make sure gathering
+ // completion wasn't signalled if only one transport finished gathering.
+ fake_audio_dtls->fake_ice_transport()->SetCandidatesGatheringComplete();
+ EXPECT_EQ(1, gathering_state_signal_count_);
+
+ fake_video_dtls->fake_ice_transport()->MaybeStartGathering();
+ EXPECT_EQ_WAIT(cricket::kIceGatheringGathering, gathering_state_, kTimeout);
+ EXPECT_EQ(1, gathering_state_signal_count_);
+
+ fake_video_dtls->fake_ice_transport()->SetCandidatesGatheringComplete();
+ EXPECT_EQ_WAIT(cricket::kIceGatheringComplete, gathering_state_, kTimeout);
+ EXPECT_EQ(2, gathering_state_signal_count_);
+}
+
+// Test that when the last transport that hasn't finished connecting and/or
+// gathering is destroyed, the aggregate state jumps to "completed". This can
+// happen if, for example, we have an audio and video transport, the audio
+// transport completes, then we start bundling video on the audio transport.
+TEST_F(JsepTransportControllerTest,
+ SignalingWhenLastIncompleteTransportDestroyed) {
+ CreateJsepTransportController(JsepTransportController::Config());
+ auto description = CreateSessionDescriptionWithBundleGroup();
+ EXPECT_TRUE(transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, description.get())
+ .ok());
+
+ auto fake_audio_dtls = static_cast<FakeDtlsTransport*>(
+ transport_controller_->GetDtlsTransport(kAudioMid1));
+ auto fake_video_dtls = static_cast<FakeDtlsTransport*>(
+ transport_controller_->GetDtlsTransport(kVideoMid1));
+ EXPECT_NE(fake_audio_dtls, fake_video_dtls);
+
+ fake_audio_dtls->fake_ice_transport()->MaybeStartGathering();
+ EXPECT_EQ_WAIT(cricket::kIceGatheringGathering, gathering_state_, kTimeout);
+ EXPECT_EQ(1, gathering_state_signal_count_);
+
+ // Let the audio transport complete.
+ fake_audio_dtls->SetWritable(true);
+ fake_audio_dtls->fake_ice_transport()->SetCandidatesGatheringComplete();
+ fake_audio_dtls->fake_ice_transport()->SetConnectionCount(1);
+ fake_audio_dtls->SetDtlsState(DtlsTransportState::kConnected);
+ EXPECT_EQ(1, gathering_state_signal_count_);
+
+ // Set the remote description and enable the bundle.
+ EXPECT_TRUE(transport_controller_
+ ->SetRemoteDescription(SdpType::kAnswer, description.get())
+ .ok());
+ // The BUNDLE should be enabled, the incomplete video transport should be
+ // deleted and the states should be updated.
+ fake_video_dtls = static_cast<FakeDtlsTransport*>(
+ transport_controller_->GetDtlsTransport(kVideoMid1));
+ EXPECT_EQ(fake_audio_dtls, fake_video_dtls);
+ EXPECT_EQ_WAIT(cricket::kIceConnectionCompleted, connection_state_, kTimeout);
+ EXPECT_EQ(PeerConnectionInterface::kIceConnectionCompleted,
+ ice_connection_state_);
+ EXPECT_EQ(PeerConnectionInterface::PeerConnectionState::kConnected,
+ combined_connection_state_);
+ EXPECT_EQ_WAIT(cricket::kIceGatheringComplete, gathering_state_, kTimeout);
+ EXPECT_EQ(2, gathering_state_signal_count_);
+}
+
+// Test that states immediately return to "new" if all transports are
+// discarded. This should happen at offer time, even though the transport
+// controller may keep the transport alive in case of rollback.
+TEST_F(JsepTransportControllerTest,
+ IceStatesReturnToNewWhenTransportsDiscarded) {
+ CreateJsepTransportController(JsepTransportController::Config());
+ auto description = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(description.get(), kAudioMid1, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ EXPECT_TRUE(transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, description.get())
+ .ok());
+ EXPECT_TRUE(transport_controller_
+ ->SetRemoteDescription(SdpType::kAnswer, description.get())
+ .ok());
+
+ // Trigger and verify initial non-new states.
+ auto fake_audio_dtls = static_cast<FakeDtlsTransport*>(
+ transport_controller_->GetDtlsTransport(kAudioMid1));
+ fake_audio_dtls->fake_ice_transport()->MaybeStartGathering();
+ fake_audio_dtls->fake_ice_transport()->SetTransportState(
+ webrtc::IceTransportState::kChecking,
+ cricket::IceTransportState::STATE_CONNECTING);
+ EXPECT_EQ_WAIT(PeerConnectionInterface::kIceConnectionChecking,
+ ice_connection_state_, kTimeout);
+ EXPECT_EQ(1, ice_connection_state_signal_count_);
+ EXPECT_EQ_WAIT(PeerConnectionInterface::PeerConnectionState::kConnecting,
+ combined_connection_state_, kTimeout);
+ EXPECT_EQ(1, combined_connection_state_signal_count_);
+ EXPECT_EQ_WAIT(cricket::kIceGatheringGathering, gathering_state_, kTimeout);
+ EXPECT_EQ(1, gathering_state_signal_count_);
+
+ // Reject m= section which should disconnect the transport and return states
+ // to "new".
+ description->contents()[0].rejected = true;
+ EXPECT_TRUE(transport_controller_
+ ->SetRemoteDescription(SdpType::kOffer, description.get())
+ .ok());
+ EXPECT_EQ_WAIT(PeerConnectionInterface::kIceConnectionNew,
+ ice_connection_state_, kTimeout);
+ EXPECT_EQ(2, ice_connection_state_signal_count_);
+ EXPECT_EQ_WAIT(PeerConnectionInterface::PeerConnectionState::kNew,
+ combined_connection_state_, kTimeout);
+ EXPECT_EQ(2, combined_connection_state_signal_count_);
+ EXPECT_EQ_WAIT(cricket::kIceGatheringNew, gathering_state_, kTimeout);
+ EXPECT_EQ(2, gathering_state_signal_count_);
+
+ // For good measure, rollback the offer and verify that states return to
+ // their previous values.
+ EXPECT_TRUE(transport_controller_->RollbackTransports().ok());
+ EXPECT_EQ_WAIT(PeerConnectionInterface::kIceConnectionChecking,
+ ice_connection_state_, kTimeout);
+ EXPECT_EQ(3, ice_connection_state_signal_count_);
+ EXPECT_EQ_WAIT(PeerConnectionInterface::PeerConnectionState::kConnecting,
+ combined_connection_state_, kTimeout);
+ EXPECT_EQ(3, combined_connection_state_signal_count_);
+ EXPECT_EQ_WAIT(cricket::kIceGatheringGathering, gathering_state_, kTimeout);
+ EXPECT_EQ(3, gathering_state_signal_count_);
+}
+
+TEST_F(JsepTransportControllerTest, SignalCandidatesGathered) {
+ CreateJsepTransportController(JsepTransportController::Config());
+ auto description = CreateSessionDescriptionWithBundleGroup();
+ EXPECT_TRUE(transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, description.get())
+ .ok());
+ transport_controller_->MaybeStartGathering();
+
+ auto fake_audio_dtls = static_cast<FakeDtlsTransport*>(
+ transport_controller_->GetDtlsTransport(kAudioMid1));
+ fake_audio_dtls->fake_ice_transport()->SignalCandidateGathered(
+ fake_audio_dtls->fake_ice_transport(), CreateCandidate(kAudioMid1, 1));
+ EXPECT_EQ_WAIT(1, candidates_signal_count_, kTimeout);
+ EXPECT_EQ(1u, candidates_[kAudioMid1].size());
+}
+
+TEST_F(JsepTransportControllerTest, IceSignalingOccursOnNetworkThread) {
+ network_thread_ = rtc::Thread::CreateWithSocketServer();
+ network_thread_->Start();
+ EXPECT_EQ(ice_signaled_on_thread_, nullptr);
+ CreateJsepTransportController(JsepTransportController::Config(),
+ network_thread_.get(),
+ /*port_allocator=*/nullptr);
+ CreateLocalDescriptionAndCompleteConnectionOnNetworkThread();
+
+ // connecting --> connected --> completed
+ EXPECT_EQ_WAIT(cricket::kIceConnectionCompleted, connection_state_, kTimeout);
+ EXPECT_EQ(2, connection_state_signal_count_);
+
+ // new --> gathering --> complete
+ EXPECT_EQ_WAIT(cricket::kIceGatheringComplete, gathering_state_, kTimeout);
+ EXPECT_EQ(2, gathering_state_signal_count_);
+
+ EXPECT_EQ_WAIT(1u, candidates_[kAudioMid1].size(), kTimeout);
+ EXPECT_EQ_WAIT(1u, candidates_[kVideoMid1].size(), kTimeout);
+ EXPECT_EQ(2, candidates_signal_count_);
+
+ EXPECT_EQ(ice_signaled_on_thread_, network_thread_.get());
+
+ SendTask(network_thread_.get(), [&] { transport_controller_.reset(); });
+}
+
+// Test that if the TransportController was created with the
+// `redetermine_role_on_ice_restart` parameter set to false, the role is *not*
+// redetermined on an ICE restart.
+TEST_F(JsepTransportControllerTest, IceRoleNotRedetermined) {
+ JsepTransportController::Config config;
+ config.redetermine_role_on_ice_restart = false;
+
+ CreateJsepTransportController(std::move(config));
+ // Let the `transport_controller_` be the controlled side initially.
+ auto remote_offer = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(remote_offer.get(), kAudioMid1, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ auto local_answer = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(local_answer.get(), kAudioMid1, kIceUfrag2, kIcePwd2,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_PASSIVE,
+ nullptr);
+
+ EXPECT_TRUE(transport_controller_
+ ->SetRemoteDescription(SdpType::kOffer, remote_offer.get())
+ .ok());
+ EXPECT_TRUE(transport_controller_
+ ->SetLocalDescription(SdpType::kAnswer, local_answer.get())
+ .ok());
+
+ auto fake_dtls = static_cast<FakeDtlsTransport*>(
+ transport_controller_->GetDtlsTransport(kAudioMid1));
+ EXPECT_EQ(cricket::ICEROLE_CONTROLLED,
+ fake_dtls->fake_ice_transport()->GetIceRole());
+
+ // New offer will trigger the ICE restart.
+ auto restart_local_offer = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(restart_local_offer.get(), kAudioMid1, kIceUfrag3, kIcePwd3,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ EXPECT_TRUE(
+ transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, restart_local_offer.get())
+ .ok());
+ EXPECT_EQ(cricket::ICEROLE_CONTROLLED,
+ fake_dtls->fake_ice_transport()->GetIceRole());
+}
+
+// Tests ICE-Lite mode in remote answer.
+TEST_F(JsepTransportControllerTest, SetIceRoleWhenIceLiteInRemoteAnswer) {
+ CreateJsepTransportController(JsepTransportController::Config());
+ auto local_offer = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(local_offer.get(), kAudioMid1, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ EXPECT_TRUE(transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, local_offer.get())
+ .ok());
+ auto fake_dtls = static_cast<FakeDtlsTransport*>(
+ transport_controller_->GetDtlsTransport(kAudioMid1));
+ EXPECT_EQ(cricket::ICEROLE_CONTROLLING,
+ fake_dtls->fake_ice_transport()->GetIceRole());
+ EXPECT_EQ(cricket::ICEMODE_FULL,
+ fake_dtls->fake_ice_transport()->remote_ice_mode());
+
+ auto remote_answer = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(remote_answer.get(), kAudioMid1, kIceUfrag2, kIcePwd2,
+ cricket::ICEMODE_LITE, cricket::CONNECTIONROLE_PASSIVE,
+ nullptr);
+ EXPECT_TRUE(transport_controller_
+ ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get())
+ .ok());
+ EXPECT_EQ(cricket::ICEROLE_CONTROLLING,
+ fake_dtls->fake_ice_transport()->GetIceRole());
+ EXPECT_EQ(cricket::ICEMODE_LITE,
+ fake_dtls->fake_ice_transport()->remote_ice_mode());
+}
+
+// Tests that the ICE role remains "controlling" if a subsequent offer that
+// does an ICE restart is received from an ICE lite endpoint. Regression test
+// for: https://crbug.com/710760
+TEST_F(JsepTransportControllerTest,
+ IceRoleIsControllingAfterIceRestartFromIceLiteEndpoint) {
+ CreateJsepTransportController(JsepTransportController::Config());
+ auto remote_offer = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(remote_offer.get(), kAudioMid1, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_LITE, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ auto local_answer = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(local_answer.get(), kAudioMid1, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_PASSIVE,
+ nullptr);
+ // Initial Offer/Answer exchange. If the remote offerer is ICE-Lite, then the
+ // local side is the controlling.
+ EXPECT_TRUE(transport_controller_
+ ->SetRemoteDescription(SdpType::kOffer, remote_offer.get())
+ .ok());
+ EXPECT_TRUE(transport_controller_
+ ->SetLocalDescription(SdpType::kAnswer, local_answer.get())
+ .ok());
+ auto fake_dtls = static_cast<FakeDtlsTransport*>(
+ transport_controller_->GetDtlsTransport(kAudioMid1));
+ EXPECT_EQ(cricket::ICEROLE_CONTROLLING,
+ fake_dtls->fake_ice_transport()->GetIceRole());
+
+ // In the subsequence remote offer triggers an ICE restart.
+ auto remote_offer2 = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(remote_offer2.get(), kAudioMid1, kIceUfrag2, kIcePwd2,
+ cricket::ICEMODE_LITE, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ auto local_answer2 = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(local_answer2.get(), kAudioMid1, kIceUfrag2, kIcePwd2,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_PASSIVE,
+ nullptr);
+ EXPECT_TRUE(transport_controller_
+ ->SetRemoteDescription(SdpType::kOffer, remote_offer2.get())
+ .ok());
+ EXPECT_TRUE(transport_controller_
+ ->SetLocalDescription(SdpType::kAnswer, local_answer2.get())
+ .ok());
+ fake_dtls = static_cast<FakeDtlsTransport*>(
+ transport_controller_->GetDtlsTransport(kAudioMid1));
+ // The local side is still the controlling role since the remote side is using
+ // ICE-Lite.
+ EXPECT_EQ(cricket::ICEROLE_CONTROLLING,
+ fake_dtls->fake_ice_transport()->GetIceRole());
+}
+
+// Tests that the SDP has more than one audio/video m= sections.
+TEST_F(JsepTransportControllerTest, MultipleMediaSectionsOfSameTypeWithBundle) {
+ CreateJsepTransportController(JsepTransportController::Config());
+ cricket::ContentGroup bundle_group(cricket::GROUP_TYPE_BUNDLE);
+ bundle_group.AddContentName(kAudioMid1);
+ bundle_group.AddContentName(kAudioMid2);
+ bundle_group.AddContentName(kVideoMid1);
+ bundle_group.AddContentName(kDataMid1);
+
+ auto local_offer = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(local_offer.get(), kAudioMid1, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddAudioSection(local_offer.get(), kAudioMid2, kIceUfrag2, kIcePwd2,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(local_offer.get(), kVideoMid1, kIceUfrag3, kIcePwd3,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddDataSection(local_offer.get(), kDataMid1,
+ cricket::MediaProtocolType::kSctp, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+
+ auto remote_answer = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(remote_answer.get(), kAudioMid1, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_PASSIVE,
+ nullptr);
+ AddAudioSection(remote_answer.get(), kAudioMid2, kIceUfrag2, kIcePwd2,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_PASSIVE,
+ nullptr);
+ AddVideoSection(remote_answer.get(), kVideoMid1, kIceUfrag3, kIcePwd3,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_PASSIVE,
+ nullptr);
+ AddDataSection(remote_answer.get(), kDataMid1,
+ cricket::MediaProtocolType::kSctp, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_PASSIVE,
+ nullptr);
+
+ local_offer->AddGroup(bundle_group);
+ remote_answer->AddGroup(bundle_group);
+
+ EXPECT_TRUE(transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, local_offer.get())
+ .ok());
+ EXPECT_TRUE(transport_controller_
+ ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get())
+ .ok());
+ // Verify that all the sections are bundled on kAudio1.
+ auto transport1 = transport_controller_->GetRtpTransport(kAudioMid1);
+ auto transport2 = transport_controller_->GetRtpTransport(kAudioMid2);
+ auto transport3 = transport_controller_->GetRtpTransport(kVideoMid1);
+ auto transport4 = transport_controller_->GetRtpTransport(kDataMid1);
+ EXPECT_EQ(transport1, transport2);
+ EXPECT_EQ(transport1, transport3);
+ EXPECT_EQ(transport1, transport4);
+
+ EXPECT_EQ(transport_controller_->LookupDtlsTransportByMid(kAudioMid1),
+ transport_controller_->LookupDtlsTransportByMid(kVideoMid1));
+
+ // Verify the OnRtpTransport/DtlsTransportChanged signals are fired correctly.
+ auto it = changed_rtp_transport_by_mid_.find(kAudioMid2);
+ ASSERT_TRUE(it != changed_rtp_transport_by_mid_.end());
+ EXPECT_EQ(transport1, it->second);
+ it = changed_rtp_transport_by_mid_.find(kAudioMid2);
+ ASSERT_TRUE(it != changed_rtp_transport_by_mid_.end());
+ EXPECT_EQ(transport1, it->second);
+ it = changed_rtp_transport_by_mid_.find(kVideoMid1);
+ ASSERT_TRUE(it != changed_rtp_transport_by_mid_.end());
+ EXPECT_EQ(transport1, it->second);
+ // Verify the DtlsTransport for the SCTP data channel is reset correctly.
+ auto it2 = changed_dtls_transport_by_mid_.find(kDataMid1);
+ ASSERT_TRUE(it2 != changed_dtls_transport_by_mid_.end());
+}
+
+TEST_F(JsepTransportControllerTest, MultipleBundleGroups) {
+ static const char kMid1Audio[] = "1_audio";
+ static const char kMid2Video[] = "2_video";
+ static const char kMid3Audio[] = "3_audio";
+ static const char kMid4Video[] = "4_video";
+
+ CreateJsepTransportController(JsepTransportController::Config());
+ cricket::ContentGroup bundle_group1(cricket::GROUP_TYPE_BUNDLE);
+ bundle_group1.AddContentName(kMid1Audio);
+ bundle_group1.AddContentName(kMid2Video);
+ cricket::ContentGroup bundle_group2(cricket::GROUP_TYPE_BUNDLE);
+ bundle_group2.AddContentName(kMid3Audio);
+ bundle_group2.AddContentName(kMid4Video);
+
+ auto local_offer = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(local_offer.get(), kMid1Audio, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(local_offer.get(), kMid2Video, kIceUfrag2, kIcePwd2,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddAudioSection(local_offer.get(), kMid3Audio, kIceUfrag3, kIcePwd3,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(local_offer.get(), kMid4Video, kIceUfrag4, kIcePwd4,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ local_offer->AddGroup(bundle_group1);
+ local_offer->AddGroup(bundle_group2);
+
+ auto remote_answer = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(remote_answer.get(), kMid1Audio, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(remote_answer.get(), kMid2Video, kIceUfrag2, kIcePwd2,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddAudioSection(remote_answer.get(), kMid3Audio, kIceUfrag3, kIcePwd3,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(remote_answer.get(), kMid4Video, kIceUfrag4, kIcePwd4,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ remote_answer->AddGroup(bundle_group1);
+ remote_answer->AddGroup(bundle_group2);
+
+ EXPECT_TRUE(transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, local_offer.get())
+ .ok());
+ EXPECT_TRUE(transport_controller_
+ ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get())
+ .ok());
+
+ // Verify that (kMid1Audio,kMid2Video) and (kMid3Audio,kMid4Video) form two
+ // distinct bundled groups.
+ auto mid1_transport = transport_controller_->GetRtpTransport(kMid1Audio);
+ auto mid2_transport = transport_controller_->GetRtpTransport(kMid2Video);
+ auto mid3_transport = transport_controller_->GetRtpTransport(kMid3Audio);
+ auto mid4_transport = transport_controller_->GetRtpTransport(kMid4Video);
+ EXPECT_EQ(mid1_transport, mid2_transport);
+ EXPECT_EQ(mid3_transport, mid4_transport);
+ EXPECT_NE(mid1_transport, mid3_transport);
+
+ auto it = changed_rtp_transport_by_mid_.find(kMid1Audio);
+ ASSERT_TRUE(it != changed_rtp_transport_by_mid_.end());
+ EXPECT_EQ(it->second, mid1_transport);
+
+ it = changed_rtp_transport_by_mid_.find(kMid2Video);
+ ASSERT_TRUE(it != changed_rtp_transport_by_mid_.end());
+ EXPECT_EQ(it->second, mid2_transport);
+
+ it = changed_rtp_transport_by_mid_.find(kMid3Audio);
+ ASSERT_TRUE(it != changed_rtp_transport_by_mid_.end());
+ EXPECT_EQ(it->second, mid3_transport);
+
+ it = changed_rtp_transport_by_mid_.find(kMid4Video);
+ ASSERT_TRUE(it != changed_rtp_transport_by_mid_.end());
+ EXPECT_EQ(it->second, mid4_transport);
+}
+
+TEST_F(JsepTransportControllerTest,
+ MultipleBundleGroupsInOfferButOnlyASingleGroupInAnswer) {
+ static const char kMid1Audio[] = "1_audio";
+ static const char kMid2Video[] = "2_video";
+ static const char kMid3Audio[] = "3_audio";
+ static const char kMid4Video[] = "4_video";
+
+ CreateJsepTransportController(JsepTransportController::Config());
+ cricket::ContentGroup bundle_group1(cricket::GROUP_TYPE_BUNDLE);
+ bundle_group1.AddContentName(kMid1Audio);
+ bundle_group1.AddContentName(kMid2Video);
+ cricket::ContentGroup bundle_group2(cricket::GROUP_TYPE_BUNDLE);
+ bundle_group2.AddContentName(kMid3Audio);
+ bundle_group2.AddContentName(kMid4Video);
+
+ auto local_offer = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(local_offer.get(), kMid1Audio, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(local_offer.get(), kMid2Video, kIceUfrag2, kIcePwd2,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddAudioSection(local_offer.get(), kMid3Audio, kIceUfrag3, kIcePwd3,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(local_offer.get(), kMid4Video, kIceUfrag4, kIcePwd4,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ // The offer has both groups.
+ local_offer->AddGroup(bundle_group1);
+ local_offer->AddGroup(bundle_group2);
+
+ auto remote_answer = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(remote_answer.get(), kMid1Audio, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(remote_answer.get(), kMid2Video, kIceUfrag2, kIcePwd2,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddAudioSection(remote_answer.get(), kMid3Audio, kIceUfrag3, kIcePwd3,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(remote_answer.get(), kMid4Video, kIceUfrag4, kIcePwd4,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ // The answer only has a single group! This is what happens when talking to an
+ // endpoint that does not have support for multiple BUNDLE groups.
+ remote_answer->AddGroup(bundle_group1);
+
+ EXPECT_TRUE(transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, local_offer.get())
+ .ok());
+ EXPECT_TRUE(transport_controller_
+ ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get())
+ .ok());
+
+ // Verify that (kMid1Audio,kMid2Video) form a bundle group, but that
+ // kMid3Audio and kMid4Video are unbundled.
+ auto mid1_transport = transport_controller_->GetRtpTransport(kMid1Audio);
+ auto mid2_transport = transport_controller_->GetRtpTransport(kMid2Video);
+ auto mid3_transport = transport_controller_->GetRtpTransport(kMid3Audio);
+ auto mid4_transport = transport_controller_->GetRtpTransport(kMid4Video);
+ EXPECT_EQ(mid1_transport, mid2_transport);
+ EXPECT_NE(mid3_transport, mid4_transport);
+ EXPECT_NE(mid1_transport, mid3_transport);
+ EXPECT_NE(mid1_transport, mid4_transport);
+}
+
+TEST_F(JsepTransportControllerTest, MultipleBundleGroupsIllegallyChangeGroup) {
+ static const char kMid1Audio[] = "1_audio";
+ static const char kMid2Video[] = "2_video";
+ static const char kMid3Audio[] = "3_audio";
+ static const char kMid4Video[] = "4_video";
+
+ CreateJsepTransportController(JsepTransportController::Config());
+ // Offer groups (kMid1Audio,kMid2Video) and (kMid3Audio,kMid4Video).
+ cricket::ContentGroup offer_bundle_group1(cricket::GROUP_TYPE_BUNDLE);
+ offer_bundle_group1.AddContentName(kMid1Audio);
+ offer_bundle_group1.AddContentName(kMid2Video);
+ cricket::ContentGroup offer_bundle_group2(cricket::GROUP_TYPE_BUNDLE);
+ offer_bundle_group2.AddContentName(kMid3Audio);
+ offer_bundle_group2.AddContentName(kMid4Video);
+ // Answer groups (kMid1Audio,kMid4Video) and (kMid3Audio,kMid2Video), i.e. the
+ // second group members have switched places. This should get rejected.
+ cricket::ContentGroup answer_bundle_group1(cricket::GROUP_TYPE_BUNDLE);
+ answer_bundle_group1.AddContentName(kMid1Audio);
+ answer_bundle_group1.AddContentName(kMid4Video);
+ cricket::ContentGroup answer_bundle_group2(cricket::GROUP_TYPE_BUNDLE);
+ answer_bundle_group2.AddContentName(kMid3Audio);
+ answer_bundle_group2.AddContentName(kMid2Video);
+
+ auto local_offer = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(local_offer.get(), kMid1Audio, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(local_offer.get(), kMid2Video, kIceUfrag2, kIcePwd2,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddAudioSection(local_offer.get(), kMid3Audio, kIceUfrag3, kIcePwd3,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(local_offer.get(), kMid4Video, kIceUfrag4, kIcePwd4,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ local_offer->AddGroup(offer_bundle_group1);
+ local_offer->AddGroup(offer_bundle_group2);
+
+ auto remote_answer = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(remote_answer.get(), kMid1Audio, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(remote_answer.get(), kMid2Video, kIceUfrag2, kIcePwd2,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddAudioSection(remote_answer.get(), kMid3Audio, kIceUfrag3, kIcePwd3,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(remote_answer.get(), kMid4Video, kIceUfrag4, kIcePwd4,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ remote_answer->AddGroup(answer_bundle_group1);
+ remote_answer->AddGroup(answer_bundle_group2);
+
+ // Accept offer.
+ EXPECT_TRUE(transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, local_offer.get())
+ .ok());
+ // Reject answer!
+ EXPECT_FALSE(transport_controller_
+ ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get())
+ .ok());
+}
+
+TEST_F(JsepTransportControllerTest, MultipleBundleGroupsInvalidSubsets) {
+ static const char kMid1Audio[] = "1_audio";
+ static const char kMid2Video[] = "2_video";
+ static const char kMid3Audio[] = "3_audio";
+ static const char kMid4Video[] = "4_video";
+
+ CreateJsepTransportController(JsepTransportController::Config());
+ // Offer groups (kMid1Audio,kMid2Video) and (kMid3Audio,kMid4Video).
+ cricket::ContentGroup offer_bundle_group1(cricket::GROUP_TYPE_BUNDLE);
+ offer_bundle_group1.AddContentName(kMid1Audio);
+ offer_bundle_group1.AddContentName(kMid2Video);
+ cricket::ContentGroup offer_bundle_group2(cricket::GROUP_TYPE_BUNDLE);
+ offer_bundle_group2.AddContentName(kMid3Audio);
+ offer_bundle_group2.AddContentName(kMid4Video);
+ // Answer groups (kMid1Audio) and (kMid2Video), i.e. the second group was
+ // moved from the first group. This should get rejected.
+ cricket::ContentGroup answer_bundle_group1(cricket::GROUP_TYPE_BUNDLE);
+ answer_bundle_group1.AddContentName(kMid1Audio);
+ cricket::ContentGroup answer_bundle_group2(cricket::GROUP_TYPE_BUNDLE);
+ answer_bundle_group2.AddContentName(kMid2Video);
+
+ auto local_offer = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(local_offer.get(), kMid1Audio, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(local_offer.get(), kMid2Video, kIceUfrag2, kIcePwd2,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddAudioSection(local_offer.get(), kMid3Audio, kIceUfrag3, kIcePwd3,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(local_offer.get(), kMid4Video, kIceUfrag4, kIcePwd4,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ local_offer->AddGroup(offer_bundle_group1);
+ local_offer->AddGroup(offer_bundle_group2);
+
+ auto remote_answer = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(remote_answer.get(), kMid1Audio, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(remote_answer.get(), kMid2Video, kIceUfrag2, kIcePwd2,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddAudioSection(remote_answer.get(), kMid3Audio, kIceUfrag3, kIcePwd3,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(remote_answer.get(), kMid4Video, kIceUfrag4, kIcePwd4,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ remote_answer->AddGroup(answer_bundle_group1);
+ remote_answer->AddGroup(answer_bundle_group2);
+
+ // Accept offer.
+ EXPECT_TRUE(transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, local_offer.get())
+ .ok());
+ // Reject answer!
+ EXPECT_FALSE(transport_controller_
+ ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get())
+ .ok());
+}
+
+// An m= section may belong to at most one BUNDLE group; a description in
+// which one MID appears in two groups must be rejected outright.
+TEST_F(JsepTransportControllerTest, MultipleBundleGroupsInvalidOverlap) {
+  static const char kMid1Audio[] = "1_audio";
+  static const char kMid2Video[] = "2_video";
+  static const char kMid3Audio[] = "3_audio";
+
+  CreateJsepTransportController(JsepTransportController::Config());
+
+  // Build a description with three m= sections.
+  auto description = std::make_unique<cricket::SessionDescription>();
+  AddAudioSection(description.get(), kMid1Audio, kIceUfrag1, kIcePwd1,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddVideoSection(description.get(), kMid2Video, kIceUfrag2, kIcePwd2,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddAudioSection(description.get(), kMid3Audio, kIceUfrag3, kIcePwd3,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+
+  // Group (kMid1Audio,kMid3Audio) and (kMid2Video,kMid3Audio): kMid3Audio is
+  // a member of both BUNDLE groups, which is illegal.
+  cricket::ContentGroup first_group(cricket::GROUP_TYPE_BUNDLE);
+  first_group.AddContentName(kMid1Audio);
+  first_group.AddContentName(kMid3Audio);
+  description->AddGroup(first_group);
+  cricket::ContentGroup second_group(cricket::GROUP_TYPE_BUNDLE);
+  second_group.AddContentName(kMid2Video);
+  second_group.AddContentName(kMid3Audio);
+  description->AddGroup(second_group);
+
+  // The controller must reject the overlapping groups whether the offer is
+  // applied locally or remotely.
+  EXPECT_FALSE(transport_controller_
+                   ->SetLocalDescription(SdpType::kOffer, description.get())
+                   .ok());
+  EXPECT_FALSE(transport_controller_
+                   ->SetRemoteDescription(SdpType::kOffer, description.get())
+                   .ok());
+}
+
+// An answer may shrink a proposed BUNDLE group. Here the answerer drops the
+// first (tagged) MID of each offered group; the dropped MIDs must end up on
+// their own transports while the remaining members keep sharing one.
+TEST_F(JsepTransportControllerTest, MultipleBundleGroupsUnbundleFirstMid) {
+  static const char kMid1Audio[] = "1_audio";
+  static const char kMid2Audio[] = "2_audio";
+  static const char kMid3Audio[] = "3_audio";
+  static const char kMid4Video[] = "4_video";
+  static const char kMid5Video[] = "5_video";
+  static const char kMid6Video[] = "6_video";
+
+  CreateJsepTransportController(JsepTransportController::Config());
+
+  // Offer and answer carry the same six m= sections: three audio sections
+  // sharing one ICE credential pair, three video sections sharing another.
+  auto add_sections = [&](cricket::SessionDescription* description) {
+    AddAudioSection(description, kMid1Audio, kIceUfrag1, kIcePwd1,
+                    cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                    nullptr);
+    AddAudioSection(description, kMid2Audio, kIceUfrag1, kIcePwd1,
+                    cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                    nullptr);
+    AddAudioSection(description, kMid3Audio, kIceUfrag1, kIcePwd1,
+                    cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                    nullptr);
+    AddVideoSection(description, kMid4Video, kIceUfrag2, kIcePwd2,
+                    cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                    nullptr);
+    AddVideoSection(description, kMid5Video, kIceUfrag2, kIcePwd2,
+                    cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                    nullptr);
+    AddVideoSection(description, kMid6Video, kIceUfrag2, kIcePwd2,
+                    cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                    nullptr);
+  };
+
+  // The offer proposes groups (kMid1Audio,kMid2Audio,kMid3Audio) and
+  // (kMid4Video,kMid5Video,kMid6Video).
+  auto offer = std::make_unique<cricket::SessionDescription>();
+  add_sections(offer.get());
+  cricket::ContentGroup offered_audio_group(cricket::GROUP_TYPE_BUNDLE);
+  offered_audio_group.AddContentName(kMid1Audio);
+  offered_audio_group.AddContentName(kMid2Audio);
+  offered_audio_group.AddContentName(kMid3Audio);
+  offer->AddGroup(offered_audio_group);
+  cricket::ContentGroup offered_video_group(cricket::GROUP_TYPE_BUNDLE);
+  offered_video_group.AddContentName(kMid4Video);
+  offered_video_group.AddContentName(kMid5Video);
+  offered_video_group.AddContentName(kMid6Video);
+  offer->AddGroup(offered_video_group);
+
+  // The answer moves the first MID out of each group, accepting only
+  // (kMid2Audio,kMid3Audio) and (kMid5Video,kMid6Video).
+  auto answer = std::make_unique<cricket::SessionDescription>();
+  add_sections(answer.get());
+  cricket::ContentGroup answered_audio_group(cricket::GROUP_TYPE_BUNDLE);
+  answered_audio_group.AddContentName(kMid2Audio);
+  answered_audio_group.AddContentName(kMid3Audio);
+  answer->AddGroup(answered_audio_group);
+  cricket::ContentGroup answered_video_group(cricket::GROUP_TYPE_BUNDLE);
+  answered_video_group.AddContentName(kMid5Video);
+  answered_video_group.AddContentName(kMid6Video);
+  answer->AddGroup(answered_video_group);
+
+  EXPECT_TRUE(transport_controller_
+                  ->SetLocalDescription(SdpType::kOffer, offer.get())
+                  .ok());
+  EXPECT_TRUE(transport_controller_
+                  ->SetRemoteDescription(SdpType::kAnswer, answer.get())
+                  .ok());
+
+  // The unbundled first MIDs each get their own transport; the remaining
+  // members of each group share one, and the two groups stay separate.
+  auto audio1_transport = transport_controller_->GetRtpTransport(kMid1Audio);
+  auto audio2_transport = transport_controller_->GetRtpTransport(kMid2Audio);
+  auto audio3_transport = transport_controller_->GetRtpTransport(kMid3Audio);
+  auto video4_transport = transport_controller_->GetRtpTransport(kMid4Video);
+  auto video5_transport = transport_controller_->GetRtpTransport(kMid5Video);
+  auto video6_transport = transport_controller_->GetRtpTransport(kMid6Video);
+  EXPECT_NE(audio1_transport, audio2_transport);
+  EXPECT_EQ(audio2_transport, audio3_transport);
+  EXPECT_NE(video4_transport, video5_transport);
+  EXPECT_EQ(video5_transport, video6_transport);
+  EXPECT_NE(audio1_transport, video4_transport);
+  EXPECT_NE(audio2_transport, video5_transport);
+}
+
+// Covers an answer that keeps the membership of each BUNDLE group intact but
+// re-orders the MIDs so a different MID is listed first (tagged).
+TEST_F(JsepTransportControllerTest, MultipleBundleGroupsChangeFirstMid) {
+  static const char kMid1Audio[] = "1_audio";
+  static const char kMid2Audio[] = "2_audio";
+  static const char kMid3Audio[] = "3_audio";
+  static const char kMid4Video[] = "4_video";
+  static const char kMid5Video[] = "5_video";
+  static const char kMid6Video[] = "6_video";
+
+  CreateJsepTransportController(JsepTransportController::Config());
+  // Offer groups (kMid1Audio,kMid2Audio,kMid3Audio) and
+  // (kMid4Video,kMid5Video,kMid6Video).
+  cricket::ContentGroup offer_bundle_group1(cricket::GROUP_TYPE_BUNDLE);
+  offer_bundle_group1.AddContentName(kMid1Audio);
+  offer_bundle_group1.AddContentName(kMid2Audio);
+  offer_bundle_group1.AddContentName(kMid3Audio);
+  cricket::ContentGroup offer_bundle_group2(cricket::GROUP_TYPE_BUNDLE);
+  offer_bundle_group2.AddContentName(kMid4Video);
+  offer_bundle_group2.AddContentName(kMid5Video);
+  offer_bundle_group2.AddContentName(kMid6Video);
+  // Answer groups (kMid2Audio,kMid1Audio,kMid3Audio) and
+  // (kMid5Video,kMid6Video,kMid4Video), i.e. we've changed which MID is first
+  // but accept the whole group.
+  cricket::ContentGroup answer_bundle_group1(cricket::GROUP_TYPE_BUNDLE);
+  answer_bundle_group1.AddContentName(kMid2Audio);
+  answer_bundle_group1.AddContentName(kMid1Audio);
+  answer_bundle_group1.AddContentName(kMid3Audio);
+  cricket::ContentGroup answer_bundle_group2(cricket::GROUP_TYPE_BUNDLE);
+  answer_bundle_group2.AddContentName(kMid5Video);
+  answer_bundle_group2.AddContentName(kMid6Video);
+  answer_bundle_group2.AddContentName(kMid4Video);
+
+  auto local_offer = std::make_unique<cricket::SessionDescription>();
+  AddAudioSection(local_offer.get(), kMid1Audio, kIceUfrag1, kIcePwd1,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddAudioSection(local_offer.get(), kMid2Audio, kIceUfrag1, kIcePwd1,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddAudioSection(local_offer.get(), kMid3Audio, kIceUfrag1, kIcePwd1,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddVideoSection(local_offer.get(), kMid4Video, kIceUfrag2, kIcePwd2,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddVideoSection(local_offer.get(), kMid5Video, kIceUfrag2, kIcePwd2,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddVideoSection(local_offer.get(), kMid6Video, kIceUfrag2, kIcePwd2,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  local_offer->AddGroup(offer_bundle_group1);
+  local_offer->AddGroup(offer_bundle_group2);
+
+  auto remote_answer = std::make_unique<cricket::SessionDescription>();
+  AddAudioSection(remote_answer.get(), kMid1Audio, kIceUfrag1, kIcePwd1,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddAudioSection(remote_answer.get(), kMid2Audio, kIceUfrag1, kIcePwd1,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddAudioSection(remote_answer.get(), kMid3Audio, kIceUfrag1, kIcePwd1,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddVideoSection(remote_answer.get(), kMid4Video, kIceUfrag2, kIcePwd2,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddVideoSection(remote_answer.get(), kMid5Video, kIceUfrag2, kIcePwd2,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddVideoSection(remote_answer.get(), kMid6Video, kIceUfrag2, kIcePwd2,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  remote_answer->AddGroup(answer_bundle_group1);
+  remote_answer->AddGroup(answer_bundle_group2);
+
+  EXPECT_TRUE(transport_controller_
+                  ->SetLocalDescription(SdpType::kOffer, local_offer.get())
+                  .ok());
+
+  // The fact that we accept this answer is actually a bug. If we accept the
+  // first MID to be in the group, we should also accept that it is the tagged
+  // one.
+  // TODO(https://crbug.com/webrtc/12699): When this issue is fixed, change this
+  // to EXPECT_FALSE and remove the below expectations about transports.
+  EXPECT_TRUE(transport_controller_
+                  ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get())
+                  .ok());
+  // All members of each group still share a transport, and the two groups
+  // remain on distinct transports.
+  auto mid1_transport = transport_controller_->GetRtpTransport(kMid1Audio);
+  auto mid2_transport = transport_controller_->GetRtpTransport(kMid2Audio);
+  auto mid3_transport = transport_controller_->GetRtpTransport(kMid3Audio);
+  auto mid4_transport = transport_controller_->GetRtpTransport(kMid4Video);
+  auto mid5_transport = transport_controller_->GetRtpTransport(kMid5Video);
+  auto mid6_transport = transport_controller_->GetRtpTransport(kMid6Video);
+  EXPECT_NE(mid1_transport, mid4_transport);
+  EXPECT_EQ(mid1_transport, mid2_transport);
+  EXPECT_EQ(mid2_transport, mid3_transport);
+  EXPECT_EQ(mid4_transport, mid5_transport);
+  EXPECT_EQ(mid5_transport, mid6_transport);
+}
+
+// Tests that a subsequent offer may add new m= sections to BUNDLE groups
+// that were established in an earlier offer/answer exchange, and that the
+// new sections join the groups' existing transports.
+TEST_F(JsepTransportControllerTest,
+       MultipleBundleGroupsSectionsAddedInSubsequentOffer) {
+  static const char kMid1Audio[] = "1_audio";
+  static const char kMid2Audio[] = "2_audio";
+  static const char kMid3Audio[] = "3_audio";
+  static const char kMid4Video[] = "4_video";
+  static const char kMid5Video[] = "5_video";
+  static const char kMid6Video[] = "6_video";
+
+  CreateJsepTransportController(JsepTransportController::Config());
+  // Start by grouping (kMid1Audio,kMid2Audio) and (kMid4Video,kMid5Video).
+  cricket::ContentGroup bundle_group1(cricket::GROUP_TYPE_BUNDLE);
+  bundle_group1.AddContentName(kMid1Audio);
+  bundle_group1.AddContentName(kMid2Audio);
+  cricket::ContentGroup bundle_group2(cricket::GROUP_TYPE_BUNDLE);
+  bundle_group2.AddContentName(kMid4Video);
+  bundle_group2.AddContentName(kMid5Video);
+
+  auto local_offer = std::make_unique<cricket::SessionDescription>();
+  AddAudioSection(local_offer.get(), kMid1Audio, kIceUfrag1, kIcePwd1,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddAudioSection(local_offer.get(), kMid2Audio, kIceUfrag1, kIcePwd1,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddVideoSection(local_offer.get(), kMid4Video, kIceUfrag2, kIcePwd2,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddVideoSection(local_offer.get(), kMid5Video, kIceUfrag2, kIcePwd2,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  local_offer->AddGroup(bundle_group1);
+  local_offer->AddGroup(bundle_group2);
+
+  auto remote_answer = std::make_unique<cricket::SessionDescription>();
+  AddAudioSection(remote_answer.get(), kMid1Audio, kIceUfrag1, kIcePwd1,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddAudioSection(remote_answer.get(), kMid2Audio, kIceUfrag1, kIcePwd1,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddVideoSection(remote_answer.get(), kMid4Video, kIceUfrag2, kIcePwd2,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddVideoSection(remote_answer.get(), kMid5Video, kIceUfrag2, kIcePwd2,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  remote_answer->AddGroup(bundle_group1);
+  remote_answer->AddGroup(bundle_group2);
+
+  EXPECT_TRUE(transport_controller_
+                  ->SetLocalDescription(SdpType::kOffer, local_offer.get())
+                  .ok());
+  EXPECT_TRUE(transport_controller_
+                  ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get())
+                  .ok());
+
+  // Add kMid3Audio and kMid6Video to the respective audio/video bundle
+  // groups. The existing groups are extended in place and re-used by the
+  // subsequent offer below. (A previous revision also declared unused
+  // `new_bundle_group1`/`new_bundle_group2` locals here; they were dead
+  // code and have been removed.)
+  bundle_group1.AddContentName(kMid3Audio);
+  bundle_group2.AddContentName(kMid6Video);
+
+  auto subsequent_offer = std::make_unique<cricket::SessionDescription>();
+  AddAudioSection(subsequent_offer.get(), kMid1Audio, kIceUfrag1, kIcePwd1,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddAudioSection(subsequent_offer.get(), kMid2Audio, kIceUfrag1, kIcePwd1,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddAudioSection(subsequent_offer.get(), kMid3Audio, kIceUfrag1, kIcePwd1,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddVideoSection(subsequent_offer.get(), kMid4Video, kIceUfrag2, kIcePwd2,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddVideoSection(subsequent_offer.get(), kMid5Video, kIceUfrag2, kIcePwd2,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddVideoSection(subsequent_offer.get(), kMid6Video, kIceUfrag2, kIcePwd2,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  subsequent_offer->AddGroup(bundle_group1);
+  subsequent_offer->AddGroup(bundle_group2);
+  EXPECT_TRUE(transport_controller_
+                  ->SetLocalDescription(SdpType::kOffer, subsequent_offer.get())
+                  .ok());
+  // The newly added sections share the transport of the group they joined;
+  // the audio and video groups remain on distinct transports.
+  auto mid1_transport = transport_controller_->GetRtpTransport(kMid1Audio);
+  auto mid2_transport = transport_controller_->GetRtpTransport(kMid2Audio);
+  auto mid3_transport = transport_controller_->GetRtpTransport(kMid3Audio);
+  auto mid4_transport = transport_controller_->GetRtpTransport(kMid4Video);
+  auto mid5_transport = transport_controller_->GetRtpTransport(kMid5Video);
+  auto mid6_transport = transport_controller_->GetRtpTransport(kMid6Video);
+  EXPECT_NE(mid1_transport, mid4_transport);
+  EXPECT_EQ(mid1_transport, mid2_transport);
+  EXPECT_EQ(mid2_transport, mid3_transport);
+  EXPECT_EQ(mid4_transport, mid5_transport);
+  EXPECT_EQ(mid5_transport, mid6_transport);
+}
+
+// Two separately negotiated BUNDLE groups may not be merged into one in a
+// subsequent offer without first unbundling their m= sections.
+TEST_F(JsepTransportControllerTest,
+       MultipleBundleGroupsCombinedInSubsequentOffer) {
+  static const char kMid1Audio[] = "1_audio";
+  static const char kMid2Audio[] = "2_audio";
+  static const char kMid3Video[] = "3_video";
+  static const char kMid4Video[] = "4_video";
+
+  CreateJsepTransportController(JsepTransportController::Config());
+  // Start by grouping (kMid1Audio,kMid2Audio) and (kMid3Video,kMid4Video).
+  cricket::ContentGroup bundle_group1(cricket::GROUP_TYPE_BUNDLE);
+  bundle_group1.AddContentName(kMid1Audio);
+  bundle_group1.AddContentName(kMid2Audio);
+  cricket::ContentGroup bundle_group2(cricket::GROUP_TYPE_BUNDLE);
+  bundle_group2.AddContentName(kMid3Video);
+  bundle_group2.AddContentName(kMid4Video);
+
+  auto local_offer = std::make_unique<cricket::SessionDescription>();
+  AddAudioSection(local_offer.get(), kMid1Audio, kIceUfrag1, kIcePwd1,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddAudioSection(local_offer.get(), kMid2Audio, kIceUfrag1, kIcePwd1,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddVideoSection(local_offer.get(), kMid3Video, kIceUfrag2, kIcePwd2,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddVideoSection(local_offer.get(), kMid4Video, kIceUfrag2, kIcePwd2,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  local_offer->AddGroup(bundle_group1);
+  local_offer->AddGroup(bundle_group2);
+
+  auto remote_answer = std::make_unique<cricket::SessionDescription>();
+  AddAudioSection(remote_answer.get(), kMid1Audio, kIceUfrag1, kIcePwd1,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddAudioSection(remote_answer.get(), kMid2Audio, kIceUfrag1, kIcePwd1,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddVideoSection(remote_answer.get(), kMid3Video, kIceUfrag2, kIcePwd2,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddVideoSection(remote_answer.get(), kMid4Video, kIceUfrag2, kIcePwd2,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  remote_answer->AddGroup(bundle_group1);
+  remote_answer->AddGroup(bundle_group2);
+
+  EXPECT_TRUE(transport_controller_
+                  ->SetLocalDescription(SdpType::kOffer, local_offer.get())
+                  .ok());
+  EXPECT_TRUE(transport_controller_
+                  ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get())
+                  .ok());
+
+  // Switch to grouping (kMid1Audio,kMid2Audio,kMid3Video,kMid4Video).
+  // This is illegal without first removing the m= sections from their
+  // previously negotiated groups.
+  cricket::ContentGroup new_bundle_group(cricket::GROUP_TYPE_BUNDLE);
+  new_bundle_group.AddContentName(kMid1Audio);
+  new_bundle_group.AddContentName(kMid2Audio);
+  new_bundle_group.AddContentName(kMid3Video);
+  new_bundle_group.AddContentName(kMid4Video);
+
+  auto subsequent_offer = std::make_unique<cricket::SessionDescription>();
+  AddAudioSection(subsequent_offer.get(), kMid1Audio, kIceUfrag1, kIcePwd1,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddAudioSection(subsequent_offer.get(), kMid2Audio, kIceUfrag1, kIcePwd1,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddVideoSection(subsequent_offer.get(), kMid3Video, kIceUfrag2, kIcePwd2,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddVideoSection(subsequent_offer.get(), kMid4Video, kIceUfrag2, kIcePwd2,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  subsequent_offer->AddGroup(new_bundle_group);
+  EXPECT_FALSE(
+      transport_controller_
+          ->SetLocalDescription(SdpType::kOffer, subsequent_offer.get())
+          .ok());
+}
+
+// A negotiated BUNDLE group may not be split into two groups in a subsequent
+// offer without first unbundling its m= sections.
+TEST_F(JsepTransportControllerTest,
+       MultipleBundleGroupsSplitInSubsequentOffer) {
+  static const char kMid1Audio[] = "1_audio";
+  static const char kMid2Audio[] = "2_audio";
+  static const char kMid3Video[] = "3_video";
+  static const char kMid4Video[] = "4_video";
+
+  CreateJsepTransportController(JsepTransportController::Config());
+  // Start by grouping (kMid1Audio,kMid2Audio,kMid3Video,kMid4Video).
+  cricket::ContentGroup bundle_group(cricket::GROUP_TYPE_BUNDLE);
+  bundle_group.AddContentName(kMid1Audio);
+  bundle_group.AddContentName(kMid2Audio);
+  bundle_group.AddContentName(kMid3Video);
+  bundle_group.AddContentName(kMid4Video);
+
+  auto local_offer = std::make_unique<cricket::SessionDescription>();
+  AddAudioSection(local_offer.get(), kMid1Audio, kIceUfrag1, kIcePwd1,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddAudioSection(local_offer.get(), kMid2Audio, kIceUfrag1, kIcePwd1,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddVideoSection(local_offer.get(), kMid3Video, kIceUfrag2, kIcePwd2,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddVideoSection(local_offer.get(), kMid4Video, kIceUfrag2, kIcePwd2,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  local_offer->AddGroup(bundle_group);
+
+  auto remote_answer = std::make_unique<cricket::SessionDescription>();
+  AddAudioSection(remote_answer.get(), kMid1Audio, kIceUfrag1, kIcePwd1,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddAudioSection(remote_answer.get(), kMid2Audio, kIceUfrag1, kIcePwd1,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddVideoSection(remote_answer.get(), kMid3Video, kIceUfrag2, kIcePwd2,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddVideoSection(remote_answer.get(), kMid4Video, kIceUfrag2, kIcePwd2,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  remote_answer->AddGroup(bundle_group);
+
+  EXPECT_TRUE(transport_controller_
+                  ->SetLocalDescription(SdpType::kOffer, local_offer.get())
+                  .ok());
+  EXPECT_TRUE(transport_controller_
+                  ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get())
+                  .ok());
+
+  // Switch to grouping (kMid1Audio,kMid2Audio) and (kMid3Video,kMid4Video).
+  // This is illegal without first removing the m= sections from their
+  // previously negotiated group.
+  cricket::ContentGroup new_bundle_group1(cricket::GROUP_TYPE_BUNDLE);
+  new_bundle_group1.AddContentName(kMid1Audio);
+  new_bundle_group1.AddContentName(kMid2Audio);
+  cricket::ContentGroup new_bundle_group2(cricket::GROUP_TYPE_BUNDLE);
+  new_bundle_group2.AddContentName(kMid3Video);
+  new_bundle_group2.AddContentName(kMid4Video);
+
+  auto subsequent_offer = std::make_unique<cricket::SessionDescription>();
+  AddAudioSection(subsequent_offer.get(), kMid1Audio, kIceUfrag1, kIcePwd1,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddAudioSection(subsequent_offer.get(), kMid2Audio, kIceUfrag1, kIcePwd1,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddVideoSection(subsequent_offer.get(), kMid3Video, kIceUfrag2, kIcePwd2,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddVideoSection(subsequent_offer.get(), kMid4Video, kIceUfrag2, kIcePwd2,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  subsequent_offer->AddGroup(new_bundle_group1);
+  subsequent_offer->AddGroup(new_bundle_group2);
+  EXPECT_FALSE(
+      transport_controller_
+          ->SetLocalDescription(SdpType::kOffer, subsequent_offer.get())
+          .ok());
+}
+
+// m= sections may not be moved between negotiated BUNDLE groups in a
+// subsequent offer without first being unbundled.
+TEST_F(JsepTransportControllerTest,
+       MultipleBundleGroupsShuffledInSubsequentOffer) {
+  static const char kMid1Audio[] = "1_audio";
+  static const char kMid2Audio[] = "2_audio";
+  static const char kMid3Video[] = "3_video";
+  static const char kMid4Video[] = "4_video";
+
+  CreateJsepTransportController(JsepTransportController::Config());
+  // Start by grouping (kMid1Audio,kMid2Audio) and (kMid3Video,kMid4Video).
+  cricket::ContentGroup bundle_group1(cricket::GROUP_TYPE_BUNDLE);
+  bundle_group1.AddContentName(kMid1Audio);
+  bundle_group1.AddContentName(kMid2Audio);
+  cricket::ContentGroup bundle_group2(cricket::GROUP_TYPE_BUNDLE);
+  bundle_group2.AddContentName(kMid3Video);
+  bundle_group2.AddContentName(kMid4Video);
+
+  auto local_offer = std::make_unique<cricket::SessionDescription>();
+  AddAudioSection(local_offer.get(), kMid1Audio, kIceUfrag1, kIcePwd1,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddAudioSection(local_offer.get(), kMid2Audio, kIceUfrag1, kIcePwd1,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddVideoSection(local_offer.get(), kMid3Video, kIceUfrag2, kIcePwd2,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddVideoSection(local_offer.get(), kMid4Video, kIceUfrag2, kIcePwd2,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  local_offer->AddGroup(bundle_group1);
+  local_offer->AddGroup(bundle_group2);
+
+  auto remote_answer = std::make_unique<cricket::SessionDescription>();
+  AddAudioSection(remote_answer.get(), kMid1Audio, kIceUfrag1, kIcePwd1,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddAudioSection(remote_answer.get(), kMid2Audio, kIceUfrag1, kIcePwd1,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddVideoSection(remote_answer.get(), kMid3Video, kIceUfrag2, kIcePwd2,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddVideoSection(remote_answer.get(), kMid4Video, kIceUfrag2, kIcePwd2,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  remote_answer->AddGroup(bundle_group1);
+  remote_answer->AddGroup(bundle_group2);
+
+  EXPECT_TRUE(transport_controller_
+                  ->SetLocalDescription(SdpType::kOffer, local_offer.get())
+                  .ok());
+  EXPECT_TRUE(transport_controller_
+                  ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get())
+                  .ok());
+
+  // Switch to grouping (kMid1Audio,kMid3Video) and (kMid2Audio,kMid4Video),
+  // i.e. each group swaps one member with the other.
+  // This is illegal without first removing the m= sections from their
+  // previously negotiated groups.
+  cricket::ContentGroup new_bundle_group1(cricket::GROUP_TYPE_BUNDLE);
+  new_bundle_group1.AddContentName(kMid1Audio);
+  new_bundle_group1.AddContentName(kMid3Video);
+  cricket::ContentGroup new_bundle_group2(cricket::GROUP_TYPE_BUNDLE);
+  new_bundle_group2.AddContentName(kMid2Audio);
+  new_bundle_group2.AddContentName(kMid4Video);
+
+  auto subsequent_offer = std::make_unique<cricket::SessionDescription>();
+  AddAudioSection(subsequent_offer.get(), kMid1Audio, kIceUfrag1, kIcePwd1,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddAudioSection(subsequent_offer.get(), kMid2Audio, kIceUfrag1, kIcePwd1,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddVideoSection(subsequent_offer.get(), kMid3Video, kIceUfrag2, kIcePwd2,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddVideoSection(subsequent_offer.get(), kMid4Video, kIceUfrag2, kIcePwd2,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  subsequent_offer->AddGroup(new_bundle_group1);
+  subsequent_offer->AddGroup(new_bundle_group2);
+  EXPECT_FALSE(
+      transport_controller_
+          ->SetLocalDescription(SdpType::kOffer, subsequent_offer.get())
+          .ok());
+}
+
+// Tests that only a subset of all the m= sections are bundled: kAudioMid1
+// and kVideoMid1 share a transport while kAudioMid2 stays on its own.
+TEST_F(JsepTransportControllerTest, BundleSubsetOfMediaSections) {
+  CreateJsepTransportController(JsepTransportController::Config());
+  cricket::ContentGroup bundle_group(cricket::GROUP_TYPE_BUNDLE);
+  bundle_group.AddContentName(kAudioMid1);
+  bundle_group.AddContentName(kVideoMid1);
+
+  auto local_offer = std::make_unique<cricket::SessionDescription>();
+  AddAudioSection(local_offer.get(), kAudioMid1, kIceUfrag1, kIcePwd1,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddAudioSection(local_offer.get(), kAudioMid2, kIceUfrag2, kIcePwd2,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddVideoSection(local_offer.get(), kVideoMid1, kIceUfrag3, kIcePwd3,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+
+  auto remote_answer = std::make_unique<cricket::SessionDescription>();
+  AddAudioSection(remote_answer.get(), kAudioMid1, kIceUfrag1, kIcePwd1,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_PASSIVE,
+                  nullptr);
+  AddAudioSection(remote_answer.get(), kAudioMid2, kIceUfrag2, kIcePwd2,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_PASSIVE,
+                  nullptr);
+  AddVideoSection(remote_answer.get(), kVideoMid1, kIceUfrag3, kIcePwd3,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_PASSIVE,
+                  nullptr);
+
+  local_offer->AddGroup(bundle_group);
+  remote_answer->AddGroup(bundle_group);
+  EXPECT_TRUE(transport_controller_
+                  ->SetLocalDescription(SdpType::kOffer, local_offer.get())
+                  .ok());
+  EXPECT_TRUE(transport_controller_
+                  ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get())
+                  .ok());
+
+  // Verify that only `kAudioMid1` and `kVideoMid1` are bundled.
+  auto transport1 = transport_controller_->GetRtpTransport(kAudioMid1);
+  auto transport2 = transport_controller_->GetRtpTransport(kAudioMid2);
+  auto transport3 = transport_controller_->GetRtpTransport(kVideoMid1);
+  EXPECT_NE(transport1, transport2);
+  EXPECT_EQ(transport1, transport3);
+
+  // Both the bundled and the unbundled m= section must have fired a
+  // transport-changed notification with the expected transport. Guard each
+  // lookup with ASSERT_TRUE so a missing entry fails the test instead of
+  // dereferencing the end iterator (undefined behavior).
+  auto it = changed_rtp_transport_by_mid_.find(kVideoMid1);
+  ASSERT_TRUE(it != changed_rtp_transport_by_mid_.end());
+  EXPECT_EQ(transport1, it->second);
+  it = changed_rtp_transport_by_mid_.find(kAudioMid2);
+  ASSERT_TRUE(it != changed_rtp_transport_by_mid_.end());
+  EXPECT_EQ(transport2, it->second);
+}
+
+// Tests that the initial offer/answer carry only a data section, and that
+// audio/video sections added by a subsequent offer join the existing
+// BUNDLE group and re-use the data section's transport.
+TEST_F(JsepTransportControllerTest, BundleOnDataSectionInSubsequentOffer) {
+  CreateJsepTransportController(JsepTransportController::Config());
+
+  // First exchange: a lone SCTP data section, which is also the entire
+  // BUNDLE group.
+  cricket::ContentGroup group(cricket::GROUP_TYPE_BUNDLE);
+  group.AddContentName(kDataMid1);
+
+  auto offer = std::make_unique<cricket::SessionDescription>();
+  AddDataSection(offer.get(), kDataMid1,
+                 cricket::MediaProtocolType::kSctp, kIceUfrag1, kIcePwd1,
+                 cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                 nullptr);
+  offer->AddGroup(group);
+
+  auto answer = std::make_unique<cricket::SessionDescription>();
+  AddDataSection(answer.get(), kDataMid1,
+                 cricket::MediaProtocolType::kSctp, kIceUfrag1, kIcePwd1,
+                 cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_PASSIVE,
+                 nullptr);
+  answer->AddGroup(group);
+
+  EXPECT_TRUE(transport_controller_
+                  ->SetLocalDescription(SdpType::kOffer, offer.get())
+                  .ok());
+  EXPECT_TRUE(transport_controller_
+                  ->SetRemoteDescription(SdpType::kAnswer, answer.get())
+                  .ok());
+  auto data_transport = transport_controller_->GetRtpTransport(kDataMid1);
+
+  // Second exchange: extend the same descriptions with audio and video
+  // sections.
+  AddAudioSection(offer.get(), kAudioMid1, kIceUfrag2, kIcePwd2,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddVideoSection(offer.get(), kVideoMid1, kIceUfrag3, kIcePwd3,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddAudioSection(answer.get(), kAudioMid1, kIceUfrag2, kIcePwd2,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_PASSIVE,
+                  nullptr);
+  AddVideoSection(answer.get(), kVideoMid1, kIceUfrag3, kIcePwd3,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_PASSIVE,
+                  nullptr);
+
+  // Grow the BUNDLE group to cover the new sections and swap it into both
+  // descriptions before renegotiating.
+  group.AddContentName(kAudioMid1);
+  group.AddContentName(kVideoMid1);
+  offer->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE);
+  answer->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE);
+  offer->AddGroup(group);
+  answer->AddGroup(group);
+
+  EXPECT_TRUE(transport_controller_
+                  ->SetLocalDescription(SdpType::kOffer, offer.get())
+                  .ok());
+  EXPECT_TRUE(transport_controller_
+                  ->SetRemoteDescription(SdpType::kAnswer, answer.get())
+                  .ok());
+
+  // Every section now rides on the transport originally created for the
+  // data section.
+  auto audio_transport = transport_controller_->GetRtpTransport(kAudioMid1);
+  auto video_transport = transport_controller_->GetRtpTransport(kVideoMid1);
+  EXPECT_EQ(data_transport, audio_transport);
+  EXPECT_EQ(data_transport, video_transport);
+}
+
+// When the answer rejects bundled m= sections, their RtpTransport and
+// DtlsTransport must be torn down and null transports signaled for them.
+TEST_F(JsepTransportControllerTest, VideoDataRejectedInAnswer) {
+  CreateJsepTransportController(JsepTransportController::Config());
+  cricket::ContentGroup bundle_group(cricket::GROUP_TYPE_BUNDLE);
+  bundle_group.AddContentName(kAudioMid1);
+  bundle_group.AddContentName(kVideoMid1);
+  bundle_group.AddContentName(kDataMid1);
+
+  auto local_offer = std::make_unique<cricket::SessionDescription>();
+  AddAudioSection(local_offer.get(), kAudioMid1, kIceUfrag1, kIcePwd1,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddVideoSection(local_offer.get(), kVideoMid1, kIceUfrag2, kIcePwd2,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                  nullptr);
+  AddDataSection(local_offer.get(), kDataMid1,
+                 cricket::MediaProtocolType::kSctp, kIceUfrag3, kIcePwd3,
+                 cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+                 nullptr);
+
+  auto remote_answer = std::make_unique<cricket::SessionDescription>();
+  AddAudioSection(remote_answer.get(), kAudioMid1, kIceUfrag1, kIcePwd1,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_PASSIVE,
+                  nullptr);
+  AddVideoSection(remote_answer.get(), kVideoMid1, kIceUfrag2, kIcePwd2,
+                  cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_PASSIVE,
+                  nullptr);
+  AddDataSection(remote_answer.get(), kDataMid1,
+                 cricket::MediaProtocolType::kSctp, kIceUfrag3, kIcePwd3,
+                 cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_PASSIVE,
+                 nullptr);
+  // Reject video and data section. Note: contents()[1] is the video section
+  // and contents()[2] the data section per the insertion order above; keep
+  // these indices in sync if sections are added or reordered.
+  remote_answer->contents()[1].rejected = true;
+  remote_answer->contents()[2].rejected = true;
+
+  local_offer->AddGroup(bundle_group);
+  remote_answer->AddGroup(bundle_group);
+
+  EXPECT_TRUE(transport_controller_
+                  ->SetLocalDescription(SdpType::kOffer, local_offer.get())
+                  .ok());
+  EXPECT_TRUE(transport_controller_
+                  ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get())
+                  .ok());
+
+  // Verify the RtpTransport/DtlsTransport is destroyed correctly.
+  EXPECT_EQ(nullptr, transport_controller_->GetRtpTransport(kVideoMid1));
+  EXPECT_EQ(nullptr, transport_controller_->GetDtlsTransport(kDataMid1));
+  // Verify the signals are fired correctly: each rejected MID reports a null
+  // transport.
+  auto it = changed_rtp_transport_by_mid_.find(kVideoMid1);
+  ASSERT_TRUE(it != changed_rtp_transport_by_mid_.end());
+  EXPECT_EQ(nullptr, it->second);
+  auto it2 = changed_dtls_transport_by_mid_.find(kDataMid1);
+  ASSERT_TRUE(it2 != changed_dtls_transport_by_mid_.end());
+  EXPECT_EQ(nullptr, it2->second);
+}
+
+// Tests that changing the bundled MID in subsequent offer/answer exchange is
+// not supported.
+// TODO(bugs.webrtc.org/6704): Change this test to expect success once issue is
+// fixed
+TEST_F(JsepTransportControllerTest, ChangeBundledMidNotSupported) {
+ CreateJsepTransportController(JsepTransportController::Config());
+ cricket::ContentGroup bundle_group(cricket::GROUP_TYPE_BUNDLE);
+ bundle_group.AddContentName(kAudioMid1);
+ bundle_group.AddContentName(kVideoMid1);
+
+ auto local_offer = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(local_offer.get(), kAudioMid1, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(local_offer.get(), kVideoMid1, kIceUfrag2, kIcePwd2,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+
+ auto remote_answer = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(remote_answer.get(), kAudioMid1, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_PASSIVE,
+ nullptr);
+ AddVideoSection(remote_answer.get(), kVideoMid1, kIceUfrag2, kIcePwd2,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_PASSIVE,
+ nullptr);
+
+ local_offer->AddGroup(bundle_group);
+ remote_answer->AddGroup(bundle_group);
+ EXPECT_TRUE(transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, local_offer.get())
+ .ok());
+ EXPECT_TRUE(transport_controller_
+ ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get())
+ .ok());
+ EXPECT_EQ(transport_controller_->GetRtpTransport(kAudioMid1),
+ transport_controller_->GetRtpTransport(kVideoMid1));
+
+ // Reorder the bundle group.
+ EXPECT_TRUE(bundle_group.RemoveContentName(kAudioMid1));
+ bundle_group.AddContentName(kAudioMid1);
+ // The answerer uses the new bundle group and now the bundle mid is changed to
+ // `kVideo1`.
+ remote_answer->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE);
+ remote_answer->AddGroup(bundle_group);
+ EXPECT_TRUE(transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, local_offer.get())
+ .ok());
+ EXPECT_FALSE(transport_controller_
+ ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get())
+ .ok());
+}
+// Test that rejecting only the first m= section of a BUNDLE group is treated as
+// an error, but rejecting all of them works as expected.
+TEST_F(JsepTransportControllerTest, RejectFirstContentInBundleGroup) {
+ CreateJsepTransportController(JsepTransportController::Config());
+ cricket::ContentGroup bundle_group(cricket::GROUP_TYPE_BUNDLE);
+ bundle_group.AddContentName(kAudioMid1);
+ bundle_group.AddContentName(kVideoMid1);
+ bundle_group.AddContentName(kDataMid1);
+
+ auto local_offer = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(local_offer.get(), kAudioMid1, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(local_offer.get(), kVideoMid1, kIceUfrag2, kIcePwd2,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddDataSection(local_offer.get(), kDataMid1,
+ cricket::MediaProtocolType::kSctp, kIceUfrag3, kIcePwd3,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+
+ auto remote_answer = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(remote_answer.get(), kAudioMid1, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_PASSIVE,
+ nullptr);
+ AddVideoSection(remote_answer.get(), kVideoMid1, kIceUfrag2, kIcePwd2,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_PASSIVE,
+ nullptr);
+ AddDataSection(remote_answer.get(), kDataMid1,
+ cricket::MediaProtocolType::kSctp, kIceUfrag3, kIcePwd3,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_PASSIVE,
+ nullptr);
+ // Reject audio content in answer.
+ remote_answer->contents()[0].rejected = true;
+
+ local_offer->AddGroup(bundle_group);
+ remote_answer->AddGroup(bundle_group);
+
+ EXPECT_TRUE(transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, local_offer.get())
+ .ok());
+ EXPECT_FALSE(transport_controller_
+ ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get())
+ .ok());
+
+ // Reject all the contents.
+ remote_answer->contents()[1].rejected = true;
+ remote_answer->contents()[2].rejected = true;
+ EXPECT_TRUE(transport_controller_
+ ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get())
+ .ok());
+ EXPECT_EQ(nullptr, transport_controller_->GetRtpTransport(kAudioMid1));
+ EXPECT_EQ(nullptr, transport_controller_->GetRtpTransport(kVideoMid1));
+ EXPECT_EQ(nullptr, transport_controller_->GetDtlsTransport(kDataMid1));
+}
+
+// Tests that applying non-RTCP-mux offer would fail when kRtcpMuxPolicyRequire
+// is used.
+TEST_F(JsepTransportControllerTest, ApplyNonRtcpMuxOfferWhenMuxingRequired) {
+ JsepTransportController::Config config;
+ config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyRequire;
+ CreateJsepTransportController(std::move(config));
+ auto local_offer = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(local_offer.get(), kAudioMid1, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+
+ local_offer->contents()[0].media_description()->set_rtcp_mux(false);
+ // Applying a non-RTCP-mux offer is expected to fail.
+ EXPECT_FALSE(transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, local_offer.get())
+ .ok());
+}
+
+// Tests that applying non-RTCP-mux answer would fail when kRtcpMuxPolicyRequire
+// is used.
+TEST_F(JsepTransportControllerTest, ApplyNonRtcpMuxAnswerWhenMuxingRequired) {
+ JsepTransportController::Config config;
+ config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyRequire;
+ CreateJsepTransportController(std::move(config));
+ auto local_offer = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(local_offer.get(), kAudioMid1, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ EXPECT_TRUE(transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, local_offer.get())
+ .ok());
+
+ auto remote_answer = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(remote_answer.get(), kAudioMid1, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_PASSIVE,
+ nullptr);
+ // Applying a non-RTCP-mux answer is expected to fail.
+ remote_answer->contents()[0].media_description()->set_rtcp_mux(false);
+ EXPECT_FALSE(transport_controller_
+ ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get())
+ .ok());
+}
+
+// This tests that the BUNDLE group in answer should be a subset of the offered
+// group.
+TEST_F(JsepTransportControllerTest,
+ AddContentToBundleGroupInAnswerNotSupported) {
+ CreateJsepTransportController(JsepTransportController::Config());
+ auto local_offer = CreateSessionDescriptionWithoutBundle();
+ auto remote_answer = CreateSessionDescriptionWithoutBundle();
+
+ cricket::ContentGroup offer_bundle_group(cricket::GROUP_TYPE_BUNDLE);
+ offer_bundle_group.AddContentName(kAudioMid1);
+ local_offer->AddGroup(offer_bundle_group);
+
+ cricket::ContentGroup answer_bundle_group(cricket::GROUP_TYPE_BUNDLE);
+ answer_bundle_group.AddContentName(kAudioMid1);
+ answer_bundle_group.AddContentName(kVideoMid1);
+ remote_answer->AddGroup(answer_bundle_group);
+ EXPECT_TRUE(transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, local_offer.get())
+ .ok());
+ EXPECT_FALSE(transport_controller_
+ ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get())
+ .ok());
+}
+
+// This tests that the BUNDLE group with non-existing MID should be rejectd.
+TEST_F(JsepTransportControllerTest, RejectBundleGroupWithNonExistingMid) {
+ CreateJsepTransportController(JsepTransportController::Config());
+ auto local_offer = CreateSessionDescriptionWithoutBundle();
+ auto remote_answer = CreateSessionDescriptionWithoutBundle();
+
+ cricket::ContentGroup invalid_bundle_group(cricket::GROUP_TYPE_BUNDLE);
+ // The BUNDLE group is invalid because there is no data section in the
+ // description.
+ invalid_bundle_group.AddContentName(kDataMid1);
+ local_offer->AddGroup(invalid_bundle_group);
+ remote_answer->AddGroup(invalid_bundle_group);
+
+ EXPECT_FALSE(transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, local_offer.get())
+ .ok());
+ EXPECT_FALSE(transport_controller_
+ ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get())
+ .ok());
+}
+
+// This tests that an answer shouldn't be able to remove an m= section from an
+// established group without rejecting it.
+TEST_F(JsepTransportControllerTest, RemoveContentFromBundleGroup) {
+ CreateJsepTransportController(JsepTransportController::Config());
+
+ auto local_offer = CreateSessionDescriptionWithBundleGroup();
+ auto remote_answer = CreateSessionDescriptionWithBundleGroup();
+ EXPECT_TRUE(transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, local_offer.get())
+ .ok());
+ EXPECT_TRUE(transport_controller_
+ ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get())
+ .ok());
+
+ // Do an re-offer/answer.
+ EXPECT_TRUE(transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, local_offer.get())
+ .ok());
+ auto new_answer = CreateSessionDescriptionWithoutBundle();
+ cricket::ContentGroup new_bundle_group(cricket::GROUP_TYPE_BUNDLE);
+ // The answer removes video from the BUNDLE group without rejecting it is
+ // invalid.
+ new_bundle_group.AddContentName(kAudioMid1);
+ new_answer->AddGroup(new_bundle_group);
+
+ // Applying invalid answer is expected to fail.
+ EXPECT_FALSE(transport_controller_
+ ->SetRemoteDescription(SdpType::kAnswer, new_answer.get())
+ .ok());
+
+ // Rejected the video content.
+ auto video_content = new_answer->GetContentByName(kVideoMid1);
+ ASSERT_TRUE(video_content);
+ video_content->rejected = true;
+ EXPECT_TRUE(transport_controller_
+ ->SetRemoteDescription(SdpType::kAnswer, new_answer.get())
+ .ok());
+}
+
+// Test that the JsepTransportController can process a new local and remote
+// description that changes the tagged BUNDLE group with the max-bundle policy
+// specified.
+// This is a regression test for bugs.webrtc.org/9954
+TEST_F(JsepTransportControllerTest, ChangeTaggedMediaSectionMaxBundle) {
+ CreateJsepTransportController(JsepTransportController::Config());
+
+ auto local_offer = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(local_offer.get(), kAudioMid1, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ cricket::ContentGroup bundle_group(cricket::GROUP_TYPE_BUNDLE);
+ bundle_group.AddContentName(kAudioMid1);
+ local_offer->AddGroup(bundle_group);
+ EXPECT_TRUE(transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, local_offer.get())
+ .ok());
+
+ std::unique_ptr<cricket::SessionDescription> remote_answer(
+ local_offer->Clone());
+ EXPECT_TRUE(transport_controller_
+ ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get())
+ .ok());
+
+ std::unique_ptr<cricket::SessionDescription> local_reoffer(
+ local_offer->Clone());
+ local_reoffer->contents()[0].rejected = true;
+ AddVideoSection(local_reoffer.get(), kVideoMid1, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ local_reoffer->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE);
+ cricket::ContentGroup new_bundle_group(cricket::GROUP_TYPE_BUNDLE);
+ new_bundle_group.AddContentName(kVideoMid1);
+ local_reoffer->AddGroup(new_bundle_group);
+
+ EXPECT_TRUE(transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, local_reoffer.get())
+ .ok());
+ std::unique_ptr<cricket::SessionDescription> remote_reanswer(
+ local_reoffer->Clone());
+ EXPECT_TRUE(
+ transport_controller_
+ ->SetRemoteDescription(SdpType::kAnswer, remote_reanswer.get())
+ .ok());
+}
+
+TEST_F(JsepTransportControllerTest, RollbackRestoresRejectedTransport) {
+ static const char kMid1Audio[] = "1_audio";
+
+ // Perform initial offer/answer.
+ CreateJsepTransportController(JsepTransportController::Config());
+ auto local_offer = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(local_offer.get(), kMid1Audio, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ std::unique_ptr<cricket::SessionDescription> remote_answer(
+ local_offer->Clone());
+ EXPECT_TRUE(transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, local_offer.get())
+ .ok());
+ EXPECT_TRUE(transport_controller_
+ ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get())
+ .ok());
+
+ auto mid1_transport = transport_controller_->GetRtpTransport(kMid1Audio);
+
+ // Apply a reoffer which rejects the m= section, causing the transport to be
+ // set to null.
+ auto local_reoffer = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(local_reoffer.get(), kMid1Audio, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ local_reoffer->contents()[0].rejected = true;
+
+ EXPECT_TRUE(transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, local_reoffer.get())
+ .ok());
+ auto old_mid1_transport = mid1_transport;
+ mid1_transport = transport_controller_->GetRtpTransport(kMid1Audio);
+ EXPECT_EQ(nullptr, mid1_transport);
+
+ // Rolling back shouldn't just create a new transport for MID 1, it should
+ // restore the old transport.
+ EXPECT_TRUE(transport_controller_->RollbackTransports().ok());
+ mid1_transport = transport_controller_->GetRtpTransport(kMid1Audio);
+ EXPECT_EQ(old_mid1_transport, mid1_transport);
+}
+
+// If an offer with a modified BUNDLE group causes a MID->transport mapping to
+// change, rollback should restore the previous mapping.
+TEST_F(JsepTransportControllerTest, RollbackRestoresPreviousTransportMapping) {
+ static const char kMid1Audio[] = "1_audio";
+ static const char kMid2Audio[] = "2_audio";
+ static const char kMid3Audio[] = "3_audio";
+
+ // Perform an initial offer/answer to establish a (kMid1Audio,kMid2Audio)
+ // group.
+ CreateJsepTransportController(JsepTransportController::Config());
+ cricket::ContentGroup bundle_group(cricket::GROUP_TYPE_BUNDLE);
+ bundle_group.AddContentName(kMid1Audio);
+ bundle_group.AddContentName(kMid2Audio);
+
+ auto local_offer = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(local_offer.get(), kMid1Audio, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(local_offer.get(), kMid2Audio, kIceUfrag2, kIcePwd2,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddAudioSection(local_offer.get(), kMid3Audio, kIceUfrag3, kIcePwd3,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ local_offer->AddGroup(bundle_group);
+
+ std::unique_ptr<cricket::SessionDescription> remote_answer(
+ local_offer->Clone());
+
+ EXPECT_TRUE(transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, local_offer.get())
+ .ok());
+ EXPECT_TRUE(transport_controller_
+ ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get())
+ .ok());
+
+ auto mid1_transport = transport_controller_->GetRtpTransport(kMid1Audio);
+ auto mid2_transport = transport_controller_->GetRtpTransport(kMid2Audio);
+ auto mid3_transport = transport_controller_->GetRtpTransport(kMid3Audio);
+ EXPECT_EQ(mid1_transport, mid2_transport);
+ EXPECT_NE(mid1_transport, mid3_transport);
+
+ // Apply a reoffer adding kMid3Audio to the group; transport mapping should
+ // change, even without an answer, since this is an existing group.
+ bundle_group.AddContentName(kMid3Audio);
+ auto local_reoffer = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(local_reoffer.get(), kMid1Audio, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(local_reoffer.get(), kMid2Audio, kIceUfrag2, kIcePwd2,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddAudioSection(local_reoffer.get(), kMid3Audio, kIceUfrag3, kIcePwd3,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ local_reoffer->AddGroup(bundle_group);
+
+ EXPECT_TRUE(transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, local_reoffer.get())
+ .ok());
+
+ // Store the old transport pointer and verify that the offer actually changed
+ // transports.
+ auto old_mid3_transport = mid3_transport;
+ mid1_transport = transport_controller_->GetRtpTransport(kMid1Audio);
+ mid2_transport = transport_controller_->GetRtpTransport(kMid2Audio);
+ mid3_transport = transport_controller_->GetRtpTransport(kMid3Audio);
+ EXPECT_EQ(mid1_transport, mid2_transport);
+ EXPECT_EQ(mid1_transport, mid3_transport);
+
+ // Rolling back shouldn't just create a new transport for MID 3, it should
+ // restore the old transport.
+ EXPECT_TRUE(transport_controller_->RollbackTransports().ok());
+ mid3_transport = transport_controller_->GetRtpTransport(kMid3Audio);
+ EXPECT_EQ(old_mid3_transport, mid3_transport);
+}
+
+// Test that if an offer adds a MID to a specific BUNDLE group and is then
+// rolled back, it can be added to a different BUNDLE group in a new offer.
+// This is effectively testing that rollback resets the BundleManager state.
+TEST_F(JsepTransportControllerTest, RollbackAndAddToDifferentBundleGroup) {
+ static const char kMid1Audio[] = "1_audio";
+ static const char kMid2Audio[] = "2_audio";
+ static const char kMid3Audio[] = "3_audio";
+
+ // Perform an initial offer/answer to establish two bundle groups, each with
+ // one MID.
+ CreateJsepTransportController(JsepTransportController::Config());
+ cricket::ContentGroup bundle_group1(cricket::GROUP_TYPE_BUNDLE);
+ bundle_group1.AddContentName(kMid1Audio);
+ cricket::ContentGroup bundle_group2(cricket::GROUP_TYPE_BUNDLE);
+ bundle_group2.AddContentName(kMid2Audio);
+
+ auto local_offer = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(local_offer.get(), kMid1Audio, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(local_offer.get(), kMid2Audio, kIceUfrag2, kIcePwd2,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ local_offer->AddGroup(bundle_group1);
+ local_offer->AddGroup(bundle_group2);
+
+ std::unique_ptr<cricket::SessionDescription> remote_answer(
+ local_offer->Clone());
+
+ EXPECT_TRUE(transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, local_offer.get())
+ .ok());
+ EXPECT_TRUE(transport_controller_
+ ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get())
+ .ok());
+
+ // Apply an offer that adds kMid3Audio to the first BUNDLE group.,
+ cricket::ContentGroup modified_bundle_group1(cricket::GROUP_TYPE_BUNDLE);
+ modified_bundle_group1.AddContentName(kMid1Audio);
+ modified_bundle_group1.AddContentName(kMid3Audio);
+ auto subsequent_offer_1 = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(subsequent_offer_1.get(), kMid1Audio, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(subsequent_offer_1.get(), kMid2Audio, kIceUfrag2, kIcePwd2,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(subsequent_offer_1.get(), kMid3Audio, kIceUfrag3, kIcePwd3,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ subsequent_offer_1->AddGroup(modified_bundle_group1);
+ subsequent_offer_1->AddGroup(bundle_group2);
+
+ EXPECT_TRUE(
+ transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, subsequent_offer_1.get())
+ .ok());
+
+ auto mid1_transport = transport_controller_->GetRtpTransport(kMid1Audio);
+ auto mid2_transport = transport_controller_->GetRtpTransport(kMid2Audio);
+ auto mid3_transport = transport_controller_->GetRtpTransport(kMid3Audio);
+ EXPECT_NE(mid1_transport, mid2_transport);
+ EXPECT_EQ(mid1_transport, mid3_transport);
+
+ // Rollback and expect the transport to be reset.
+ EXPECT_TRUE(transport_controller_->RollbackTransports().ok());
+ EXPECT_EQ(nullptr, transport_controller_->GetRtpTransport(kMid3Audio));
+
+ // Apply an offer that adds kMid3Audio to the second BUNDLE group.,
+ cricket::ContentGroup modified_bundle_group2(cricket::GROUP_TYPE_BUNDLE);
+ modified_bundle_group2.AddContentName(kMid2Audio);
+ modified_bundle_group2.AddContentName(kMid3Audio);
+ auto subsequent_offer_2 = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(subsequent_offer_2.get(), kMid1Audio, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(subsequent_offer_2.get(), kMid2Audio, kIceUfrag2, kIcePwd2,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(subsequent_offer_2.get(), kMid3Audio, kIceUfrag3, kIcePwd3,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ subsequent_offer_2->AddGroup(bundle_group1);
+ subsequent_offer_2->AddGroup(modified_bundle_group2);
+
+ EXPECT_TRUE(
+ transport_controller_
+ ->SetLocalDescription(SdpType::kOffer, subsequent_offer_2.get())
+ .ok());
+
+ mid1_transport = transport_controller_->GetRtpTransport(kMid1Audio);
+ mid2_transport = transport_controller_->GetRtpTransport(kMid2Audio);
+ mid3_transport = transport_controller_->GetRtpTransport(kMid3Audio);
+ EXPECT_NE(mid1_transport, mid2_transport);
+ EXPECT_EQ(mid2_transport, mid3_transport);
+}
+
+// Test that a bundle-only offer without rtcp-mux in the bundle-only section
+// is accepted.
+TEST_F(JsepTransportControllerTest, BundleOnlySectionDoesNotNeedRtcpMux) {
+ CreateJsepTransportController(JsepTransportController::Config());
+ cricket::ContentGroup bundle_group(cricket::GROUP_TYPE_BUNDLE);
+ bundle_group.AddContentName(kAudioMid1);
+ bundle_group.AddContentName(kVideoMid1);
+
+ auto offer = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(offer.get(), kAudioMid1, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ AddVideoSection(offer.get(), kVideoMid1, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ offer->AddGroup(bundle_group);
+
+ // Remove rtcp-mux and set bundle-only on the second content.
+ offer->contents()[1].media_description()->set_rtcp_mux(false);
+ offer->contents()[1].bundle_only = true;
+
+ EXPECT_TRUE(
+ transport_controller_->SetRemoteDescription(SdpType::kOffer, offer.get())
+ .ok());
+}
+
+// Test that with max-bundle a single unbundled m-line is accepted.
+TEST_F(JsepTransportControllerTest,
+ MaxBundleDoesNotRequireBundleForFirstMline) {
+ auto config = JsepTransportController::Config();
+ config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxBundle;
+ CreateJsepTransportController(std::move(config));
+
+ auto offer = std::make_unique<cricket::SessionDescription>();
+ AddAudioSection(offer.get(), kAudioMid1, kIceUfrag1, kIcePwd1,
+ cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS,
+ nullptr);
+ EXPECT_TRUE(
+ transport_controller_->SetRemoteDescription(SdpType::kOffer, offer.get())
+ .ok());
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/jsep_transport_unittest.cc b/third_party/libwebrtc/pc/jsep_transport_unittest.cc
new file mode 100644
index 0000000000..f057d37a0d
--- /dev/null
+++ b/third_party/libwebrtc/pc/jsep_transport_unittest.cc
@@ -0,0 +1,1386 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/jsep_transport.h"
+
+#include <stdint.h>
+#include <string.h>
+
+#include <ostream>
+#include <string>
+#include <tuple>
+#include <utility>
+
+#include "api/candidate.h"
+#include "media/base/fake_rtp.h"
+#include "p2p/base/fake_dtls_transport.h"
+#include "p2p/base/fake_ice_transport.h"
+#include "p2p/base/p2p_constants.h"
+#include "p2p/base/packet_transport_internal.h"
+#include "rtc_base/async_packet_socket.h"
+#include "rtc_base/buffer.h"
+#include "rtc_base/byte_order.h"
+#include "rtc_base/copy_on_write_buffer.h"
+#include "rtc_base/helpers.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/net_helper.h"
+#include "rtc_base/socket_address.h"
+#include "rtc_base/ssl_certificate.h"
+#include "rtc_base/ssl_identity.h"
+#include "rtc_base/third_party/sigslot/sigslot.h"
+#include "test/gtest.h"
+#include "test/scoped_key_value_config.h"
+
+namespace cricket {
+namespace {
+using webrtc::SdpType;
+
+static const char kIceUfrag1[] = "U001";
+static const char kIcePwd1[] = "TESTICEPWD00000000000001";
+static const char kIceUfrag2[] = "U002";
+static const char kIcePwd2[] = "TESTIEPWD00000000000002";
+static const char kTransportName[] = "Test Transport";
+
+enum class SrtpMode {
+ kSdes,
+ kDtlsSrtp,
+};
+
+struct NegotiateRoleParams {
+ ConnectionRole local_role;
+ ConnectionRole remote_role;
+ SdpType local_type;
+ SdpType remote_type;
+};
+
+std::ostream& operator<<(std::ostream& os, const ConnectionRole& role) {
+ std::string str = "invalid";
+ ConnectionRoleToString(role, &str);
+ os << str;
+ return os;
+}
+
+std::ostream& operator<<(std::ostream& os, const NegotiateRoleParams& param) {
+ os << "[Local role " << param.local_role << " Remote role "
+ << param.remote_role << " LocalType " << SdpTypeToString(param.local_type)
+ << " RemoteType " << SdpTypeToString(param.remote_type) << "]";
+ return os;
+}
+
+rtc::scoped_refptr<webrtc::IceTransportInterface> CreateIceTransport(
+ std::unique_ptr<FakeIceTransport> internal) {
+ if (!internal) {
+ return nullptr;
+ }
+
+ return rtc::make_ref_counted<FakeIceTransportWrapper>(std::move(internal));
+}
+
+class JsepTransport2Test : public ::testing::Test, public sigslot::has_slots<> {
+ protected:
+ std::unique_ptr<webrtc::SrtpTransport> CreateSdesTransport(
+ rtc::PacketTransportInternal* rtp_packet_transport,
+ rtc::PacketTransportInternal* rtcp_packet_transport) {
+ auto srtp_transport = std::make_unique<webrtc::SrtpTransport>(
+ rtcp_packet_transport == nullptr, field_trials_);
+
+ srtp_transport->SetRtpPacketTransport(rtp_packet_transport);
+ if (rtcp_packet_transport) {
+ srtp_transport->SetRtcpPacketTransport(rtp_packet_transport);
+ }
+ return srtp_transport;
+ }
+
+ std::unique_ptr<webrtc::DtlsSrtpTransport> CreateDtlsSrtpTransport(
+ cricket::DtlsTransportInternal* rtp_dtls_transport,
+ cricket::DtlsTransportInternal* rtcp_dtls_transport) {
+ auto dtls_srtp_transport = std::make_unique<webrtc::DtlsSrtpTransport>(
+ rtcp_dtls_transport == nullptr, field_trials_);
+ dtls_srtp_transport->SetDtlsTransports(rtp_dtls_transport,
+ rtcp_dtls_transport);
+ return dtls_srtp_transport;
+ }
+
+ // Create a new JsepTransport with a FakeDtlsTransport and a
+ // FakeIceTransport.
+ std::unique_ptr<JsepTransport> CreateJsepTransport2(bool rtcp_mux_enabled,
+ SrtpMode srtp_mode) {
+ auto ice_internal = std::make_unique<FakeIceTransport>(
+ kTransportName, ICE_CANDIDATE_COMPONENT_RTP);
+ auto rtp_dtls_transport =
+ std::make_unique<FakeDtlsTransport>(ice_internal.get());
+ auto ice = CreateIceTransport(std::move(ice_internal));
+
+ std::unique_ptr<FakeIceTransport> rtcp_ice_internal;
+ std::unique_ptr<FakeDtlsTransport> rtcp_dtls_transport;
+ if (!rtcp_mux_enabled) {
+ rtcp_ice_internal = std::make_unique<FakeIceTransport>(
+ kTransportName, ICE_CANDIDATE_COMPONENT_RTCP);
+ rtcp_dtls_transport =
+ std::make_unique<FakeDtlsTransport>(rtcp_ice_internal.get());
+ }
+ auto rtcp_ice = CreateIceTransport(std::move(rtcp_ice_internal));
+
+ std::unique_ptr<webrtc::RtpTransport> unencrypted_rtp_transport;
+ std::unique_ptr<webrtc::SrtpTransport> sdes_transport;
+ std::unique_ptr<webrtc::DtlsSrtpTransport> dtls_srtp_transport;
+ switch (srtp_mode) {
+ case SrtpMode::kSdes:
+ sdes_transport = CreateSdesTransport(rtp_dtls_transport.get(),
+ rtcp_dtls_transport.get());
+ sdes_transport_ = sdes_transport.get();
+ break;
+ case SrtpMode::kDtlsSrtp:
+ dtls_srtp_transport = CreateDtlsSrtpTransport(
+ rtp_dtls_transport.get(), rtcp_dtls_transport.get());
+ break;
+ default:
+ RTC_DCHECK_NOTREACHED();
+ }
+
+ auto jsep_transport = std::make_unique<JsepTransport>(
+ kTransportName, /*local_certificate=*/nullptr, std::move(ice),
+ std::move(rtcp_ice), std::move(unencrypted_rtp_transport),
+ std::move(sdes_transport), std::move(dtls_srtp_transport),
+ std::move(rtp_dtls_transport), std::move(rtcp_dtls_transport),
+ /*sctp_transport=*/nullptr,
+ /*rtcp_mux_active_callback=*/[&]() { OnRtcpMuxActive(); });
+
+ signal_rtcp_mux_active_received_ = false;
+ return jsep_transport;
+ }
+
+ JsepTransportDescription MakeJsepTransportDescription(
+ bool rtcp_mux_enabled,
+ const char* ufrag,
+ const char* pwd,
+ const rtc::scoped_refptr<rtc::RTCCertificate>& cert,
+ ConnectionRole role = CONNECTIONROLE_NONE) {
+ JsepTransportDescription jsep_description;
+ jsep_description.rtcp_mux_enabled = rtcp_mux_enabled;
+
+ std::unique_ptr<rtc::SSLFingerprint> fingerprint;
+ if (cert) {
+ fingerprint = rtc::SSLFingerprint::CreateFromCertificate(*cert);
+ }
+ jsep_description.transport_desc =
+ TransportDescription(std::vector<std::string>(), ufrag, pwd,
+ ICEMODE_FULL, role, fingerprint.get());
+ return jsep_description;
+ }
+
+ Candidate CreateCandidate(int component) {
+ Candidate c;
+ c.set_address(rtc::SocketAddress("192.168.1.1", 8000));
+ c.set_component(component);
+ c.set_protocol(UDP_PROTOCOL_NAME);
+ c.set_priority(1);
+ return c;
+ }
+
+ void OnRtcpMuxActive() { signal_rtcp_mux_active_received_ = true; }
+
+ rtc::AutoThread main_thread_;
+ std::unique_ptr<JsepTransport> jsep_transport_;
+ bool signal_rtcp_mux_active_received_ = false;
+ // The SrtpTransport is owned by `jsep_transport_`. Keep a raw pointer here
+ // for testing.
+ webrtc::SrtpTransport* sdes_transport_ = nullptr;
+
+ webrtc::test::ScopedKeyValueConfig field_trials_;
+};
+
+// The parameterized tests cover both cases when RTCP mux is enable and
+// disabled.
+class JsepTransport2WithRtcpMux : public JsepTransport2Test,
+ public ::testing::WithParamInterface<bool> {};
+
+// This test verifies the ICE parameters are properly applied to the transports.
TEST_P(JsepTransport2WithRtcpMux, SetIceParameters) {
  bool rtcp_mux_enabled = GetParam();
  jsep_transport_ = CreateJsepTransport2(rtcp_mux_enabled, SrtpMode::kDtlsSrtp);

  // Apply a local offer and verify the local ufrag/pwd reached the RTP ICE
  // transport (and the RTCP one when mux is disabled).
  JsepTransportDescription jsep_description;
  jsep_description.transport_desc = TransportDescription(kIceUfrag1, kIcePwd1);
  jsep_description.rtcp_mux_enabled = rtcp_mux_enabled;
  ASSERT_TRUE(
      jsep_transport_
          ->SetLocalJsepTransportDescription(jsep_description, SdpType::kOffer)
          .ok());
  auto fake_ice_transport = static_cast<FakeIceTransport*>(
      jsep_transport_->rtp_dtls_transport()->ice_transport());
  EXPECT_EQ(ICEMODE_FULL, fake_ice_transport->remote_ice_mode());
  EXPECT_EQ(kIceUfrag1, fake_ice_transport->ice_ufrag());
  EXPECT_EQ(kIcePwd1, fake_ice_transport->ice_pwd());
  if (!rtcp_mux_enabled) {
    fake_ice_transport = static_cast<FakeIceTransport*>(
        jsep_transport_->rtcp_dtls_transport()->ice_transport());
    ASSERT_TRUE(fake_ice_transport);
    EXPECT_EQ(ICEMODE_FULL, fake_ice_transport->remote_ice_mode());
    EXPECT_EQ(kIceUfrag1, fake_ice_transport->ice_ufrag());
    EXPECT_EQ(kIcePwd1, fake_ice_transport->ice_pwd());
  }

  // Apply a remote answer with different credentials and verify they show up
  // as the *remote* ICE parameters on the same transports.
  jsep_description.transport_desc = TransportDescription(kIceUfrag2, kIcePwd2);
  ASSERT_TRUE(jsep_transport_
                  ->SetRemoteJsepTransportDescription(jsep_description,
                                                      SdpType::kAnswer)
                  .ok());
  fake_ice_transport = static_cast<FakeIceTransport*>(
      jsep_transport_->rtp_dtls_transport()->ice_transport());
  EXPECT_EQ(ICEMODE_FULL, fake_ice_transport->remote_ice_mode());
  EXPECT_EQ(kIceUfrag2, fake_ice_transport->remote_ice_ufrag());
  EXPECT_EQ(kIcePwd2, fake_ice_transport->remote_ice_pwd());
  if (!rtcp_mux_enabled) {
    fake_ice_transport = static_cast<FakeIceTransport*>(
        jsep_transport_->rtcp_dtls_transport()->ice_transport());
    ASSERT_TRUE(fake_ice_transport);
    EXPECT_EQ(ICEMODE_FULL, fake_ice_transport->remote_ice_mode());
    EXPECT_EQ(kIceUfrag2, fake_ice_transport->remote_ice_ufrag());
    EXPECT_EQ(kIcePwd2, fake_ice_transport->remote_ice_pwd());
  }
}
+
+// Similarly, test DTLS parameters are properly applied to the transports.
TEST_P(JsepTransport2WithRtcpMux, SetDtlsParameters) {
  bool rtcp_mux_enabled = GetParam();
  jsep_transport_ = CreateJsepTransport2(rtcp_mux_enabled, SrtpMode::kDtlsSrtp);

  // Create certificates (one per side; KT_DEFAULT key type).
  rtc::scoped_refptr<rtc::RTCCertificate> local_cert =
      rtc::RTCCertificate::Create(
          rtc::SSLIdentity::Create("local", rtc::KT_DEFAULT));
  rtc::scoped_refptr<rtc::RTCCertificate> remote_cert =
      rtc::RTCCertificate::Create(
          rtc::SSLIdentity::Create("remote", rtc::KT_DEFAULT));
  jsep_transport_->SetLocalCertificate(local_cert);

  // Apply offer: local side offers "actpass".
  JsepTransportDescription local_description =
      MakeJsepTransportDescription(rtcp_mux_enabled, kIceUfrag1, kIcePwd1,
                                   local_cert, CONNECTIONROLE_ACTPASS);
  ASSERT_TRUE(
      jsep_transport_
          ->SetLocalJsepTransportDescription(local_description, SdpType::kOffer)
          .ok());
  // Apply Answer: remote side picks "active".
  JsepTransportDescription remote_description =
      MakeJsepTransportDescription(rtcp_mux_enabled, kIceUfrag2, kIcePwd2,
                                   remote_cert, CONNECTIONROLE_ACTIVE);
  ASSERT_TRUE(jsep_transport_
                  ->SetRemoteJsepTransportDescription(remote_description,
                                                      SdpType::kAnswer)
                  .ok());

  // Verify that SSL role and remote fingerprint were set correctly based on
  // transport descriptions.
  auto role = jsep_transport_->GetDtlsRole();
  ASSERT_TRUE(role);
  EXPECT_EQ(rtc::SSL_SERVER, role);  // Because remote description was "active".
  auto fake_dtls =
      static_cast<FakeDtlsTransport*>(jsep_transport_->rtp_dtls_transport());
  EXPECT_EQ(remote_description.transport_desc.identity_fingerprint->ToString(),
            fake_dtls->dtls_fingerprint().ToString());

  // Without mux, the dedicated RTCP DTLS transport gets the same fingerprint.
  if (!rtcp_mux_enabled) {
    auto fake_rtcp_dtls =
        static_cast<FakeDtlsTransport*>(jsep_transport_->rtcp_dtls_transport());
    EXPECT_EQ(
        remote_description.transport_desc.identity_fingerprint->ToString(),
        fake_rtcp_dtls->dtls_fingerprint().ToString());
  }
}
+
+// Same as above test, but with remote transport description using
+// CONNECTIONROLE_PASSIVE, expecting SSL_CLIENT role.
TEST_P(JsepTransport2WithRtcpMux, SetDtlsParametersWithPassiveAnswer) {
  bool rtcp_mux_enabled = GetParam();
  jsep_transport_ = CreateJsepTransport2(rtcp_mux_enabled, SrtpMode::kDtlsSrtp);

  // Create certificates (one per side; KT_DEFAULT key type).
  rtc::scoped_refptr<rtc::RTCCertificate> local_cert =
      rtc::RTCCertificate::Create(
          rtc::SSLIdentity::Create("local", rtc::KT_DEFAULT));
  rtc::scoped_refptr<rtc::RTCCertificate> remote_cert =
      rtc::RTCCertificate::Create(
          rtc::SSLIdentity::Create("remote", rtc::KT_DEFAULT));
  jsep_transport_->SetLocalCertificate(local_cert);

  // Apply offer: local side offers "actpass".
  JsepTransportDescription local_description =
      MakeJsepTransportDescription(rtcp_mux_enabled, kIceUfrag1, kIcePwd1,
                                   local_cert, CONNECTIONROLE_ACTPASS);
  ASSERT_TRUE(
      jsep_transport_
          ->SetLocalJsepTransportDescription(local_description, SdpType::kOffer)
          .ok());
  // Apply Answer: remote side picks "passive" this time.
  JsepTransportDescription remote_description =
      MakeJsepTransportDescription(rtcp_mux_enabled, kIceUfrag2, kIcePwd2,
                                   remote_cert, CONNECTIONROLE_PASSIVE);
  ASSERT_TRUE(jsep_transport_
                  ->SetRemoteJsepTransportDescription(remote_description,
                                                      SdpType::kAnswer)
                  .ok());

  // Verify that SSL role and remote fingerprint were set correctly based on
  // transport descriptions.
  auto role = jsep_transport_->GetDtlsRole();
  ASSERT_TRUE(role);
  EXPECT_EQ(rtc::SSL_CLIENT,
            role);  // Because remote description was "passive".
  auto fake_dtls =
      static_cast<FakeDtlsTransport*>(jsep_transport_->rtp_dtls_transport());
  EXPECT_EQ(remote_description.transport_desc.identity_fingerprint->ToString(),
            fake_dtls->dtls_fingerprint().ToString());

  // Without mux, the dedicated RTCP DTLS transport gets the same fingerprint.
  if (!rtcp_mux_enabled) {
    auto fake_rtcp_dtls =
        static_cast<FakeDtlsTransport*>(jsep_transport_->rtcp_dtls_transport());
    EXPECT_EQ(
        remote_description.transport_desc.identity_fingerprint->ToString(),
        fake_rtcp_dtls->dtls_fingerprint().ToString());
  }
}
+
// Tests SetNeedsIceRestartFlag and needs_ice_restart(), ensuring that once the
// flag is set it only reverts to "false" after an ICE restart has been
// initiated (new ufrag/pwd in an offer/answer).
TEST_P(JsepTransport2WithRtcpMux, NeedsIceRestart) {
  bool rtcp_mux_enabled = GetParam();
  jsep_transport_ = CreateJsepTransport2(rtcp_mux_enabled, SrtpMode::kDtlsSrtp);

  // Use the same JsepTransportDescription for both offer and answer.
  JsepTransportDescription description;
  description.transport_desc = TransportDescription(kIceUfrag1, kIcePwd1);
  ASSERT_TRUE(
      jsep_transport_
          ->SetLocalJsepTransportDescription(description, SdpType::kOffer)
          .ok());
  ASSERT_TRUE(
      jsep_transport_
          ->SetRemoteJsepTransportDescription(description, SdpType::kAnswer)
          .ok());
  // Flag initially should be false.
  EXPECT_FALSE(jsep_transport_->needs_ice_restart());

  // After setting flag, it should be true.
  jsep_transport_->SetNeedsIceRestartFlag();
  EXPECT_TRUE(jsep_transport_->needs_ice_restart());

  // An offer/answer with the *same* ICE credentials is not a restart, so the
  // flag must stay set.
  ASSERT_TRUE(
      jsep_transport_
          ->SetLocalJsepTransportDescription(description, SdpType::kOffer)
          .ok());
  ASSERT_TRUE(
      jsep_transport_
          ->SetRemoteJsepTransportDescription(description, SdpType::kAnswer)
          .ok());
  EXPECT_TRUE(jsep_transport_->needs_ice_restart());

  // Doing an offer/answer that restarts ICE should clear the flag.
  description.transport_desc = TransportDescription(kIceUfrag2, kIcePwd2);
  ASSERT_TRUE(
      jsep_transport_
          ->SetLocalJsepTransportDescription(description, SdpType::kOffer)
          .ok());
  ASSERT_TRUE(
      jsep_transport_
          ->SetRemoteJsepTransportDescription(description, SdpType::kAnswer)
          .ok());
  EXPECT_FALSE(jsep_transport_->needs_ice_restart());
}
+
+TEST_P(JsepTransport2WithRtcpMux, GetStats) {
+ bool rtcp_mux_enabled = GetParam();
+ jsep_transport_ = CreateJsepTransport2(rtcp_mux_enabled, SrtpMode::kDtlsSrtp);
+
+ size_t expected_stats_size = rtcp_mux_enabled ? 1u : 2u;
+ TransportStats stats;
+ EXPECT_TRUE(jsep_transport_->GetStats(&stats));
+ EXPECT_EQ(expected_stats_size, stats.channel_stats.size());
+ EXPECT_EQ(ICE_CANDIDATE_COMPONENT_RTP, stats.channel_stats[0].component);
+ if (!rtcp_mux_enabled) {
+ EXPECT_EQ(ICE_CANDIDATE_COMPONENT_RTCP, stats.channel_stats[1].component);
+ }
+}
+
+// Tests that VerifyCertificateFingerprint only returns true when the
+// certificate matches the fingerprint.
TEST_P(JsepTransport2WithRtcpMux, VerifyCertificateFingerprint) {
  bool rtcp_mux_enabled = GetParam();
  jsep_transport_ = CreateJsepTransport2(rtcp_mux_enabled, SrtpMode::kDtlsSrtp);

  // Null certificate + null fingerprint must be rejected.
  EXPECT_FALSE(
      jsep_transport_->VerifyCertificateFingerprint(nullptr, nullptr).ok());
  rtc::KeyType key_types[] = {rtc::KT_RSA, rtc::KT_ECDSA};

  // Exercise both supported key types.
  for (auto& key_type : key_types) {
    rtc::scoped_refptr<rtc::RTCCertificate> certificate =
        rtc::RTCCertificate::Create(
            rtc::SSLIdentity::Create("testing", key_type));
    ASSERT_NE(nullptr, certificate);

    // Derive a fingerprint using the certificate's own digest algorithm; this
    // one must verify.
    std::string digest_algorithm;
    ASSERT_TRUE(certificate->GetSSLCertificate().GetSignatureDigestAlgorithm(
        &digest_algorithm));
    ASSERT_FALSE(digest_algorithm.empty());
    std::unique_ptr<rtc::SSLFingerprint> good_fingerprint =
        rtc::SSLFingerprint::CreateUnique(digest_algorithm,
                                          *certificate->identity());
    ASSERT_NE(nullptr, good_fingerprint);

    EXPECT_TRUE(jsep_transport_
                    ->VerifyCertificateFingerprint(certificate.get(),
                                                   good_fingerprint.get())
                    .ok());
    // Either argument missing must fail.
    EXPECT_FALSE(jsep_transport_
                     ->VerifyCertificateFingerprint(certificate.get(), nullptr)
                     .ok());
    EXPECT_FALSE(
        jsep_transport_
            ->VerifyCertificateFingerprint(nullptr, good_fingerprint.get())
            .ok());

    // Corrupt the digest so the fingerprint no longer matches.
    rtc::SSLFingerprint bad_fingerprint = *good_fingerprint;
    bad_fingerprint.digest.AppendData("0", 1);
    EXPECT_FALSE(
        jsep_transport_
            ->VerifyCertificateFingerprint(certificate.get(), &bad_fingerprint)
            .ok());
  }
}
+
+// Tests the logic of DTLS role negotiation for an initial offer/answer.
TEST_P(JsepTransport2WithRtcpMux, ValidDtlsRoleNegotiation) {
  bool rtcp_mux_enabled = GetParam();
  // Just use the same certificate for both sides; doesn't really matter in a
  // non end-to-end test.
  rtc::scoped_refptr<rtc::RTCCertificate> certificate =
      rtc::RTCCertificate::Create(
          rtc::SSLIdentity::Create("testing", rtc::KT_ECDSA));

  JsepTransportDescription local_description = MakeJsepTransportDescription(
      rtcp_mux_enabled, kIceUfrag1, kIcePwd1, certificate);
  JsepTransportDescription remote_description = MakeJsepTransportDescription(
      rtcp_mux_enabled, kIceUfrag2, kIcePwd2, certificate);

  // Parameters which set the SSL role to SSL_CLIENT.
  // Fields are {local_role, remote_role, local_type, remote_type}.
  NegotiateRoleParams valid_client_params[] = {
      {CONNECTIONROLE_ACTIVE, CONNECTIONROLE_ACTPASS, SdpType::kAnswer,
       SdpType::kOffer},
      {CONNECTIONROLE_ACTIVE, CONNECTIONROLE_ACTPASS, SdpType::kPrAnswer,
       SdpType::kOffer},
      {CONNECTIONROLE_ACTPASS, CONNECTIONROLE_PASSIVE, SdpType::kOffer,
       SdpType::kAnswer},
      {CONNECTIONROLE_ACTPASS, CONNECTIONROLE_PASSIVE, SdpType::kOffer,
       SdpType::kPrAnswer},
      // Combinations permitted by RFC 8842 section 5.3
      {CONNECTIONROLE_ACTIVE, CONNECTIONROLE_PASSIVE, SdpType::kAnswer,
       SdpType::kOffer},
      {CONNECTIONROLE_ACTIVE, CONNECTIONROLE_PASSIVE, SdpType::kPrAnswer,
       SdpType::kOffer},
  };

  // Each combination should negotiate successfully and leave us as the DTLS
  // client. A fresh transport is created per combination.
  for (auto& param : valid_client_params) {
    jsep_transport_ =
        CreateJsepTransport2(rtcp_mux_enabled, SrtpMode::kDtlsSrtp);
    jsep_transport_->SetLocalCertificate(certificate);

    local_description.transport_desc.connection_role = param.local_role;
    remote_description.transport_desc.connection_role = param.remote_role;

    // Set the offer first.
    if (param.local_type == SdpType::kOffer) {
      EXPECT_TRUE(jsep_transport_
                      ->SetLocalJsepTransportDescription(local_description,
                                                         param.local_type)
                      .ok());
      EXPECT_TRUE(jsep_transport_
                      ->SetRemoteJsepTransportDescription(remote_description,
                                                          param.remote_type)
                      .ok());
    } else {
      EXPECT_TRUE(jsep_transport_
                      ->SetRemoteJsepTransportDescription(remote_description,
                                                          param.remote_type)
                      .ok());
      EXPECT_TRUE(jsep_transport_
                      ->SetLocalJsepTransportDescription(local_description,
                                                         param.local_type)
                      .ok());
    }
    EXPECT_EQ(rtc::SSL_CLIENT, *jsep_transport_->GetDtlsRole());
  }

  // Parameters which set the SSL role to SSL_SERVER.
  NegotiateRoleParams valid_server_params[] = {
      {CONNECTIONROLE_PASSIVE, CONNECTIONROLE_ACTPASS, SdpType::kAnswer,
       SdpType::kOffer},
      {CONNECTIONROLE_PASSIVE, CONNECTIONROLE_ACTPASS, SdpType::kPrAnswer,
       SdpType::kOffer},
      {CONNECTIONROLE_ACTPASS, CONNECTIONROLE_ACTIVE, SdpType::kOffer,
       SdpType::kAnswer},
      {CONNECTIONROLE_ACTPASS, CONNECTIONROLE_ACTIVE, SdpType::kOffer,
       SdpType::kPrAnswer},
      // Combinations permitted by RFC 8842 section 5.3
      {CONNECTIONROLE_PASSIVE, CONNECTIONROLE_ACTIVE, SdpType::kPrAnswer,
       SdpType::kOffer},
  };

  // Mirror of the loop above, expecting the server role.
  for (auto& param : valid_server_params) {
    jsep_transport_ =
        CreateJsepTransport2(rtcp_mux_enabled, SrtpMode::kDtlsSrtp);
    jsep_transport_->SetLocalCertificate(certificate);

    local_description.transport_desc.connection_role = param.local_role;
    remote_description.transport_desc.connection_role = param.remote_role;

    // Set the offer first.
    if (param.local_type == SdpType::kOffer) {
      EXPECT_TRUE(jsep_transport_
                      ->SetLocalJsepTransportDescription(local_description,
                                                         param.local_type)
                      .ok());
      EXPECT_TRUE(jsep_transport_
                      ->SetRemoteJsepTransportDescription(remote_description,
                                                          param.remote_type)
                      .ok());
    } else {
      EXPECT_TRUE(jsep_transport_
                      ->SetRemoteJsepTransportDescription(remote_description,
                                                          param.remote_type)
                      .ok());
      EXPECT_TRUE(jsep_transport_
                      ->SetLocalJsepTransportDescription(local_description,
                                                         param.local_type)
                      .ok());
    }
    EXPECT_EQ(rtc::SSL_SERVER, *jsep_transport_->GetDtlsRole());
  }
}
+
+// Tests the logic of DTLS role negotiation for an initial offer/answer.
TEST_P(JsepTransport2WithRtcpMux, InvalidDtlsRoleNegotiation) {
  bool rtcp_mux_enabled = GetParam();
  // Just use the same certificate for both sides; doesn't really matter in a
  // non end-to-end test.
  rtc::scoped_refptr<rtc::RTCCertificate> certificate =
      rtc::RTCCertificate::Create(
          rtc::SSLIdentity::Create("testing", rtc::KT_ECDSA));

  JsepTransportDescription local_description = MakeJsepTransportDescription(
      rtcp_mux_enabled, kIceUfrag1, kIcePwd1, certificate);
  JsepTransportDescription remote_description = MakeJsepTransportDescription(
      rtcp_mux_enabled, kIceUfrag2, kIcePwd2, certificate);

  // Both sides claiming the same role can never negotiate successfully.
  // Fields are {local_role, remote_role, local_type, remote_type}.
  NegotiateRoleParams duplicate_params[] = {
      {CONNECTIONROLE_ACTIVE, CONNECTIONROLE_ACTIVE, SdpType::kAnswer,
       SdpType::kOffer},
      {CONNECTIONROLE_ACTPASS, CONNECTIONROLE_ACTPASS, SdpType::kAnswer,
       SdpType::kOffer},
      {CONNECTIONROLE_PASSIVE, CONNECTIONROLE_PASSIVE, SdpType::kAnswer,
       SdpType::kOffer},
      {CONNECTIONROLE_ACTIVE, CONNECTIONROLE_ACTIVE, SdpType::kPrAnswer,
       SdpType::kOffer},
      {CONNECTIONROLE_ACTPASS, CONNECTIONROLE_ACTPASS, SdpType::kPrAnswer,
       SdpType::kOffer},
      {CONNECTIONROLE_PASSIVE, CONNECTIONROLE_PASSIVE, SdpType::kPrAnswer,
       SdpType::kOffer},
      {CONNECTIONROLE_ACTIVE, CONNECTIONROLE_ACTIVE, SdpType::kOffer,
       SdpType::kAnswer},
      {CONNECTIONROLE_ACTPASS, CONNECTIONROLE_ACTPASS, SdpType::kOffer,
       SdpType::kAnswer},
      {CONNECTIONROLE_PASSIVE, CONNECTIONROLE_PASSIVE, SdpType::kOffer,
       SdpType::kAnswer},
      {CONNECTIONROLE_ACTIVE, CONNECTIONROLE_ACTIVE, SdpType::kOffer,
       SdpType::kPrAnswer},
      {CONNECTIONROLE_ACTPASS, CONNECTIONROLE_ACTPASS, SdpType::kOffer,
       SdpType::kPrAnswer},
      {CONNECTIONROLE_PASSIVE, CONNECTIONROLE_PASSIVE, SdpType::kOffer,
       SdpType::kPrAnswer}};

  // The first (offer-side) description applies fine; the second one, which
  // completes the invalid pairing, must be rejected.
  for (auto& param : duplicate_params) {
    jsep_transport_ =
        CreateJsepTransport2(rtcp_mux_enabled, SrtpMode::kDtlsSrtp);
    jsep_transport_->SetLocalCertificate(certificate);

    local_description.transport_desc.connection_role = param.local_role;
    remote_description.transport_desc.connection_role = param.remote_role;

    if (param.local_type == SdpType::kOffer) {
      EXPECT_TRUE(jsep_transport_
                      ->SetLocalJsepTransportDescription(local_description,
                                                         param.local_type)
                      .ok());
      EXPECT_FALSE(jsep_transport_
                       ->SetRemoteJsepTransportDescription(remote_description,
                                                           param.remote_type)
                       .ok());
    } else {
      EXPECT_TRUE(jsep_transport_
                      ->SetRemoteJsepTransportDescription(remote_description,
                                                          param.remote_type)
                      .ok());
      EXPECT_FALSE(jsep_transport_
                       ->SetLocalJsepTransportDescription(local_description,
                                                          param.local_type)
                       .ok());
    }
  }

  // Invalid parameters due to the offerer not using a role consistent with the
  // state
  NegotiateRoleParams offerer_without_actpass_params[] = {
      // Cannot use ACTPASS in an answer
      {CONNECTIONROLE_ACTPASS, CONNECTIONROLE_PASSIVE, SdpType::kAnswer,
       SdpType::kOffer},
      {CONNECTIONROLE_ACTPASS, CONNECTIONROLE_PASSIVE, SdpType::kPrAnswer,
       SdpType::kOffer},
      // Cannot send ACTIVE or PASSIVE in an offer (must handle, must not send)
      {CONNECTIONROLE_ACTIVE, CONNECTIONROLE_PASSIVE, SdpType::kOffer,
       SdpType::kAnswer},
      {CONNECTIONROLE_PASSIVE, CONNECTIONROLE_ACTIVE, SdpType::kOffer,
       SdpType::kAnswer},
      {CONNECTIONROLE_PASSIVE, CONNECTIONROLE_ACTPASS, SdpType::kOffer,
       SdpType::kAnswer},
      {CONNECTIONROLE_ACTIVE, CONNECTIONROLE_PASSIVE, SdpType::kOffer,
       SdpType::kPrAnswer},
      {CONNECTIONROLE_PASSIVE, CONNECTIONROLE_ACTIVE, SdpType::kOffer,
       SdpType::kPrAnswer},
      {CONNECTIONROLE_PASSIVE, CONNECTIONROLE_ACTPASS, SdpType::kOffer,
       SdpType::kPrAnswer}};

  // Same pattern: the completing description must be rejected. The failing
  // param is streamed into the assertion message for easier diagnosis.
  for (auto& param : offerer_without_actpass_params) {
    jsep_transport_ =
        CreateJsepTransport2(rtcp_mux_enabled, SrtpMode::kDtlsSrtp);
    jsep_transport_->SetLocalCertificate(certificate);

    local_description.transport_desc.connection_role = param.local_role;
    remote_description.transport_desc.connection_role = param.remote_role;

    if (param.local_type == SdpType::kOffer) {
      EXPECT_TRUE(jsep_transport_
                      ->SetLocalJsepTransportDescription(local_description,
                                                         param.local_type)
                      .ok())
          << param;
      EXPECT_FALSE(jsep_transport_
                       ->SetRemoteJsepTransportDescription(remote_description,
                                                           param.remote_type)
                       .ok())
          << param;
    } else {
      EXPECT_TRUE(jsep_transport_
                      ->SetRemoteJsepTransportDescription(remote_description,
                                                          param.remote_type)
                      .ok())
          << param;
      EXPECT_FALSE(jsep_transport_
                       ->SetLocalJsepTransportDescription(local_description,
                                                          param.local_type)
                       .ok())
          << param;
    }
  }
}
+
// Instantiate the parameterized suite with RTCP mux both enabled (true) and
// disabled (false).
INSTANTIATE_TEST_SUITE_P(JsepTransport2Test,
                         JsepTransport2WithRtcpMux,
                         ::testing::Bool());
+
+// Test that a reoffer in the opposite direction is successful as long as the
+// role isn't changing. Doesn't test every possible combination like the test
+// above.
TEST_F(JsepTransport2Test, ValidDtlsReofferFromAnswerer) {
  // Just use the same certificate for both sides; doesn't really matter in a
  // non end-to-end test.
  rtc::scoped_refptr<rtc::RTCCertificate> certificate =
      rtc::RTCCertificate::Create(
          rtc::SSLIdentity::Create("testing", rtc::KT_ECDSA));
  bool rtcp_mux_enabled = true;
  jsep_transport_ = CreateJsepTransport2(rtcp_mux_enabled, SrtpMode::kDtlsSrtp);
  jsep_transport_->SetLocalCertificate(certificate);

  // Initial exchange: we offer "actpass", remote answers "active", making us
  // the DTLS server.
  JsepTransportDescription local_offer =
      MakeJsepTransportDescription(rtcp_mux_enabled, kIceUfrag1, kIcePwd1,
                                   certificate, CONNECTIONROLE_ACTPASS);
  JsepTransportDescription remote_answer =
      MakeJsepTransportDescription(rtcp_mux_enabled, kIceUfrag2, kIcePwd2,
                                   certificate, CONNECTIONROLE_ACTIVE);

  EXPECT_TRUE(
      jsep_transport_
          ->SetLocalJsepTransportDescription(local_offer, SdpType::kOffer)
          .ok());
  EXPECT_TRUE(
      jsep_transport_
          ->SetRemoteJsepTransportDescription(remote_answer, SdpType::kAnswer)
          .ok());

  // We were actpass->active previously, now in the other direction it's
  // actpass->passive. The negotiated roles are unchanged, so this must
  // succeed.
  JsepTransportDescription remote_offer =
      MakeJsepTransportDescription(rtcp_mux_enabled, kIceUfrag2, kIcePwd2,
                                   certificate, CONNECTIONROLE_ACTPASS);
  JsepTransportDescription local_answer =
      MakeJsepTransportDescription(rtcp_mux_enabled, kIceUfrag1, kIcePwd1,
                                   certificate, CONNECTIONROLE_PASSIVE);

  EXPECT_TRUE(
      jsep_transport_
          ->SetRemoteJsepTransportDescription(remote_offer, SdpType::kOffer)
          .ok());
  EXPECT_TRUE(
      jsep_transport_
          ->SetLocalJsepTransportDescription(local_answer, SdpType::kAnswer)
          .ok());
}
+
+// Test that a reoffer in the opposite direction fails if the role changes.
+// Inverse of test above.
TEST_F(JsepTransport2Test, InvalidDtlsReofferFromAnswerer) {
  // Just use the same certificate for both sides; doesn't really matter in a
  // non end-to-end test.
  rtc::scoped_refptr<rtc::RTCCertificate> certificate =
      rtc::RTCCertificate::Create(
          rtc::SSLIdentity::Create("testing", rtc::KT_ECDSA));
  bool rtcp_mux_enabled = true;
  jsep_transport_ = CreateJsepTransport2(rtcp_mux_enabled, SrtpMode::kDtlsSrtp);
  jsep_transport_->SetLocalCertificate(certificate);

  // Initial exchange: we offer "actpass", remote answers "active", making us
  // the DTLS server.
  JsepTransportDescription local_offer =
      MakeJsepTransportDescription(rtcp_mux_enabled, kIceUfrag1, kIcePwd1,
                                   certificate, CONNECTIONROLE_ACTPASS);
  JsepTransportDescription remote_answer =
      MakeJsepTransportDescription(rtcp_mux_enabled, kIceUfrag2, kIcePwd2,
                                   certificate, CONNECTIONROLE_ACTIVE);

  EXPECT_TRUE(
      jsep_transport_
          ->SetLocalJsepTransportDescription(local_offer, SdpType::kOffer)
          .ok());
  EXPECT_TRUE(
      jsep_transport_
          ->SetRemoteJsepTransportDescription(remote_answer, SdpType::kAnswer)
          .ok());

  // Changing role to passive here isn't allowed. Though for some reason this
  // only fails in SetLocalTransportDescription.
  JsepTransportDescription remote_offer =
      MakeJsepTransportDescription(rtcp_mux_enabled, kIceUfrag2, kIcePwd2,
                                   certificate, CONNECTIONROLE_PASSIVE);
  JsepTransportDescription local_answer =
      MakeJsepTransportDescription(rtcp_mux_enabled, kIceUfrag1, kIcePwd1,
                                   certificate, CONNECTIONROLE_ACTIVE);

  EXPECT_TRUE(
      jsep_transport_
          ->SetRemoteJsepTransportDescription(remote_offer, SdpType::kOffer)
          .ok());
  EXPECT_FALSE(
      jsep_transport_
          ->SetLocalJsepTransportDescription(local_answer, SdpType::kAnswer)
          .ok());
}
+
+// Test that a remote offer with the current negotiated role can be accepted.
+// This is allowed by dtls-sdp, though we'll never generate such an offer,
+// since JSEP requires generating "actpass".
TEST_F(JsepTransport2Test, RemoteOfferWithCurrentNegotiatedDtlsRole) {
  rtc::scoped_refptr<rtc::RTCCertificate> certificate =
      rtc::RTCCertificate::Create(
          rtc::SSLIdentity::Create("testing", rtc::KT_ECDSA));
  bool rtcp_mux_enabled = true;
  jsep_transport_ = CreateJsepTransport2(rtcp_mux_enabled, SrtpMode::kDtlsSrtp);
  jsep_transport_->SetLocalCertificate(certificate);

  JsepTransportDescription remote_desc =
      MakeJsepTransportDescription(rtcp_mux_enabled, kIceUfrag1, kIcePwd1,
                                   certificate, CONNECTIONROLE_ACTPASS);
  JsepTransportDescription local_desc =
      MakeJsepTransportDescription(rtcp_mux_enabled, kIceUfrag2, kIcePwd2,
                                   certificate, CONNECTIONROLE_ACTIVE);

  // Normal initial offer/answer with "actpass" in the offer and "active" in
  // the answer.
  ASSERT_TRUE(
      jsep_transport_
          ->SetRemoteJsepTransportDescription(remote_desc, SdpType::kOffer)
          .ok());
  ASSERT_TRUE(
      jsep_transport_
          ->SetLocalJsepTransportDescription(local_desc, SdpType::kAnswer)
          .ok());

  // Sanity check that role was actually negotiated.
  absl::optional<rtc::SSLRole> role = jsep_transport_->GetDtlsRole();
  ASSERT_TRUE(role);
  EXPECT_EQ(rtc::SSL_CLIENT, *role);

  // Subsequent offer with current negotiated role of "passive"; this matches
  // the established roles, so the renegotiation must succeed.
  remote_desc.transport_desc.connection_role = CONNECTIONROLE_PASSIVE;
  EXPECT_TRUE(
      jsep_transport_
          ->SetRemoteJsepTransportDescription(remote_desc, SdpType::kOffer)
          .ok());
  EXPECT_TRUE(
      jsep_transport_
          ->SetLocalJsepTransportDescription(local_desc, SdpType::kAnswer)
          .ok());
}
+
+// Test that a remote offer with the inverse of the current negotiated DTLS
+// role is rejected.
TEST_F(JsepTransport2Test, RemoteOfferThatChangesNegotiatedDtlsRole) {
  rtc::scoped_refptr<rtc::RTCCertificate> certificate =
      rtc::RTCCertificate::Create(
          rtc::SSLIdentity::Create("testing", rtc::KT_ECDSA));
  bool rtcp_mux_enabled = true;
  jsep_transport_ = CreateJsepTransport2(rtcp_mux_enabled, SrtpMode::kDtlsSrtp);
  jsep_transport_->SetLocalCertificate(certificate);

  JsepTransportDescription remote_desc =
      MakeJsepTransportDescription(rtcp_mux_enabled, kIceUfrag1, kIcePwd1,
                                   certificate, CONNECTIONROLE_ACTPASS);
  JsepTransportDescription local_desc =
      MakeJsepTransportDescription(rtcp_mux_enabled, kIceUfrag2, kIcePwd2,
                                   certificate, CONNECTIONROLE_ACTIVE);

  // Normal initial offer/answer with "actpass" in the offer and "active" in
  // the answer.
  ASSERT_TRUE(
      jsep_transport_
          ->SetRemoteJsepTransportDescription(remote_desc, SdpType::kOffer)
          .ok());
  ASSERT_TRUE(
      jsep_transport_
          ->SetLocalJsepTransportDescription(local_desc, SdpType::kAnswer)
          .ok());

  // Sanity check that role was actually negotiated.
  absl::optional<rtc::SSLRole> role = jsep_transport_->GetDtlsRole();
  ASSERT_TRUE(role);
  EXPECT_EQ(rtc::SSL_CLIENT, *role);

  // Subsequent offer that inverts the remote role to "active" (the negotiated
  // remote role is "passive"); completing the exchange must fail.
  remote_desc.transport_desc.connection_role = CONNECTIONROLE_ACTIVE;
  EXPECT_TRUE(
      jsep_transport_
          ->SetRemoteJsepTransportDescription(remote_desc, SdpType::kOffer)
          .ok());
  EXPECT_FALSE(
      jsep_transport_
          ->SetLocalJsepTransportDescription(local_desc, SdpType::kAnswer)
          .ok());
}
+
+// Test that a remote offer which changes both fingerprint and role is accepted.
TEST_F(JsepTransport2Test, RemoteOfferThatChangesFingerprintAndDtlsRole) {
  rtc::scoped_refptr<rtc::RTCCertificate> certificate =
      rtc::RTCCertificate::Create(
          rtc::SSLIdentity::Create("testing1", rtc::KT_ECDSA));
  // Second certificate so the reoffer presents a different fingerprint.
  rtc::scoped_refptr<rtc::RTCCertificate> certificate2 =
      rtc::RTCCertificate::Create(
          rtc::SSLIdentity::Create("testing2", rtc::KT_ECDSA));
  bool rtcp_mux_enabled = true;
  jsep_transport_ = CreateJsepTransport2(rtcp_mux_enabled, SrtpMode::kDtlsSrtp);
  jsep_transport_->SetLocalCertificate(certificate);

  JsepTransportDescription remote_desc =
      MakeJsepTransportDescription(rtcp_mux_enabled, kIceUfrag1, kIcePwd1,
                                   certificate, CONNECTIONROLE_ACTPASS);
  JsepTransportDescription remote_desc2 =
      MakeJsepTransportDescription(rtcp_mux_enabled, kIceUfrag1, kIcePwd1,
                                   certificate2, CONNECTIONROLE_ACTPASS);

  JsepTransportDescription local_desc =
      MakeJsepTransportDescription(rtcp_mux_enabled, kIceUfrag2, kIcePwd2,
                                   certificate, CONNECTIONROLE_ACTIVE);

  // Normal initial offer/answer with "actpass" in the offer and "active" in
  // the answer.
  ASSERT_TRUE(
      jsep_transport_
          ->SetRemoteJsepTransportDescription(remote_desc, SdpType::kOffer)
          .ok());
  ASSERT_TRUE(
      jsep_transport_
          ->SetLocalJsepTransportDescription(local_desc, SdpType::kAnswer)
          .ok());

  // Sanity check that role was actually negotiated.
  absl::optional<rtc::SSLRole> role = jsep_transport_->GetDtlsRole();
  ASSERT_TRUE(role);
  EXPECT_EQ(rtc::SSL_CLIENT, *role);

  // Subsequent exchange with new remote fingerprint and different role. Since
  // the fingerprint changed, flipping the role is allowed.
  local_desc.transport_desc.connection_role = CONNECTIONROLE_PASSIVE;
  EXPECT_TRUE(
      jsep_transport_
          ->SetRemoteJsepTransportDescription(remote_desc2, SdpType::kOffer)
          .ok());
  EXPECT_TRUE(
      jsep_transport_
          ->SetLocalJsepTransportDescription(local_desc, SdpType::kAnswer)
          .ok());

  // The renegotiation flipped us from client to server.
  role = jsep_transport_->GetDtlsRole();
  ASSERT_TRUE(role);
  EXPECT_EQ(rtc::SSL_SERVER, *role);
}
+
+// Testing that a legacy client that doesn't use the setup attribute will be
+// interpreted as having an active role.
TEST_F(JsepTransport2Test, DtlsSetupWithLegacyAsAnswerer) {
  rtc::scoped_refptr<rtc::RTCCertificate> certificate =
      rtc::RTCCertificate::Create(
          rtc::SSLIdentity::Create("testing", rtc::KT_ECDSA));
  bool rtcp_mux_enabled = true;
  jsep_transport_ = CreateJsepTransport2(rtcp_mux_enabled, SrtpMode::kDtlsSrtp);
  jsep_transport_->SetLocalCertificate(certificate);

  JsepTransportDescription remote_desc =
      MakeJsepTransportDescription(rtcp_mux_enabled, kIceUfrag1, kIcePwd1,
                                   certificate, CONNECTIONROLE_ACTPASS);
  JsepTransportDescription local_desc =
      MakeJsepTransportDescription(rtcp_mux_enabled, kIceUfrag2, kIcePwd2,
                                   certificate, CONNECTIONROLE_ACTIVE);

  local_desc.transport_desc.connection_role = CONNECTIONROLE_ACTPASS;
  ASSERT_TRUE(
      jsep_transport_
          ->SetLocalJsepTransportDescription(local_desc, SdpType::kOffer)
          .ok());
  // Use CONNECTIONROLE_NONE to simulate legacy endpoint.
  remote_desc.transport_desc.connection_role = CONNECTIONROLE_NONE;
  ASSERT_TRUE(
      jsep_transport_
          ->SetRemoteJsepTransportDescription(remote_desc, SdpType::kAnswer)
          .ok());

  absl::optional<rtc::SSLRole> role = jsep_transport_->GetDtlsRole();
  ASSERT_TRUE(role);
  // Since the legacy answer omitted the setup attribute, and we offered
  // actpass, we should act as passive (server).
  EXPECT_EQ(rtc::SSL_SERVER, *role);
}
+
// Tests that when RTCP mux is successfully negotiated, the dedicated RTCP
// transport is destroyed and the rtcp_mux_active callback is fired.
TEST_F(JsepTransport2Test, RtcpMuxNegotiation) {
  jsep_transport_ =
      CreateJsepTransport2(/*rtcp_mux_enabled=*/false, SrtpMode::kDtlsSrtp);
  JsepTransportDescription local_desc;
  local_desc.rtcp_mux_enabled = true;
  // Before negotiation, a dedicated RTCP transport exists and no mux
  // notification has been received.
  ASSERT_NE(nullptr, jsep_transport_->rtcp_dtls_transport());
  EXPECT_FALSE(signal_rtcp_mux_active_received_);

  // The remote side supports RTCP-mux.
  JsepTransportDescription remote_desc;
  remote_desc.rtcp_mux_enabled = true;
  ASSERT_TRUE(
      jsep_transport_
          ->SetLocalJsepTransportDescription(local_desc, SdpType::kOffer)
          .ok());
  ASSERT_TRUE(
      jsep_transport_
          ->SetRemoteJsepTransportDescription(remote_desc, SdpType::kAnswer)
          .ok());

  // Mux negotiated: the RTCP transport is gone and the callback fired.
  EXPECT_EQ(nullptr, jsep_transport_->rtcp_dtls_transport());
  EXPECT_TRUE(signal_rtcp_mux_active_received_);

  // The remote side doesn't support RTCP-mux.
  jsep_transport_ =
      CreateJsepTransport2(/*rtcp_mux_enabled=*/false, SrtpMode::kDtlsSrtp);
  signal_rtcp_mux_active_received_ = false;
  remote_desc.rtcp_mux_enabled = false;
  ASSERT_TRUE(
      jsep_transport_
          ->SetLocalJsepTransportDescription(local_desc, SdpType::kOffer)
          .ok());
  ASSERT_TRUE(
      jsep_transport_
          ->SetRemoteJsepTransportDescription(remote_desc, SdpType::kAnswer)
          .ok());

  // No mux: the RTCP transport survives and no callback fires.
  EXPECT_NE(nullptr, jsep_transport_->rtcp_dtls_transport());
  EXPECT_FALSE(signal_rtcp_mux_active_received_);
}
+
// Tests that SDES offer/answer with matching crypto suites activates SRTP.
TEST_F(JsepTransport2Test, SdesNegotiation) {
  jsep_transport_ =
      CreateJsepTransport2(/*rtcp_mux_enabled=*/true, SrtpMode::kSdes);
  ASSERT_TRUE(sdes_transport_);
  EXPECT_FALSE(sdes_transport_->IsSrtpActive());

  // Offer one crypto (tag 1, AES_CM_128_HMAC_SHA1_32, random inline key).
  JsepTransportDescription offer_desc;
  offer_desc.cryptos.push_back(cricket::CryptoParams(
      1, rtc::kCsAesCm128HmacSha1_32, "inline:" + rtc::CreateRandomString(40),
      std::string()));
  ASSERT_TRUE(
      jsep_transport_
          ->SetLocalJsepTransportDescription(offer_desc, SdpType::kOffer)
          .ok());

  // Answer with the same suite; SRTP should become active.
  JsepTransportDescription answer_desc;
  answer_desc.cryptos.push_back(cricket::CryptoParams(
      1, rtc::kCsAesCm128HmacSha1_32, "inline:" + rtc::CreateRandomString(40),
      std::string()));
  ASSERT_TRUE(
      jsep_transport_
          ->SetRemoteJsepTransportDescription(answer_desc, SdpType::kAnswer)
          .ok());
  EXPECT_TRUE(sdes_transport_->IsSrtpActive());
}
+
// Tests that an answer without any cryptos is accepted but leaves SRTP
// inactive.
TEST_F(JsepTransport2Test, SdesNegotiationWithEmptyCryptosInAnswer) {
  jsep_transport_ =
      CreateJsepTransport2(/*rtcp_mux_enabled=*/true, SrtpMode::kSdes);
  ASSERT_TRUE(sdes_transport_);
  EXPECT_FALSE(sdes_transport_->IsSrtpActive());

  JsepTransportDescription offer_desc;
  offer_desc.cryptos.push_back(cricket::CryptoParams(
      1, rtc::kCsAesCm128HmacSha1_32, "inline:" + rtc::CreateRandomString(40),
      std::string()));
  ASSERT_TRUE(
      jsep_transport_
          ->SetLocalJsepTransportDescription(offer_desc, SdpType::kOffer)
          .ok());

  JsepTransportDescription answer_desc;
  ASSERT_TRUE(
      jsep_transport_
          ->SetRemoteJsepTransportDescription(answer_desc, SdpType::kAnswer)
          .ok());
  // SRTP is not active because the crypto parameter in the answer is empty.
  EXPECT_FALSE(sdes_transport_->IsSrtpActive());
}
+
// Tests that an answer whose crypto suite differs from the offer's is
// rejected.
TEST_F(JsepTransport2Test, SdesNegotiationWithMismatchedCryptos) {
  jsep_transport_ =
      CreateJsepTransport2(/*rtcp_mux_enabled=*/true, SrtpMode::kSdes);
  ASSERT_TRUE(sdes_transport_);
  EXPECT_FALSE(sdes_transport_->IsSrtpActive());

  // Offer AES_CM_128_HMAC_SHA1_32...
  JsepTransportDescription offer_desc;
  offer_desc.cryptos.push_back(cricket::CryptoParams(
      1, rtc::kCsAesCm128HmacSha1_32, "inline:" + rtc::CreateRandomString(40),
      std::string()));
  ASSERT_TRUE(
      jsep_transport_
          ->SetLocalJsepTransportDescription(offer_desc, SdpType::kOffer)
          .ok());

  // ...but answer AES_CM_128_HMAC_SHA1_80.
  JsepTransportDescription answer_desc;
  answer_desc.cryptos.push_back(cricket::CryptoParams(
      1, rtc::kCsAesCm128HmacSha1_80, "inline:" + rtc::CreateRandomString(40),
      std::string()));
  // Expected to fail because the crypto parameters don't match.
  ASSERT_FALSE(
      jsep_transport_
          ->SetRemoteJsepTransportDescription(answer_desc, SdpType::kAnswer)
          .ok());
}
+
+// Tests that the remote candidates can be added to the transports after both
+// local and remote descriptions are set.
+TEST_F(JsepTransport2Test, AddRemoteCandidates) {
+ jsep_transport_ =
+ CreateJsepTransport2(/*rtcp_mux_enabled=*/true, SrtpMode::kDtlsSrtp);
+ auto fake_ice_transport = static_cast<FakeIceTransport*>(
+ jsep_transport_->rtp_dtls_transport()->ice_transport());
+
+ Candidates candidates;
+ candidates.push_back(CreateCandidate(/*COMPONENT_RTP*/ 1));
+ candidates.push_back(CreateCandidate(/*COMPONENT_RTP*/ 1));
+
+ JsepTransportDescription desc;
+ ASSERT_TRUE(
+ jsep_transport_->SetLocalJsepTransportDescription(desc, SdpType::kOffer)
+ .ok());
+ // Expected to fail because the remote description is unset.
+ EXPECT_FALSE(jsep_transport_->AddRemoteCandidates(candidates).ok());
+
+ ASSERT_TRUE(
+ jsep_transport_->SetRemoteJsepTransportDescription(desc, SdpType::kAnswer)
+ .ok());
+ EXPECT_EQ(0u, fake_ice_transport->remote_candidates().size());
+ EXPECT_TRUE(jsep_transport_->AddRemoteCandidates(candidates).ok());
+ EXPECT_EQ(candidates.size(), fake_ice_transport->remote_candidates().size());
+}
+
+enum class Scenario {
+ kSdes,
+ kDtlsBeforeCallerSendOffer,
+ kDtlsBeforeCallerSetAnswer,
+ kDtlsAfterCallerSetAnswer,
+};
+
+class JsepTransport2HeaderExtensionTest
+ : public JsepTransport2Test,
+ public ::testing::WithParamInterface<std::tuple<Scenario, bool>> {
+ protected:
+ JsepTransport2HeaderExtensionTest() {}
+
+ void CreateJsepTransportPair(SrtpMode mode) {
+ jsep_transport1_ = CreateJsepTransport2(/*rtcp_mux_enabled=*/true, mode);
+ jsep_transport2_ = CreateJsepTransport2(/*rtcp_mux_enabled=*/true, mode);
+
+ auto fake_dtls1 =
+ static_cast<FakeDtlsTransport*>(jsep_transport1_->rtp_dtls_transport());
+ auto fake_dtls2 =
+ static_cast<FakeDtlsTransport*>(jsep_transport2_->rtp_dtls_transport());
+
+ fake_dtls1->fake_ice_transport()->SignalReadPacket.connect(
+ this, &JsepTransport2HeaderExtensionTest::OnReadPacket1);
+ fake_dtls2->fake_ice_transport()->SignalReadPacket.connect(
+ this, &JsepTransport2HeaderExtensionTest::OnReadPacket2);
+
+ if (mode == SrtpMode::kDtlsSrtp) {
+ auto cert1 = rtc::RTCCertificate::Create(
+ rtc::SSLIdentity::Create("session1", rtc::KT_DEFAULT));
+ jsep_transport1_->rtp_dtls_transport()->SetLocalCertificate(cert1);
+ auto cert2 = rtc::RTCCertificate::Create(
+ rtc::SSLIdentity::Create("session1", rtc::KT_DEFAULT));
+ jsep_transport2_->rtp_dtls_transport()->SetLocalCertificate(cert2);
+ }
+ }
+
+ void OnReadPacket1(rtc::PacketTransportInternal* transport,
+ const char* data,
+ size_t size,
+ const int64_t& /* packet_time_us */,
+ int flags) {
+ RTC_LOG(LS_INFO) << "JsepTransport 1 Received a packet.";
+ CompareHeaderExtensions(
+ reinterpret_cast<const char*>(kPcmuFrameWithExtensions),
+ sizeof(kPcmuFrameWithExtensions), data, size, recv_encrypted_headers1_,
+ false);
+ received_packet_count_++;
+ }
+
+ void OnReadPacket2(rtc::PacketTransportInternal* transport,
+ const char* data,
+ size_t size,
+ const int64_t& /* packet_time_us */,
+ int flags) {
+ RTC_LOG(LS_INFO) << "JsepTransport 2 Received a packet.";
+ CompareHeaderExtensions(
+ reinterpret_cast<const char*>(kPcmuFrameWithExtensions),
+ sizeof(kPcmuFrameWithExtensions), data, size, recv_encrypted_headers2_,
+ false);
+ received_packet_count_++;
+ }
+
+ void ConnectTransport() {
+ auto rtp_dtls_transport1 =
+ static_cast<FakeDtlsTransport*>(jsep_transport1_->rtp_dtls_transport());
+ auto rtp_dtls_transport2 =
+ static_cast<FakeDtlsTransport*>(jsep_transport2_->rtp_dtls_transport());
+ rtp_dtls_transport1->SetDestination(rtp_dtls_transport2);
+ }
+
+ int GetRtpAuthLen() {
+ bool use_gcm = std::get<1>(GetParam());
+ if (use_gcm) {
+ return 16;
+ }
+ return 10;
+ }
+
+ void TestSendRecvPacketWithEncryptedHeaderExtension() {
+ TestOneWaySendRecvPacketWithEncryptedHeaderExtension(
+ jsep_transport1_.get());
+ TestOneWaySendRecvPacketWithEncryptedHeaderExtension(
+ jsep_transport2_.get());
+ }
+
+ void TestOneWaySendRecvPacketWithEncryptedHeaderExtension(
+ JsepTransport* sender_transport) {
+ size_t rtp_len = sizeof(kPcmuFrameWithExtensions);
+ size_t packet_size = rtp_len + GetRtpAuthLen();
+ rtc::Buffer rtp_packet_buffer(packet_size);
+ char* rtp_packet_data = rtp_packet_buffer.data<char>();
+ memcpy(rtp_packet_data, kPcmuFrameWithExtensions, rtp_len);
+ // In order to be able to run this test function multiple times we can not
+ // use the same sequence number twice. Increase the sequence number by one.
+ rtc::SetBE16(reinterpret_cast<uint8_t*>(rtp_packet_data) + 2,
+ ++sequence_number_);
+ rtc::CopyOnWriteBuffer rtp_packet(rtp_packet_data, rtp_len, packet_size);
+
+ int packet_count_before = received_packet_count_;
+ rtc::PacketOptions options;
+ // Send a packet and verify that the packet can be successfully received and
+ // decrypted.
+ ASSERT_TRUE(sender_transport->rtp_transport()->SendRtpPacket(
+ &rtp_packet, options, cricket::PF_SRTP_BYPASS));
+ EXPECT_EQ(packet_count_before + 1, received_packet_count_);
+ }
+
+ int sequence_number_ = 0;
+ int received_packet_count_ = 0;
+ std::unique_ptr<JsepTransport> jsep_transport1_;
+ std::unique_ptr<JsepTransport> jsep_transport2_;
+ std::vector<int> recv_encrypted_headers1_;
+ std::vector<int> recv_encrypted_headers2_;
+};
+
+// Test that the encrypted header extension works and can be changed in
+// different scenarios.
+TEST_P(JsepTransport2HeaderExtensionTest, EncryptedHeaderExtensionNegotiation) {
+ Scenario scenario = std::get<0>(GetParam());
+ bool use_gcm = std::get<1>(GetParam());
+ SrtpMode mode = SrtpMode ::kDtlsSrtp;
+ if (scenario == Scenario::kSdes) {
+ mode = SrtpMode::kSdes;
+ }
+ CreateJsepTransportPair(mode);
+ recv_encrypted_headers1_.push_back(kHeaderExtensionIDs[0]);
+ recv_encrypted_headers2_.push_back(kHeaderExtensionIDs[1]);
+
+ cricket::CryptoParams sdes_param(1, rtc::kCsAesCm128HmacSha1_80,
+ "inline:" + rtc::CreateRandomString(40),
+ std::string());
+ if (use_gcm) {
+ auto fake_dtls1 =
+ static_cast<FakeDtlsTransport*>(jsep_transport1_->rtp_dtls_transport());
+ auto fake_dtls2 =
+ static_cast<FakeDtlsTransport*>(jsep_transport2_->rtp_dtls_transport());
+
+ fake_dtls1->SetSrtpCryptoSuite(rtc::kSrtpAeadAes256Gcm);
+ fake_dtls2->SetSrtpCryptoSuite(rtc::kSrtpAeadAes256Gcm);
+ }
+
+ if (scenario == Scenario::kDtlsBeforeCallerSendOffer) {
+ ConnectTransport();
+ }
+
+ JsepTransportDescription offer_desc;
+ offer_desc.encrypted_header_extension_ids = recv_encrypted_headers1_;
+ if (scenario == Scenario::kSdes) {
+ offer_desc.cryptos.push_back(sdes_param);
+ }
+ ASSERT_TRUE(
+ jsep_transport1_
+ ->SetLocalJsepTransportDescription(offer_desc, SdpType::kOffer)
+ .ok());
+ ASSERT_TRUE(
+ jsep_transport2_
+ ->SetRemoteJsepTransportDescription(offer_desc, SdpType::kOffer)
+ .ok());
+
+ JsepTransportDescription answer_desc;
+ answer_desc.encrypted_header_extension_ids = recv_encrypted_headers2_;
+ if (scenario == Scenario::kSdes) {
+ answer_desc.cryptos.push_back(sdes_param);
+ }
+ ASSERT_TRUE(
+ jsep_transport2_
+ ->SetLocalJsepTransportDescription(answer_desc, SdpType::kAnswer)
+ .ok());
+
+ if (scenario == Scenario::kDtlsBeforeCallerSetAnswer) {
+ ConnectTransport();
+ // Sending packet from transport2 to transport1 should work when they are
+ // partially configured.
+ TestOneWaySendRecvPacketWithEncryptedHeaderExtension(
+ /*sender_transport=*/jsep_transport2_.get());
+ }
+
+ ASSERT_TRUE(
+ jsep_transport1_
+ ->SetRemoteJsepTransportDescription(answer_desc, SdpType::kAnswer)
+ .ok());
+
+ if (scenario == Scenario::kDtlsAfterCallerSetAnswer ||
+ scenario == Scenario::kSdes) {
+ ConnectTransport();
+ }
+ EXPECT_TRUE(jsep_transport1_->rtp_transport()->IsSrtpActive());
+ EXPECT_TRUE(jsep_transport2_->rtp_transport()->IsSrtpActive());
+ TestSendRecvPacketWithEncryptedHeaderExtension();
+
+ // Change the encrypted header extension in a new offer/answer exchange.
+ recv_encrypted_headers1_.clear();
+ recv_encrypted_headers2_.clear();
+ recv_encrypted_headers1_.push_back(kHeaderExtensionIDs[1]);
+ recv_encrypted_headers2_.push_back(kHeaderExtensionIDs[0]);
+ offer_desc.encrypted_header_extension_ids = recv_encrypted_headers1_;
+ answer_desc.encrypted_header_extension_ids = recv_encrypted_headers2_;
+ ASSERT_TRUE(
+ jsep_transport1_
+ ->SetLocalJsepTransportDescription(offer_desc, SdpType::kOffer)
+ .ok());
+ ASSERT_TRUE(
+ jsep_transport2_
+ ->SetRemoteJsepTransportDescription(offer_desc, SdpType::kOffer)
+ .ok());
+ ASSERT_TRUE(
+ jsep_transport2_
+ ->SetLocalJsepTransportDescription(answer_desc, SdpType::kAnswer)
+ .ok());
+ ASSERT_TRUE(
+ jsep_transport1_
+ ->SetRemoteJsepTransportDescription(answer_desc, SdpType::kAnswer)
+ .ok());
+ EXPECT_TRUE(jsep_transport1_->rtp_transport()->IsSrtpActive());
+ EXPECT_TRUE(jsep_transport2_->rtp_transport()->IsSrtpActive());
+ TestSendRecvPacketWithEncryptedHeaderExtension();
+}
+
+INSTANTIATE_TEST_SUITE_P(
+ JsepTransport2Test,
+ JsepTransport2HeaderExtensionTest,
+ ::testing::Values(
+ std::make_tuple(Scenario::kSdes, false),
+ std::make_tuple(Scenario::kDtlsBeforeCallerSendOffer, true),
+ std::make_tuple(Scenario::kDtlsBeforeCallerSetAnswer, true),
+ std::make_tuple(Scenario::kDtlsAfterCallerSetAnswer, true),
+ std::make_tuple(Scenario::kDtlsBeforeCallerSendOffer, false),
+ std::make_tuple(Scenario::kDtlsBeforeCallerSetAnswer, false),
+ std::make_tuple(Scenario::kDtlsAfterCallerSetAnswer, false)));
+
+// This test verifies the ICE parameters are properly applied to the transports.
+TEST_F(JsepTransport2Test, SetIceParametersWithRenomination) {
+ jsep_transport_ =
+ CreateJsepTransport2(/* rtcp_mux_enabled= */ true, SrtpMode::kDtlsSrtp);
+
+ JsepTransportDescription jsep_description;
+ jsep_description.transport_desc = TransportDescription(kIceUfrag1, kIcePwd1);
+ jsep_description.transport_desc.AddOption(ICE_OPTION_RENOMINATION);
+ ASSERT_TRUE(
+ jsep_transport_
+ ->SetLocalJsepTransportDescription(jsep_description, SdpType::kOffer)
+ .ok());
+ auto fake_ice_transport = static_cast<FakeIceTransport*>(
+ jsep_transport_->rtp_dtls_transport()->ice_transport());
+ EXPECT_EQ(ICEMODE_FULL, fake_ice_transport->remote_ice_mode());
+ EXPECT_EQ(kIceUfrag1, fake_ice_transport->ice_ufrag());
+ EXPECT_EQ(kIcePwd1, fake_ice_transport->ice_pwd());
+ EXPECT_TRUE(fake_ice_transport->ice_parameters().renomination);
+
+ jsep_description.transport_desc = TransportDescription(kIceUfrag2, kIcePwd2);
+ jsep_description.transport_desc.AddOption(ICE_OPTION_RENOMINATION);
+ ASSERT_TRUE(jsep_transport_
+ ->SetRemoteJsepTransportDescription(jsep_description,
+ SdpType::kAnswer)
+ .ok());
+ fake_ice_transport = static_cast<FakeIceTransport*>(
+ jsep_transport_->rtp_dtls_transport()->ice_transport());
+ EXPECT_EQ(ICEMODE_FULL, fake_ice_transport->remote_ice_mode());
+ EXPECT_EQ(kIceUfrag2, fake_ice_transport->remote_ice_ufrag());
+ EXPECT_EQ(kIcePwd2, fake_ice_transport->remote_ice_pwd());
+ EXPECT_TRUE(fake_ice_transport->remote_ice_parameters().renomination);
+}
+
+} // namespace
+} // namespace cricket
diff --git a/third_party/libwebrtc/pc/legacy_stats_collector.cc b/third_party/libwebrtc/pc/legacy_stats_collector.cc
new file mode 100644
index 0000000000..3bc65ee3ee
--- /dev/null
+++ b/third_party/libwebrtc/pc/legacy_stats_collector.cc
@@ -0,0 +1,1398 @@
+/*
+ * Copyright 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/legacy_stats_collector.h"
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <algorithm>
+#include <cmath>
+#include <list>
+#include <set>
+#include <utility>
+#include <vector>
+
+#include "absl/strings/string_view.h"
+#include "absl/types/optional.h"
+#include "api/audio_codecs/audio_encoder.h"
+#include "api/candidate.h"
+#include "api/data_channel_interface.h"
+#include "api/field_trials_view.h"
+#include "api/media_types.h"
+#include "api/rtp_sender_interface.h"
+#include "api/scoped_refptr.h"
+#include "api/sequence_checker.h"
+#include "api/video/video_content_type.h"
+#include "api/video/video_timing.h"
+#include "call/call.h"
+#include "media/base/media_channel.h"
+#include "modules/audio_processing/include/audio_processing_statistics.h"
+#include "p2p/base/ice_transport_internal.h"
+#include "p2p/base/p2p_constants.h"
+#include "pc/channel.h"
+#include "pc/channel_interface.h"
+#include "pc/data_channel_utils.h"
+#include "pc/rtp_receiver.h"
+#include "pc/rtp_receiver_proxy.h"
+#include "pc/rtp_sender_proxy.h"
+#include "pc/rtp_transceiver.h"
+#include "pc/transport_stats.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/ip_address.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/rtc_certificate.h"
+#include "rtc_base/socket_address.h"
+#include "rtc_base/ssl_stream_adapter.h"
+#include "rtc_base/string_encode.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/time_utils.h"
+#include "rtc_base/trace_event.h"
+
+namespace webrtc {
+namespace {
+
+// Field trial which controls whether to report standard-compliant bytes
+// sent/received per stream. If enabled, padding and headers are not included
+// in bytes sent or received.
+constexpr char kUseStandardBytesStats[] = "WebRTC-UseStandardBytesStats";
+
+// The following is the enum RTCStatsIceCandidateType from
+// http://w3c.github.io/webrtc-stats/#rtcstatsicecandidatetype-enum, so that
+// our stats report for ICE candidate type conforms to it.
+const char STATSREPORT_LOCAL_PORT_TYPE[] = "host";
+const char STATSREPORT_STUN_PORT_TYPE[] = "serverreflexive";
+const char STATSREPORT_PRFLX_PORT_TYPE[] = "peerreflexive";
+const char STATSREPORT_RELAY_PORT_TYPE[] = "relayed";
+
+// Strings used by the stats collector to report adapter types. This fits the
+// general style of http://w3c.github.io/webrtc-stats better than what
+// AdapterTypeToString does.
+const char* STATSREPORT_ADAPTER_TYPE_ETHERNET = "lan";
+const char* STATSREPORT_ADAPTER_TYPE_WIFI = "wlan";
+const char* STATSREPORT_ADAPTER_TYPE_WWAN = "wwan";
+const char* STATSREPORT_ADAPTER_TYPE_VPN = "vpn";
+const char* STATSREPORT_ADAPTER_TYPE_LOOPBACK = "loopback";
+const char* STATSREPORT_ADAPTER_TYPE_WILDCARD = "wildcard";
+
+template <typename ValueType>
+struct TypeForAdd {
+ const StatsReport::StatsValueName name;
+ const ValueType& value;
+};
+
+typedef TypeForAdd<bool> BoolForAdd;
+typedef TypeForAdd<float> FloatForAdd;
+typedef TypeForAdd<int64_t> Int64ForAdd;
+typedef TypeForAdd<int> IntForAdd;
+
+StatsReport* AddTrackReport(StatsCollection* reports,
+ const std::string& track_id) {
+ // Adds an empty track report.
+ StatsReport::Id id(
+ StatsReport::NewTypedId(StatsReport::kStatsReportTypeTrack, track_id));
+ StatsReport* report = reports->ReplaceOrAddNew(id);
+ report->AddString(StatsReport::kStatsValueNameTrackId, track_id);
+ return report;
+}
+
+template <class Track>
+void CreateTrackReport(const Track* track,
+ StatsCollection* reports,
+ TrackIdMap* track_ids) {
+ const std::string& track_id = track->id();
+ StatsReport* report = AddTrackReport(reports, track_id);
+ RTC_DCHECK(report != nullptr);
+ (*track_ids)[track_id] = report;
+}
+
+template <class TrackVector>
+void CreateTrackReports(const TrackVector& tracks,
+ StatsCollection* reports,
+ TrackIdMap* track_ids) {
+ for (const auto& track : tracks) {
+ CreateTrackReport(track.get(), reports, track_ids);
+ }
+}
+
+void ExtractCommonSendProperties(const cricket::MediaSenderInfo& info,
+ StatsReport* report,
+ bool use_standard_bytes_stats) {
+ report->AddString(StatsReport::kStatsValueNameCodecName, info.codec_name);
+ int64_t bytes_sent = info.payload_bytes_sent;
+ if (!use_standard_bytes_stats) {
+ bytes_sent += info.header_and_padding_bytes_sent;
+ }
+ report->AddInt64(StatsReport::kStatsValueNameBytesSent, bytes_sent);
+ if (info.rtt_ms >= 0) {
+ report->AddInt64(StatsReport::kStatsValueNameRtt, info.rtt_ms);
+ }
+}
+
+void ExtractCommonReceiveProperties(const cricket::MediaReceiverInfo& info,
+ StatsReport* report) {
+ report->AddString(StatsReport::kStatsValueNameCodecName, info.codec_name);
+}
+
+void SetAudioProcessingStats(StatsReport* report,
+ const AudioProcessingStats& apm_stats) {
+ if (apm_stats.delay_median_ms) {
+ report->AddInt(StatsReport::kStatsValueNameEchoDelayMedian,
+ *apm_stats.delay_median_ms);
+ }
+ if (apm_stats.delay_standard_deviation_ms) {
+ report->AddInt(StatsReport::kStatsValueNameEchoDelayStdDev,
+ *apm_stats.delay_standard_deviation_ms);
+ }
+ if (apm_stats.echo_return_loss) {
+ report->AddInt(StatsReport::kStatsValueNameEchoReturnLoss,
+ *apm_stats.echo_return_loss);
+ }
+ if (apm_stats.echo_return_loss_enhancement) {
+ report->AddInt(StatsReport::kStatsValueNameEchoReturnLossEnhancement,
+ *apm_stats.echo_return_loss_enhancement);
+ }
+ if (apm_stats.residual_echo_likelihood) {
+ report->AddFloat(StatsReport::kStatsValueNameResidualEchoLikelihood,
+ static_cast<float>(*apm_stats.residual_echo_likelihood));
+ }
+ if (apm_stats.residual_echo_likelihood_recent_max) {
+ report->AddFloat(
+ StatsReport::kStatsValueNameResidualEchoLikelihoodRecentMax,
+ static_cast<float>(*apm_stats.residual_echo_likelihood_recent_max));
+ }
+ if (apm_stats.divergent_filter_fraction) {
+ report->AddFloat(StatsReport::kStatsValueNameAecDivergentFilterFraction,
+ static_cast<float>(*apm_stats.divergent_filter_fraction));
+ }
+}
+
+void ExtractStats(const cricket::VoiceReceiverInfo& info,
+ StatsReport* report,
+ bool use_standard_bytes_stats) {
+ ExtractCommonReceiveProperties(info, report);
+ const FloatForAdd floats[] = {
+ {StatsReport::kStatsValueNameExpandRate, info.expand_rate},
+ {StatsReport::kStatsValueNameSecondaryDecodedRate,
+ info.secondary_decoded_rate},
+ {StatsReport::kStatsValueNameSecondaryDiscardedRate,
+ info.secondary_discarded_rate},
+ {StatsReport::kStatsValueNameSpeechExpandRate, info.speech_expand_rate},
+ {StatsReport::kStatsValueNameAccelerateRate, info.accelerate_rate},
+ {StatsReport::kStatsValueNamePreemptiveExpandRate,
+ info.preemptive_expand_rate},
+ {StatsReport::kStatsValueNameTotalAudioEnergy, info.total_output_energy},
+ {StatsReport::kStatsValueNameTotalSamplesDuration,
+ info.total_output_duration}};
+
+ const IntForAdd ints[] = {
+ {StatsReport::kStatsValueNameCurrentDelayMs, info.delay_estimate_ms},
+ {StatsReport::kStatsValueNameDecodingCNG, info.decoding_cng},
+ {StatsReport::kStatsValueNameDecodingCTN, info.decoding_calls_to_neteq},
+ {StatsReport::kStatsValueNameDecodingCTSG,
+ info.decoding_calls_to_silence_generator},
+ {StatsReport::kStatsValueNameDecodingMutedOutput,
+ info.decoding_muted_output},
+ {StatsReport::kStatsValueNameDecodingNormal, info.decoding_normal},
+ {StatsReport::kStatsValueNameDecodingPLC, info.decoding_plc},
+ {StatsReport::kStatsValueNameDecodingPLCCNG, info.decoding_plc_cng},
+ {StatsReport::kStatsValueNameJitterBufferMs, info.jitter_buffer_ms},
+ {StatsReport::kStatsValueNameJitterReceived, info.jitter_ms},
+ {StatsReport::kStatsValueNamePacketsLost, info.packets_lost},
+ {StatsReport::kStatsValueNamePacketsReceived, info.packets_received},
+ {StatsReport::kStatsValueNamePreferredJitterBufferMs,
+ info.jitter_buffer_preferred_ms},
+ };
+
+ for (const auto& f : floats)
+ report->AddFloat(f.name, f.value);
+
+ for (const auto& i : ints)
+ report->AddInt(i.name, i.value);
+ if (info.audio_level >= 0) {
+ report->AddInt(StatsReport::kStatsValueNameAudioOutputLevel,
+ info.audio_level);
+ }
+ if (info.decoding_codec_plc)
+ report->AddInt(StatsReport::kStatsValueNameDecodingCodecPLC,
+ info.decoding_codec_plc);
+
+ int64_t bytes_received = info.payload_bytes_received;
+ if (!use_standard_bytes_stats) {
+ bytes_received += info.header_and_padding_bytes_received;
+ }
+ report->AddInt64(StatsReport::kStatsValueNameBytesReceived, bytes_received);
+ if (info.capture_start_ntp_time_ms >= 0) {
+ report->AddInt64(StatsReport::kStatsValueNameCaptureStartNtpTimeMs,
+ info.capture_start_ntp_time_ms);
+ }
+ report->AddString(StatsReport::kStatsValueNameMediaType, "audio");
+}
+
+void ExtractStats(const cricket::VoiceSenderInfo& info,
+ StatsReport* report,
+ bool use_standard_bytes_stats) {
+ ExtractCommonSendProperties(info, report, use_standard_bytes_stats);
+
+ SetAudioProcessingStats(report, info.apm_statistics);
+
+ const FloatForAdd floats[] = {
+ {StatsReport::kStatsValueNameTotalAudioEnergy, info.total_input_energy},
+ {StatsReport::kStatsValueNameTotalSamplesDuration,
+ info.total_input_duration}};
+
+ RTC_DCHECK_GE(info.audio_level, 0);
+ const IntForAdd ints[] = {
+ {StatsReport::kStatsValueNameAudioInputLevel, info.audio_level},
+ {StatsReport::kStatsValueNameJitterReceived, info.jitter_ms},
+ {StatsReport::kStatsValueNamePacketsLost, info.packets_lost},
+ {StatsReport::kStatsValueNamePacketsSent, info.packets_sent},
+ };
+
+ for (const auto& f : floats) {
+ report->AddFloat(f.name, f.value);
+ }
+
+ for (const auto& i : ints) {
+ if (i.value >= 0) {
+ report->AddInt(i.name, i.value);
+ }
+ }
+ report->AddString(StatsReport::kStatsValueNameMediaType, "audio");
+ if (info.ana_statistics.bitrate_action_counter) {
+ report->AddInt(StatsReport::kStatsValueNameAnaBitrateActionCounter,
+ *info.ana_statistics.bitrate_action_counter);
+ }
+ if (info.ana_statistics.channel_action_counter) {
+ report->AddInt(StatsReport::kStatsValueNameAnaChannelActionCounter,
+ *info.ana_statistics.channel_action_counter);
+ }
+ if (info.ana_statistics.dtx_action_counter) {
+ report->AddInt(StatsReport::kStatsValueNameAnaDtxActionCounter,
+ *info.ana_statistics.dtx_action_counter);
+ }
+ if (info.ana_statistics.fec_action_counter) {
+ report->AddInt(StatsReport::kStatsValueNameAnaFecActionCounter,
+ *info.ana_statistics.fec_action_counter);
+ }
+ if (info.ana_statistics.frame_length_increase_counter) {
+ report->AddInt(StatsReport::kStatsValueNameAnaFrameLengthIncreaseCounter,
+ *info.ana_statistics.frame_length_increase_counter);
+ }
+ if (info.ana_statistics.frame_length_decrease_counter) {
+ report->AddInt(StatsReport::kStatsValueNameAnaFrameLengthDecreaseCounter,
+ *info.ana_statistics.frame_length_decrease_counter);
+ }
+ if (info.ana_statistics.uplink_packet_loss_fraction) {
+ report->AddFloat(StatsReport::kStatsValueNameAnaUplinkPacketLossFraction,
+ *info.ana_statistics.uplink_packet_loss_fraction);
+ }
+}
+
+void ExtractStats(const cricket::VideoReceiverInfo& info,
+ StatsReport* report,
+ bool use_standard_bytes_stats) {
+ ExtractCommonReceiveProperties(info, report);
+ report->AddString(StatsReport::kStatsValueNameCodecImplementationName,
+ info.decoder_implementation_name.value_or("unknown"));
+ int64_t bytes_received = info.payload_bytes_received;
+ if (!use_standard_bytes_stats) {
+ bytes_received += info.header_and_padding_bytes_received;
+ }
+ report->AddInt64(StatsReport::kStatsValueNameBytesReceived, bytes_received);
+ if (info.capture_start_ntp_time_ms >= 0) {
+ report->AddInt64(StatsReport::kStatsValueNameCaptureStartNtpTimeMs,
+ info.capture_start_ntp_time_ms);
+ }
+ if (info.first_frame_received_to_decoded_ms >= 0) {
+ report->AddInt64(StatsReport::kStatsValueNameFirstFrameReceivedToDecodedMs,
+ info.first_frame_received_to_decoded_ms);
+ }
+ if (info.qp_sum)
+ report->AddInt64(StatsReport::kStatsValueNameQpSum, *info.qp_sum);
+
+ if (info.nacks_sent) {
+ report->AddInt(StatsReport::kStatsValueNameNacksSent, *info.nacks_sent);
+ }
+
+ const IntForAdd ints[] = {
+ {StatsReport::kStatsValueNameCurrentDelayMs, info.current_delay_ms},
+ {StatsReport::kStatsValueNameDecodeMs, info.decode_ms},
+ {StatsReport::kStatsValueNameFirsSent, info.firs_sent},
+ {StatsReport::kStatsValueNameFrameHeightReceived, info.frame_height},
+ {StatsReport::kStatsValueNameFrameRateDecoded, info.framerate_decoded},
+ {StatsReport::kStatsValueNameFrameRateOutput, info.framerate_output},
+ {StatsReport::kStatsValueNameFrameRateReceived, info.framerate_received},
+ {StatsReport::kStatsValueNameFrameWidthReceived, info.frame_width},
+ {StatsReport::kStatsValueNameJitterBufferMs, info.jitter_buffer_ms},
+ {StatsReport::kStatsValueNameMaxDecodeMs, info.max_decode_ms},
+ {StatsReport::kStatsValueNameMinPlayoutDelayMs,
+ info.min_playout_delay_ms},
+ {StatsReport::kStatsValueNamePacketsLost, info.packets_lost},
+ {StatsReport::kStatsValueNamePacketsReceived, info.packets_received},
+ {StatsReport::kStatsValueNamePlisSent, info.plis_sent},
+ {StatsReport::kStatsValueNameRenderDelayMs, info.render_delay_ms},
+ {StatsReport::kStatsValueNameTargetDelayMs, info.target_delay_ms},
+ {StatsReport::kStatsValueNameFramesDecoded, info.frames_decoded},
+ };
+
+ for (const auto& i : ints)
+ report->AddInt(i.name, i.value);
+ report->AddString(StatsReport::kStatsValueNameMediaType, "video");
+
+ if (info.timing_frame_info) {
+ report->AddString(StatsReport::kStatsValueNameTimingFrameInfo,
+ info.timing_frame_info->ToString());
+ }
+
+ report->AddInt64(StatsReport::kStatsValueNameInterframeDelayMaxMs,
+ info.interframe_delay_max_ms);
+
+ report->AddString(
+ StatsReport::kStatsValueNameContentType,
+ webrtc::videocontenttypehelpers::ToString(info.content_type));
+}
+
+void ExtractStats(const cricket::VideoSenderInfo& info,
+ StatsReport* report,
+ bool use_standard_bytes_stats) {
+ ExtractCommonSendProperties(info, report, use_standard_bytes_stats);
+
+ report->AddString(StatsReport::kStatsValueNameCodecImplementationName,
+ info.encoder_implementation_name.value_or("unknown"));
+ report->AddBoolean(StatsReport::kStatsValueNameBandwidthLimitedResolution,
+ (info.adapt_reason & 0x2) > 0);
+ report->AddBoolean(StatsReport::kStatsValueNameCpuLimitedResolution,
+ (info.adapt_reason & 0x1) > 0);
+ report->AddBoolean(StatsReport::kStatsValueNameHasEnteredLowResolution,
+ info.has_entered_low_resolution);
+
+ if (info.qp_sum)
+ report->AddInt(StatsReport::kStatsValueNameQpSum, *info.qp_sum);
+
+ const IntForAdd ints[] = {
+ {StatsReport::kStatsValueNameAdaptationChanges, info.adapt_changes},
+ {StatsReport::kStatsValueNameAvgEncodeMs, info.avg_encode_ms},
+ {StatsReport::kStatsValueNameEncodeUsagePercent,
+ info.encode_usage_percent},
+ {StatsReport::kStatsValueNameFirsReceived, info.firs_received},
+ {StatsReport::kStatsValueNameFrameHeightSent, info.send_frame_height},
+ {StatsReport::kStatsValueNameFrameRateInput, round(info.framerate_input)},
+ {StatsReport::kStatsValueNameFrameRateSent, info.framerate_sent},
+ {StatsReport::kStatsValueNameFrameWidthSent, info.send_frame_width},
+ {StatsReport::kStatsValueNameNacksReceived, info.nacks_received},
+ {StatsReport::kStatsValueNamePacketsLost, info.packets_lost},
+ {StatsReport::kStatsValueNamePacketsSent, info.packets_sent},
+ {StatsReport::kStatsValueNamePlisReceived, info.plis_received},
+ {StatsReport::kStatsValueNameFramesEncoded, info.frames_encoded},
+ {StatsReport::kStatsValueNameHugeFramesSent, info.huge_frames_sent},
+ };
+
+ for (const auto& i : ints)
+ report->AddInt(i.name, i.value);
+ report->AddString(StatsReport::kStatsValueNameMediaType, "video");
+ report->AddString(
+ StatsReport::kStatsValueNameContentType,
+ webrtc::videocontenttypehelpers::ToString(info.content_type));
+}
+
+void ExtractStats(const cricket::BandwidthEstimationInfo& info,
+ double stats_gathering_started,
+ StatsReport* report) {
+ RTC_DCHECK(report->type() == StatsReport::kStatsReportTypeBwe);
+
+ report->set_timestamp(stats_gathering_started);
+ const IntForAdd ints[] = {
+ {StatsReport::kStatsValueNameAvailableSendBandwidth,
+ info.available_send_bandwidth},
+ {StatsReport::kStatsValueNameAvailableReceiveBandwidth,
+ info.available_recv_bandwidth},
+ {StatsReport::kStatsValueNameTargetEncBitrate, info.target_enc_bitrate},
+ {StatsReport::kStatsValueNameActualEncBitrate, info.actual_enc_bitrate},
+ {StatsReport::kStatsValueNameRetransmitBitrate, info.retransmit_bitrate},
+ {StatsReport::kStatsValueNameTransmitBitrate, info.transmit_bitrate},
+ };
+ for (const auto& i : ints)
+ report->AddInt(i.name, i.value);
+ report->AddInt64(StatsReport::kStatsValueNameBucketDelay, info.bucket_delay);
+}
+
+void ExtractRemoteStats(const cricket::MediaSenderInfo& info,
+ StatsReport* report) {
+ report->set_timestamp(info.remote_stats[0].timestamp);
+ // TODO(hta): Extract some stats here.
+}
+
+void ExtractRemoteStats(const cricket::MediaReceiverInfo& info,
+ StatsReport* report) {
+ report->set_timestamp(info.remote_stats[0].timestamp);
+ // TODO(hta): Extract some stats here.
+}
+
+std::string GetTrackIdBySsrc(
+ uint32_t ssrc,
+ StatsReport::Direction direction,
+ const std::map<uint32_t, std::string>& track_id_by_ssrc) {
+ auto it = track_id_by_ssrc.find(ssrc);
+ if (it != track_id_by_ssrc.end()) {
+ return it->second;
+ }
+ if (direction == StatsReport::kReceive) {
+ // If the track ID was not found, this might be an unsignaled receive
+ // SSRC, so try looking up by the special SSRC 0.
+ it = track_id_by_ssrc.find(0);
+ if (it != track_id_by_ssrc.end()) {
+ RTC_LOG(LS_INFO) << "Assuming SSRC=" << ssrc
+ << " is an unsignalled receive stream corresponding "
+ "to the RtpReceiver with track ID \""
+ << it->second << "\".";
+ return it->second;
+ }
+ }
+ return "";
+}
+
+// Template to extract stats from a data vector.
+// In order to use the template, the functions that are called from it,
+// ExtractStats and ExtractRemoteStats, must be defined and overloaded
+// for each type.
+template <typename T>
+void ExtractStatsFromList(
+ const std::vector<T>& data,
+ const StatsReport::Id& transport_id,
+ LegacyStatsCollector* collector,
+ StatsReport::Direction direction,
+ const std::map<uint32_t, std::string>& track_id_by_ssrc) {
+ for (const auto& d : data) {
+ uint32_t ssrc = d.ssrc();
+ std::string track_id = GetTrackIdBySsrc(ssrc, direction, track_id_by_ssrc);
+ // Each track can have stats for both local and remote objects.
+ // TODO(hta): Handle the case of multiple SSRCs per object.
+ StatsReport* report =
+ collector->PrepareReport(true, ssrc, track_id, transport_id, direction);
+ if (report)
+ ExtractStats(d, report, collector->UseStandardBytesStats());
+
+ if (!d.remote_stats.empty()) {
+ report = collector->PrepareReport(false, ssrc, track_id, transport_id,
+ direction);
+ if (report)
+ ExtractRemoteStats(d, report);
+ }
+ }
+}
+
+} // namespace
+
+const char* IceCandidateTypeToStatsType(const std::string& candidate_type) {
+ if (candidate_type == cricket::LOCAL_PORT_TYPE) {
+ return STATSREPORT_LOCAL_PORT_TYPE;
+ }
+ if (candidate_type == cricket::STUN_PORT_TYPE) {
+ return STATSREPORT_STUN_PORT_TYPE;
+ }
+ if (candidate_type == cricket::PRFLX_PORT_TYPE) {
+ return STATSREPORT_PRFLX_PORT_TYPE;
+ }
+ if (candidate_type == cricket::RELAY_PORT_TYPE) {
+ return STATSREPORT_RELAY_PORT_TYPE;
+ }
+ RTC_DCHECK_NOTREACHED();
+ return "unknown";
+}
+
+// Maps a network adapter type to the legacy stats network-type string.
+// All cellular generations collapse into the single WWAN constant; an
+// unexpected enum value trips a DCHECK and yields an empty string.
+const char* AdapterTypeToStatsType(rtc::AdapterType type) {
+ switch (type) {
+ case rtc::ADAPTER_TYPE_UNKNOWN:
+ return "unknown";
+ case rtc::ADAPTER_TYPE_ETHERNET:
+ return STATSREPORT_ADAPTER_TYPE_ETHERNET;
+ case rtc::ADAPTER_TYPE_WIFI:
+ return STATSREPORT_ADAPTER_TYPE_WIFI;
+ case rtc::ADAPTER_TYPE_CELLULAR:
+ case rtc::ADAPTER_TYPE_CELLULAR_2G:
+ case rtc::ADAPTER_TYPE_CELLULAR_3G:
+ case rtc::ADAPTER_TYPE_CELLULAR_4G:
+ case rtc::ADAPTER_TYPE_CELLULAR_5G:
+ return STATSREPORT_ADAPTER_TYPE_WWAN;
+ case rtc::ADAPTER_TYPE_VPN:
+ return STATSREPORT_ADAPTER_TYPE_VPN;
+ case rtc::ADAPTER_TYPE_LOOPBACK:
+ return STATSREPORT_ADAPTER_TYPE_LOOPBACK;
+ case rtc::ADAPTER_TYPE_ANY:
+ return STATSREPORT_ADAPTER_TYPE_WILDCARD;
+ default:
+ RTC_DCHECK_NOTREACHED();
+ return "";
+ }
+}
+
+// Caches the UseStandardBytesStats field-trial result at construction so
+// per-report extraction does not re-query the trials. `pc` must be
+// non-null and outlive this collector.
+LegacyStatsCollector::LegacyStatsCollector(PeerConnectionInternal* pc)
+ : pc_(pc),
+ stats_gathering_started_(0),
+ use_standard_bytes_stats_(
+ pc->trials().IsEnabled(kUseStandardBytesStats)) {
+ RTC_DCHECK(pc_);
+}
+
+// Destruction must happen on the signaling thread, where all report
+// state is owned.
+LegacyStatsCollector::~LegacyStatsCollector() {
+ RTC_DCHECK_RUN_ON(pc_->signaling_thread());
+}
+
+// Wallclock time in ms.
+// Deliberately UTC (not the monotonic clock) so report timestamps are
+// comparable across endpoints; UpdateStats uses the monotonic clock
+// separately for its rate limiting.
+double LegacyStatsCollector::GetTimeNow() {
+ return static_cast<double>(rtc::TimeUTCMillis());
+}
+
+// Adds a MediaStream with tracks that can be used as a `selector` in a call
+// to GetStats. Creates a track report for every audio and video track in
+// the stream.
+void LegacyStatsCollector::AddStream(MediaStreamInterface* stream) {
+ RTC_DCHECK_RUN_ON(pc_->signaling_thread());
+ RTC_DCHECK(stream != NULL);
+
+ CreateTrackReports<AudioTrackVector>(stream->GetAudioTracks(), &reports_,
+ &track_ids_);
+ CreateTrackReports<VideoTrackVector>(stream->GetVideoTracks(), &reports_,
+ &track_ids_);
+}
+
+// Creates a track report for a single track, dispatching on its kind()
+// string; any kind other than audio/video is a caller bug.
+void LegacyStatsCollector::AddTrack(MediaStreamTrackInterface* track) {
+ if (track->kind() == MediaStreamTrackInterface::kAudioKind) {
+ CreateTrackReport(static_cast<AudioTrackInterface*>(track), &reports_,
+ &track_ids_);
+ } else if (track->kind() == MediaStreamTrackInterface::kVideoKind) {
+ CreateTrackReport(static_cast<VideoTrackInterface*>(track), &reports_,
+ &track_ids_);
+ } else {
+ RTC_DCHECK_NOTREACHED() << "Illegal track kind";
+ }
+}
+
+// Registers a local audio track/SSRC pair so its input level and APM
+// stats can be merged into the matching send-side ssrc report later.
+// Debug builds assert the exact (track, ssrc) pair was not added twice.
+void LegacyStatsCollector::AddLocalAudioTrack(AudioTrackInterface* audio_track,
+ uint32_t ssrc) {
+ RTC_DCHECK_RUN_ON(pc_->signaling_thread());
+ RTC_DCHECK(audio_track != NULL);
+#if RTC_DCHECK_IS_ON
+ for (const auto& track : local_audio_tracks_)
+ RTC_DCHECK(track.first != audio_track || track.second != ssrc);
+#endif
+
+ local_audio_tracks_.push_back(std::make_pair(audio_track, ssrc));
+
+ // Create the kStatsReportTypeTrack report for the new track if there is no
+ // report yet.
+ StatsReport::Id id(StatsReport::NewTypedId(StatsReport::kStatsReportTypeTrack,
+ audio_track->id()));
+ StatsReport* report = reports_.Find(id);
+ if (!report) {
+ report = reports_.InsertNew(id);
+ report->AddString(StatsReport::kStatsValueNameTrackId, audio_track->id());
+ }
+}
+
+// Unregisters a local audio track/SSRC pair using the erase-remove idiom;
+// the previously created track report is intentionally left in place.
+void LegacyStatsCollector::RemoveLocalAudioTrack(
+ AudioTrackInterface* audio_track,
+ uint32_t ssrc) {
+ RTC_DCHECK(audio_track != NULL);
+ local_audio_tracks_.erase(
+ std::remove_if(
+ local_audio_tracks_.begin(), local_audio_tracks_.end(),
+ [audio_track, ssrc](const LocalAudioTrackVector::value_type& track) {
+ return track.first == audio_track && track.second == ssrc;
+ }),
+ local_audio_tracks_.end());
+}
+
+// Copies cached report pointers into `reports`. With a null `track`
+// selector every report is returned; otherwise the session report, the
+// track's own report, and every ssrc report whose TrackId value matches
+// the track are returned. Does not gather fresh stats (see UpdateStats).
+void LegacyStatsCollector::GetStats(MediaStreamTrackInterface* track,
+ StatsReports* reports) {
+ RTC_DCHECK_RUN_ON(pc_->signaling_thread());
+ RTC_DCHECK(reports != NULL);
+ RTC_DCHECK(reports->empty());
+
+ rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+
+ if (!track) {
+ reports->reserve(reports_.size());
+ for (auto* r : reports_)
+ reports->push_back(r);
+ return;
+ }
+
+ StatsReport* report = reports_.Find(StatsReport::NewTypedId(
+ StatsReport::kStatsReportTypeSession, pc_->session_id()));
+ if (report)
+ reports->push_back(report);
+
+ report = reports_.Find(
+ StatsReport::NewTypedId(StatsReport::kStatsReportTypeTrack, track->id()));
+
+ if (!report)
+ return;
+
+ reports->push_back(report);
+
+ // Include every ssrc report that names this track as its TrackId.
+ std::string track_id;
+ for (const auto* r : reports_) {
+ if (r->type() != StatsReport::kStatsReportTypeSsrc)
+ continue;
+
+ const StatsReport::Value* v =
+ r->FindValue(StatsReport::kStatsValueNameTrackId);
+ if (v && v->string_val() == track->id())
+ reports->push_back(r);
+ }
+}
+
+// Gathers fresh stats into the report cache, rate-limited to one full
+// gather per 50 ms; `level` is currently unused by the gathering steps
+// below.
+void LegacyStatsCollector::UpdateStats(
+ PeerConnectionInterface::StatsOutputLevel level) {
+ RTC_DCHECK_RUN_ON(pc_->signaling_thread());
+ // Calls to UpdateStats() that occur less than kMinGatherStatsPeriodMs apart
+ // will be ignored. Using a monotonic clock specifically for this, while using
+ // a UTC clock for the reports themselves.
+ const int64_t kMinGatherStatsPeriodMs = 50;
+ int64_t cache_now_ms = rtc::TimeMillis();
+ if (cache_timestamp_ms_ != 0 &&
+ cache_timestamp_ms_ + kMinGatherStatsPeriodMs > cache_now_ms) {
+ return;
+ }
+ cache_timestamp_ms_ = cache_now_ms;
+ stats_gathering_started_ = GetTimeNow();
+
+ // TODO(tommi): ExtractSessionInfo now has a single hop to the network thread
+ // to fetch stats, then applies them on the signaling thread. See if we need
+ // to do this synchronously or if updating the stats without blocking is safe.
+ std::map<std::string, std::string> transport_names_by_mid =
+ ExtractSessionAndDataInfo();
+
+ // TODO(tommi): All of these hop over to the worker thread to fetch
+ // information. We could post a task to run all of these and post
+ // the information back to the signaling thread where we can create and
+ // update stats reports. That would also clean up the threading story a bit
+ // since we'd be creating/updating the stats report objects consistently on
+ // the same thread (this class has no locks right now).
+ ExtractBweInfo();
+ ExtractMediaInfo(transport_names_by_mid);
+ ExtractSenderInfo();
+ UpdateTrackReports();
+}
+
+// Finds or creates the (remote-)ssrc report for a given SSRC/direction,
+// refreshing its timestamp and (re)adding the SSRC, optional track id and
+// transport id values. `local` selects ssrc vs. remotessrc report type.
+StatsReport* LegacyStatsCollector::PrepareReport(
+ bool local,
+ uint32_t ssrc,
+ const std::string& track_id,
+ const StatsReport::Id& transport_id,
+ StatsReport::Direction direction) {
+ RTC_DCHECK_RUN_ON(pc_->signaling_thread());
+ StatsReport::Id id(StatsReport::NewIdWithDirection(
+ local ? StatsReport::kStatsReportTypeSsrc
+ : StatsReport::kStatsReportTypeRemoteSsrc,
+ rtc::ToString(ssrc), direction));
+ StatsReport* report = reports_.Find(id);
+ if (!report) {
+ report = reports_.InsertNew(id);
+ }
+
+ // FYI - for remote reports, the timestamp will be overwritten later.
+ report->set_timestamp(stats_gathering_started_);
+
+ report->AddInt64(StatsReport::kStatsValueNameSsrc, ssrc);
+ if (!track_id.empty()) {
+ report->AddString(StatsReport::kStatsValueNameTrackId, track_id);
+ }
+ // Add the mapping of SSRC to transport.
+ report->AddId(StatsReport::kStatsValueNameTransportId, transport_id);
+ return report;
+}
+
+// Returns the session-level report used to carry audio-device (ADM)
+// values, creating it if needed.
+StatsReport* LegacyStatsCollector::PrepareADMReport() {
+ RTC_DCHECK_RUN_ON(pc_->signaling_thread());
+ StatsReport::Id id(StatsReport::NewTypedId(
+ StatsReport::kStatsReportTypeSession, pc_->session_id()));
+ StatsReport* report = reports_.FindOrAddNew(id);
+ return report;
+}
+
+// True iff a track report has previously been created for `track_id`.
+bool LegacyStatsCollector::IsValidTrack(const std::string& track_id) {
+ return reports_.Find(StatsReport::NewTypedId(
+ StatsReport::kStatsReportTypeTrack, track_id)) != nullptr;
+}
+
+// Creates one certificate report per entry in the issuer chain, linking
+// each report to its issuer via kStatsValueNameIssuerId. Returns the
+// leaf (first) report, or nullptr if the chain is empty.
+StatsReport* LegacyStatsCollector::AddCertificateReports(
+ std::unique_ptr<rtc::SSLCertificateStats> cert_stats) {
+ RTC_DCHECK_RUN_ON(pc_->signaling_thread());
+
+ StatsReport* first_report = nullptr;
+ StatsReport* prev_report = nullptr;
+ for (rtc::SSLCertificateStats* stats = cert_stats.get(); stats;
+ stats = stats->issuer.get()) {
+ StatsReport::Id id(StatsReport::NewTypedId(
+ StatsReport::kStatsReportTypeCertificate, stats->fingerprint));
+
+ StatsReport* report = reports_.ReplaceOrAddNew(id);
+ report->set_timestamp(stats_gathering_started_);
+ report->AddString(StatsReport::kStatsValueNameFingerprint,
+ stats->fingerprint);
+ report->AddString(StatsReport::kStatsValueNameFingerprintAlgorithm,
+ stats->fingerprint_algorithm);
+ report->AddString(StatsReport::kStatsValueNameDer,
+ stats->base64_certificate);
+ if (!first_report)
+ first_report = report;
+ else
+ prev_report->AddId(StatsReport::kStatsValueNameIssuerId, id);
+ prev_report = report;
+ }
+ return first_report;
+}
+
+// Builds (or rebuilds) the candidate-pair report for one ICE connection:
+// boolean state flags, links to channel and both candidate reports,
+// byte/packet/RTT counters, and address/type/protocol strings.
+StatsReport* LegacyStatsCollector::AddConnectionInfoReport(
+ const std::string& content_name,
+ int component,
+ int connection_id,
+ const StatsReport::Id& channel_report_id,
+ const cricket::ConnectionInfo& info) {
+ StatsReport::Id id(
+ StatsReport::NewCandidatePairId(content_name, component, connection_id));
+ StatsReport* report = reports_.ReplaceOrAddNew(id);
+ report->set_timestamp(stats_gathering_started_);
+
+ const BoolForAdd bools[] = {
+ {StatsReport::kStatsValueNameActiveConnection, info.best_connection},
+ {StatsReport::kStatsValueNameReceiving, info.receiving},
+ {StatsReport::kStatsValueNameWritable, info.writable},
+ };
+ for (const auto& b : bools)
+ report->AddBoolean(b.name, b.value);
+
+ report->AddId(StatsReport::kStatsValueNameChannelId, channel_report_id);
+ // Paired candidates get (or reuse) their own candidate reports.
+ cricket::CandidateStats local_candidate_stats(info.local_candidate);
+ cricket::CandidateStats remote_candidate_stats(info.remote_candidate);
+ report->AddId(StatsReport::kStatsValueNameLocalCandidateId,
+ AddCandidateReport(local_candidate_stats, true)->id());
+ report->AddId(StatsReport::kStatsValueNameRemoteCandidateId,
+ AddCandidateReport(remote_candidate_stats, false)->id());
+
+ const Int64ForAdd int64s[] = {
+ {StatsReport::kStatsValueNameBytesReceived, info.recv_total_bytes},
+ {StatsReport::kStatsValueNameBytesSent, info.sent_total_bytes},
+ {StatsReport::kStatsValueNamePacketsSent, info.sent_total_packets},
+ {StatsReport::kStatsValueNameRtt, info.rtt},
+ {StatsReport::kStatsValueNameSendPacketsDiscarded,
+ info.sent_discarded_packets},
+ {StatsReport::kStatsValueNameSentPingRequestsTotal,
+ info.sent_ping_requests_total},
+ {StatsReport::kStatsValueNameSentPingRequestsBeforeFirstResponse,
+ info.sent_ping_requests_before_first_response},
+ {StatsReport::kStatsValueNameSentPingResponses, info.sent_ping_responses},
+ {StatsReport::kStatsValueNameRecvPingRequests, info.recv_ping_requests},
+ {StatsReport::kStatsValueNameRecvPingResponses, info.recv_ping_responses},
+ };
+ for (const auto& i : int64s)
+ report->AddInt64(i.name, i.value);
+
+ report->AddString(StatsReport::kStatsValueNameLocalAddress,
+ info.local_candidate.address().ToString());
+ report->AddString(StatsReport::kStatsValueNameLocalCandidateType,
+ info.local_candidate.type());
+ report->AddString(StatsReport::kStatsValueNameRemoteAddress,
+ info.remote_candidate.address().ToString());
+ report->AddString(StatsReport::kStatsValueNameRemoteCandidateType,
+ info.remote_candidate.type());
+ report->AddString(StatsReport::kStatsValueNameTransportType,
+ info.local_candidate.protocol());
+ report->AddString(StatsReport::kStatsValueNameLocalCandidateRelayProtocol,
+ info.local_candidate.relay_protocol());
+
+ return report;
+}
+
+// Finds or creates the report for one ICE candidate. Immutable fields
+// (addresses, priority, type, protocol) are only written on first
+// creation; the timestamp is refreshed on every call, and for local
+// candidates STUN keepalive counters are (re)added when available.
+StatsReport* LegacyStatsCollector::AddCandidateReport(
+ const cricket::CandidateStats& candidate_stats,
+ bool local) {
+ const auto& candidate = candidate_stats.candidate();
+ StatsReport::Id id(StatsReport::NewCandidateId(local, candidate.id()));
+ StatsReport* report = reports_.Find(id);
+ if (!report) {
+ report = reports_.InsertNew(id);
+ report->set_timestamp(stats_gathering_started_);
+ if (local) {
+ report->AddString(StatsReport::kStatsValueNameCandidateNetworkType,
+ AdapterTypeToStatsType(candidate.network_type()));
+ }
+ report->AddString(StatsReport::kStatsValueNameCandidateIPAddress,
+ candidate.address().ipaddr().ToString());
+ report->AddString(StatsReport::kStatsValueNameCandidatePortNumber,
+ candidate.address().PortAsString());
+ report->AddInt(StatsReport::kStatsValueNameCandidatePriority,
+ candidate.priority());
+ report->AddString(StatsReport::kStatsValueNameCandidateType,
+ IceCandidateTypeToStatsType(candidate.type()));
+ report->AddString(StatsReport::kStatsValueNameCandidateTransportType,
+ candidate.protocol());
+ }
+ report->set_timestamp(stats_gathering_started_);
+
+ if (local && candidate_stats.stun_stats().has_value()) {
+ const auto& stun_stats = candidate_stats.stun_stats().value();
+ report->AddInt64(StatsReport::kStatsValueNameSentStunKeepaliveRequests,
+ stun_stats.stun_binding_requests_sent);
+ report->AddInt64(StatsReport::kStatsValueNameRecvStunKeepaliveResponses,
+ stun_stats.stun_binding_responses_received);
+ report->AddFloat(StatsReport::kStatsValueNameStunKeepaliveRttTotal,
+ stun_stats.stun_binding_rtt_ms_total);
+ report->AddFloat(StatsReport::kStatsValueNameStunKeepaliveRttSquaredTotal,
+ stun_stats.stun_binding_rtt_ms_squared_total);
+ }
+
+ return report;
+}
+
+// Hops once to the network thread to collect session stats and data
+// channel reports, then merges and applies them on the signaling thread.
+// Returns the mid -> transport-name mapping for use by ExtractMediaInfo.
+std::map<std::string, std::string>
+LegacyStatsCollector::ExtractSessionAndDataInfo() {
+ TRACE_EVENT0("webrtc", "LegacyStatsCollector::ExtractSessionAndDataInfo");
+ RTC_DCHECK_RUN_ON(pc_->signaling_thread());
+
+ SessionStats stats;
+ StatsCollection::Container data_report_collection;
+ auto transceivers = pc_->GetTransceiversInternal();
+ pc_->network_thread()->BlockingCall(
+ [&, sctp_transport_name = pc_->sctp_transport_name(),
+ sctp_mid = pc_->sctp_mid()]() mutable {
+ stats = ExtractSessionInfo_n(
+ transceivers, std::move(sctp_transport_name), std::move(sctp_mid));
+ StatsCollection data_reports;
+ ExtractDataInfo_n(&data_reports);
+ data_report_collection = data_reports.DetachCollection();
+ });
+
+ reports_.MergeCollection(std::move(data_report_collection));
+
+ ExtractSessionInfo_s(stats);
+
+ return std::move(stats.transport_names_by_mid);
+}
+
+// Network-thread half of session-stats gathering: collects pooled
+// candidate stats, the mid -> transport-name map (including SCTP, if
+// present), per-transport stats, and local/remote certificate stats.
+// Returns everything by value for the signaling thread to consume.
+LegacyStatsCollector::SessionStats LegacyStatsCollector::ExtractSessionInfo_n(
+ const std::vector<rtc::scoped_refptr<
+ RtpTransceiverProxyWithInternal<RtpTransceiver>>>& transceivers,
+ absl::optional<std::string> sctp_transport_name,
+ absl::optional<std::string> sctp_mid) {
+ TRACE_EVENT0("webrtc", "LegacyStatsCollector::ExtractSessionInfo_n");
+ RTC_DCHECK_RUN_ON(pc_->network_thread());
+ rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+ SessionStats stats;
+ stats.candidate_stats = pc_->GetPooledCandidateStats();
+ for (auto& transceiver : transceivers) {
+ cricket::ChannelInterface* channel = transceiver->internal()->channel();
+ if (channel) {
+ stats.transport_names_by_mid[channel->mid()] =
+ std::string(channel->transport_name());
+ }
+ }
+
+ if (sctp_transport_name) {
+ RTC_DCHECK(sctp_mid);
+ stats.transport_names_by_mid[*sctp_mid] = *sctp_transport_name;
+ }
+
+ // De-duplicate transport names before the per-transport stats query.
+ std::set<std::string> transport_names;
+ for (const auto& entry : stats.transport_names_by_mid) {
+ transport_names.insert(entry.second);
+ }
+
+ std::map<std::string, cricket::TransportStats> transport_stats_by_name =
+ pc_->GetTransportStatsByNames(transport_names);
+
+ for (auto& entry : transport_stats_by_name) {
+ stats.transport_stats.emplace_back(entry.first, std::move(entry.second));
+ TransportStats& transport = stats.transport_stats.back();
+
+ // Attempt to get a copy of the certificates from the transport and
+ // expose them in stats reports. All channels in a transport share the
+ // same local and remote certificates.
+ //
+ StatsReport::Id local_cert_report_id, remote_cert_report_id;
+ rtc::scoped_refptr<rtc::RTCCertificate> certificate;
+ if (pc_->GetLocalCertificate(transport.name, &certificate)) {
+ transport.local_cert_stats =
+ certificate->GetSSLCertificateChain().GetStats();
+ }
+
+ std::unique_ptr<rtc::SSLCertChain> remote_cert_chain =
+ pc_->GetRemoteSSLCertChain(transport.name);
+ if (remote_cert_chain) {
+ transport.remote_cert_stats = remote_cert_chain->GetStats();
+ }
+ }
+
+ return stats;
+}
+
+// Signaling-thread half of session-stats gathering: turns the
+// SessionStats collected on the network thread into session, candidate,
+// certificate, component (channel) and candidate-pair reports.
+void LegacyStatsCollector::ExtractSessionInfo_s(SessionStats& session_stats) {
+ RTC_DCHECK_RUN_ON(pc_->signaling_thread());
+ rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+
+ StatsReport::Id id(StatsReport::NewTypedId(
+ StatsReport::kStatsReportTypeSession, pc_->session_id()));
+ StatsReport* report = reports_.ReplaceOrAddNew(id);
+ report->set_timestamp(stats_gathering_started_);
+ report->AddBoolean(StatsReport::kStatsValueNameInitiator,
+ pc_->initial_offerer());
+
+ for (const cricket::CandidateStats& stats : session_stats.candidate_stats) {
+ AddCandidateReport(stats, true);
+ }
+
+ for (auto& transport : session_stats.transport_stats) {
+ // Attempt to get a copy of the certificates from the transport and
+ // expose them in stats reports. All channels in a transport share the
+ // same local and remote certificates.
+ //
+ StatsReport::Id local_cert_report_id, remote_cert_report_id;
+ if (transport.local_cert_stats) {
+ StatsReport* r =
+ AddCertificateReports(std::move(transport.local_cert_stats));
+ if (r)
+ local_cert_report_id = r->id();
+ }
+
+ if (transport.remote_cert_stats) {
+ StatsReport* r =
+ AddCertificateReports(std::move(transport.remote_cert_stats));
+ if (r)
+ remote_cert_report_id = r->id();
+ }
+
+ for (const auto& channel_iter : transport.stats.channel_stats) {
+ StatsReport::Id channel_stats_id(
+ StatsReport::NewComponentId(transport.name, channel_iter.component));
+ StatsReport* channel_report = reports_.ReplaceOrAddNew(channel_stats_id);
+ channel_report->set_timestamp(stats_gathering_started_);
+ channel_report->AddInt(StatsReport::kStatsValueNameComponent,
+ channel_iter.component);
+ if (local_cert_report_id.get()) {
+ channel_report->AddId(StatsReport::kStatsValueNameLocalCertificateId,
+ local_cert_report_id);
+ }
+ if (remote_cert_report_id.get()) {
+ channel_report->AddId(StatsReport::kStatsValueNameRemoteCertificateId,
+ remote_cert_report_id);
+ }
+ // Only report cipher names that are known/valid.
+ int srtp_crypto_suite = channel_iter.srtp_crypto_suite;
+ if (srtp_crypto_suite != rtc::kSrtpInvalidCryptoSuite &&
+ rtc::SrtpCryptoSuiteToName(srtp_crypto_suite).length()) {
+ channel_report->AddString(
+ StatsReport::kStatsValueNameSrtpCipher,
+ rtc::SrtpCryptoSuiteToName(srtp_crypto_suite));
+ }
+ int ssl_cipher_suite = channel_iter.ssl_cipher_suite;
+ if (ssl_cipher_suite != rtc::kTlsNullWithNullNull &&
+ rtc::SSLStreamAdapter::SslCipherSuiteToName(ssl_cipher_suite)
+ .length()) {
+ channel_report->AddString(
+ StatsReport::kStatsValueNameDtlsCipher,
+ rtc::SSLStreamAdapter::SslCipherSuiteToName(ssl_cipher_suite));
+ }
+
+ // Collect stats for non-pooled candidates. Note that the reports
+ // generated here supersede the candidate reports generated in
+ // AddConnectionInfoReport below, and they may report candidates that are
+ // not paired. Also, the candidate report generated in
+ // AddConnectionInfoReport do not report port stats like StunStats.
+ for (const cricket::CandidateStats& stats :
+ channel_iter.ice_transport_stats.candidate_stats_list) {
+ AddCandidateReport(stats, true);
+ }
+
+ int connection_id = 0;
+ for (const cricket::ConnectionInfo& info :
+ channel_iter.ice_transport_stats.connection_infos) {
+ StatsReport* connection_report = AddConnectionInfoReport(
+ transport.name, channel_iter.component, connection_id++,
+ channel_report->id(), info);
+ if (info.best_connection) {
+ channel_report->AddId(
+ StatsReport::kStatsValueNameSelectedCandidatePairId,
+ connection_report->id());
+ }
+ }
+ }
+ }
+}
+
+// Gathers bandwidth-estimation stats from Call and, via a worker-thread
+// hop, per-channel encoder bitrate info for every video transceiver,
+// then writes them into the single BWE report. No-op once closed.
+void LegacyStatsCollector::ExtractBweInfo() {
+ RTC_DCHECK_RUN_ON(pc_->signaling_thread());
+
+ if (pc_->signaling_state() == PeerConnectionInterface::kClosed)
+ return;
+
+ webrtc::Call::Stats call_stats = pc_->GetCallStats();
+ cricket::BandwidthEstimationInfo bwe_info;
+ bwe_info.available_send_bandwidth = call_stats.send_bandwidth_bps;
+ bwe_info.available_recv_bandwidth = call_stats.recv_bandwidth_bps;
+ bwe_info.bucket_delay = call_stats.pacer_delay_ms;
+
+ // Fill in target encoder bitrate, actual encoder bitrate, rtx bitrate, etc.
+ // TODO(holmer): Also fill this in for audio.
+ auto transceivers = pc_->GetTransceiversInternal();
+ std::vector<cricket::VideoMediaSendChannelInterface*> video_media_channels;
+ for (const auto& transceiver : transceivers) {
+ if (transceiver->media_type() != cricket::MEDIA_TYPE_VIDEO) {
+ continue;
+ }
+ auto* video_channel = transceiver->internal()->channel();
+ if (video_channel) {
+ video_media_channels.push_back(video_channel->video_media_send_channel());
+ }
+ }
+
+ if (!video_media_channels.empty()) {
+ pc_->worker_thread()->BlockingCall([&] {
+ for (const auto& channel : video_media_channels) {
+ channel->FillBitrateInfo(&bwe_info);
+ }
+ });
+ }
+
+ StatsReport::Id report_id(StatsReport::NewBandwidthEstimationId());
+ StatsReport* report = reports_.FindOrAddNew(report_id);
+ ExtractStats(bwe_info, stats_gathering_started_, report);
+}
+
+namespace {
+
+// Abstract helper that carries per-channel identity (mid, transport name,
+// SSRC -> track-id maps) across threads: stats are fetched on the worker
+// thread (GetStatsOnWorkerThread) and turned into reports on the
+// signaling thread (ExtractStats).
+class ChannelStatsGatherer {
+ public:
+ virtual ~ChannelStatsGatherer() = default;
+
+ // Fetches media stats; must be called on the worker thread. Returns
+ // false on failure.
+ virtual bool GetStatsOnWorkerThread() = 0;
+
+ // Converts the fetched stats into StatsReports; signaling thread.
+ virtual void ExtractStats(LegacyStatsCollector* collector) const = 0;
+
+ // True if any remote audio receiver stats were fetched.
+ virtual bool HasRemoteAudio() const = 0;
+
+ std::string mid;
+ std::string transport_name;
+ std::map<uint32_t, std::string> sender_track_id_by_ssrc;
+ std::map<uint32_t, std::string> receiver_track_id_by_ssrc;
+
+ protected:
+ // Shared receive/send extraction used by both audio and video
+ // subclasses; ties reports to this channel's RTP transport component.
+ template <typename ReceiverT, typename SenderT>
+ void ExtractSenderReceiverStats(
+ LegacyStatsCollector* collector,
+ const std::vector<ReceiverT>& receiver_data,
+ const std::vector<SenderT>& sender_data) const {
+ RTC_DCHECK(collector);
+ StatsReport::Id transport_id = StatsReport::NewComponentId(
+ transport_name, cricket::ICE_CANDIDATE_COMPONENT_RTP);
+ ExtractStatsFromList(receiver_data, transport_id, collector,
+ StatsReport::kReceive, receiver_track_id_by_ssrc);
+ ExtractStatsFromList(sender_data, transport_id, collector,
+ StatsReport::kSend, sender_track_id_by_ssrc);
+ }
+};
+
+// Audio specialization: fetches voice send/receive stats and, besides the
+// per-SSRC reports, surfaces audio-device underrun counts on the session
+// (ADM) report.
+class VoiceChannelStatsGatherer final : public ChannelStatsGatherer {
+ public:
+ explicit VoiceChannelStatsGatherer(cricket::VoiceChannel* voice_channel)
+ : voice_channel_(voice_channel) {
+ RTC_DCHECK(voice_channel_);
+ }
+
+ bool GetStatsOnWorkerThread() override {
+ cricket::VoiceMediaSendInfo send_info;
+ cricket::VoiceMediaReceiveInfo receive_info;
+ bool success =
+ voice_channel_->voice_media_send_channel()->GetStats(&send_info);
+ // get_and_clear_legacy_stats=true: this legacy collector consumes and
+ // resets the legacy receive counters.
+ success &= voice_channel_->voice_media_receive_channel()->GetStats(
+ &receive_info,
+ /*get_and_clear_legacy_stats=*/true);
+ if (success) {
+ voice_media_info = cricket::VoiceMediaInfo(std::move(send_info),
+ std::move(receive_info));
+ }
+ return success;
+ }
+
+ void ExtractStats(LegacyStatsCollector* collector) const override {
+ ExtractSenderReceiverStats(collector, voice_media_info.receivers,
+ voice_media_info.senders);
+ // -2 is a sentinel meaning "underruns occurred but count unknown";
+ // otherwise only report positive counts.
+ if (voice_media_info.device_underrun_count == -2 ||
+ voice_media_info.device_underrun_count > 0) {
+ StatsReport* report = collector->PrepareADMReport();
+ report->AddInt(StatsReport::kStatsValueNameAudioDeviceUnderrunCounter,
+ voice_media_info.device_underrun_count);
+ }
+ }
+
+ bool HasRemoteAudio() const override {
+ return !voice_media_info.receivers.empty();
+ }
+
+ private:
+ cricket::VoiceChannel* voice_channel_;
+ cricket::VoiceMediaInfo voice_media_info;
+};
+
+// Video specialization: fetches video send/receive stats; uses
+// aggregated_senders so simulcast layers roll up into one sender entry.
+class VideoChannelStatsGatherer final : public ChannelStatsGatherer {
+ public:
+ explicit VideoChannelStatsGatherer(cricket::VideoChannel* video_channel)
+ : video_channel_(video_channel) {
+ RTC_DCHECK(video_channel_);
+ }
+
+ bool GetStatsOnWorkerThread() override {
+ cricket::VideoMediaSendInfo send_info;
+ cricket::VideoMediaReceiveInfo receive_info;
+ bool success =
+ video_channel_->video_media_send_channel()->GetStats(&send_info);
+ success &=
+ video_channel_->video_media_receive_channel()->GetStats(&receive_info);
+ if (success) {
+ video_media_info = cricket::VideoMediaInfo(std::move(send_info),
+ std::move(receive_info));
+ }
+ return success;
+ }
+
+ void ExtractStats(LegacyStatsCollector* collector) const override {
+ ExtractSenderReceiverStats(collector, video_media_info.receivers,
+ video_media_info.aggregated_senders);
+ }
+
+ bool HasRemoteAudio() const override { return false; }
+
+ private:
+ cricket::VideoChannel* video_channel_;
+ cricket::VideoMediaInfo video_media_info;
+};
+
+// Factory: picks the voice or video gatherer based on the channel's
+// media type; any other media type is a programming error.
+std::unique_ptr<ChannelStatsGatherer> CreateChannelStatsGatherer(
+ cricket::ChannelInterface* channel) {
+ RTC_DCHECK(channel);
+ if (channel->media_type() == cricket::MEDIA_TYPE_AUDIO) {
+ return std::make_unique<VoiceChannelStatsGatherer>(
+ channel->AsVoiceChannel());
+ } else {
+ RTC_DCHECK_EQ(channel->media_type(), cricket::MEDIA_TYPE_VIDEO);
+ return std::make_unique<VideoChannelStatsGatherer>(
+ channel->AsVideoChannel());
+ }
+}
+
+} // namespace
+
+// Builds one gatherer per transceiver with a channel (signaling thread),
+// hops once to the worker thread to fill receiver SSRC maps and fetch
+// media stats (dropping gatherers whose fetch failed), then extracts
+// reports back on the signaling thread and refreshes local audio tracks.
+void LegacyStatsCollector::ExtractMediaInfo(
+ const std::map<std::string, std::string>& transport_names_by_mid) {
+ RTC_DCHECK_RUN_ON(pc_->signaling_thread());
+
+ std::vector<std::unique_ptr<ChannelStatsGatherer>> gatherers;
+
+ auto transceivers = pc_->GetTransceiversInternal();
+ {
+ rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+ for (const auto& transceiver : transceivers) {
+ cricket::ChannelInterface* channel = transceiver->internal()->channel();
+ if (!channel) {
+ continue;
+ }
+ std::unique_ptr<ChannelStatsGatherer> gatherer =
+ CreateChannelStatsGatherer(channel);
+ gatherer->mid = channel->mid();
+ gatherer->transport_name = transport_names_by_mid.at(gatherer->mid);
+
+ for (const auto& sender : transceiver->internal()->senders()) {
+ auto track = sender->track();
+ std::string track_id = (track ? track->id() : "");
+ gatherer->sender_track_id_by_ssrc.insert(
+ std::make_pair(sender->ssrc(), track_id));
+ }
+
+ // Populating `receiver_track_id_by_ssrc` will be done on the worker
+ // thread as the `ssrc` property of the receiver needs to be accessed
+ // there.
+
+ gatherers.push_back(std::move(gatherer));
+ }
+ }
+
+ pc_->worker_thread()->BlockingCall([&] {
+ rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+ // Populate `receiver_track_id_by_ssrc` for the gatherers.
+ // `i` tracks position in `gatherers`, which was built by filtering the
+ // same transceiver list on channels being non-null.
+ int i = 0;
+ for (const auto& transceiver : transceivers) {
+ cricket::ChannelInterface* channel = transceiver->internal()->channel();
+ if (!channel)
+ continue;
+ ChannelStatsGatherer* gatherer = gatherers[i++].get();
+ RTC_DCHECK_EQ(gatherer->mid, channel->mid());
+
+ for (const auto& receiver : transceiver->internal()->receivers()) {
+ gatherer->receiver_track_id_by_ssrc.insert(std::make_pair(
+ receiver->internal()->ssrc().value_or(0), receiver->track()->id()));
+ }
+ }
+
+ for (auto it = gatherers.begin(); it != gatherers.end();
+ /* incremented manually */) {
+ ChannelStatsGatherer* gatherer = it->get();
+ if (!gatherer->GetStatsOnWorkerThread()) {
+ RTC_LOG(LS_ERROR) << "Failed to get media channel stats for mid="
+ << gatherer->mid;
+ it = gatherers.erase(it);
+ continue;
+ }
+ ++it;
+ }
+ });
+
+ rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+
+ bool has_remote_audio = false;
+ for (const auto& gatherer : gatherers) {
+ gatherer->ExtractStats(this);
+ has_remote_audio |= gatherer->HasRemoteAudio();
+ }
+
+ UpdateStatsFromExistingLocalAudioTracks(has_remote_audio);
+}
+
+// Adds input frame width/height from each video sender's track source to
+// the corresponding send-side ssrc report. Senders without an SSRC or
+// without a video track are skipped.
+void LegacyStatsCollector::ExtractSenderInfo() {
+ RTC_DCHECK_RUN_ON(pc_->signaling_thread());
+
+ for (const auto& sender : pc_->GetSenders()) {
+ // TODO(bugs.webrtc.org/8694): SSRC == 0 currently means none. Delete check
+ // when that is fixed.
+ if (!sender->ssrc()) {
+ continue;
+ }
+ const rtc::scoped_refptr<MediaStreamTrackInterface> track(sender->track());
+ if (!track || track->kind() != MediaStreamTrackInterface::kVideoKind) {
+ continue;
+ }
+ // Safe, because kind() == kVideoKind implies a subclass of
+ // VideoTrackInterface; see mediastreaminterface.h.
+ VideoTrackSourceInterface* source =
+ static_cast<VideoTrackInterface*>(track.get())->GetSource();
+
+ VideoTrackSourceInterface::Stats stats;
+ if (!source->GetStats(&stats)) {
+ continue;
+ }
+ const StatsReport::Id stats_id = StatsReport::NewIdWithDirection(
+ StatsReport::kStatsReportTypeSsrc, rtc::ToString(sender->ssrc()),
+ StatsReport::kSend);
+ StatsReport* report = reports_.FindOrAddNew(stats_id);
+ report->AddInt(StatsReport::kStatsValueNameFrameWidthInput,
+ stats.input_width);
+ report->AddInt(StatsReport::kStatsValueNameFrameHeightInput,
+ stats.input_height);
+ }
+}
+
+// Network-thread collection of data-channel reports (label, id, protocol,
+// state) into a caller-provided collection, which is later merged into
+// `reports_` on the signaling thread.
+void LegacyStatsCollector::ExtractDataInfo_n(StatsCollection* reports) {
+ RTC_DCHECK_RUN_ON(pc_->network_thread());
+
+ rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+
+ std::vector<DataChannelStats> data_stats = pc_->GetDataChannelStats();
+ for (const auto& stats : data_stats) {
+ StatsReport::Id id(StatsReport::NewTypedIntId(
+ StatsReport::kStatsReportTypeDataChannel, stats.id));
+ StatsReport* report = reports->ReplaceOrAddNew(id);
+ report->set_timestamp(stats_gathering_started_);
+ report->AddString(StatsReport::kStatsValueNameLabel, stats.label);
+ // Filter out the initial id (-1).
+ if (stats.id >= 0) {
+ report->AddInt(StatsReport::kStatsValueNameDataChannelId, stats.id);
+ }
+ report->AddString(StatsReport::kStatsValueNameProtocol, stats.protocol);
+ report->AddString(StatsReport::kStatsValueNameState,
+ DataChannelInterface::DataStateString(stats.state));
+ }
+}
+
+// Looks up an existing (remote-)ssrc report by id and direction; only the
+// two ssrc report types are legal here. Returns nullptr if absent.
+StatsReport* LegacyStatsCollector::GetReport(const StatsReport::StatsType& type,
+ const std::string& id,
+ StatsReport::Direction direction) {
+ RTC_DCHECK_RUN_ON(pc_->signaling_thread());
+ RTC_DCHECK(type == StatsReport::kStatsReportTypeSsrc ||
+ type == StatsReport::kStatsReportTypeRemoteSsrc);
+ return reports_.Find(StatsReport::NewIdWithDirection(type, id, direction));
+}
+
+// For every registered local audio track, finds its send-side ssrc report
+// (skipping tracks whose report is missing or whose SSRC is currently
+// mapped to a different track) and refreshes its audio-level/APM values.
+void LegacyStatsCollector::UpdateStatsFromExistingLocalAudioTracks(
+ bool has_remote_tracks) {
+ RTC_DCHECK_RUN_ON(pc_->signaling_thread());
+ // Loop through the existing local audio tracks.
+ for (const auto& it : local_audio_tracks_) {
+ AudioTrackInterface* track = it.first;
+ uint32_t ssrc = it.second;
+ StatsReport* report = GetReport(StatsReport::kStatsReportTypeSsrc,
+ rtc::ToString(ssrc), StatsReport::kSend);
+ if (report == NULL) {
+ // This can happen if a local audio track is added to a stream on the
+ // fly and the report has not been set up yet. Do nothing in this case.
+ RTC_LOG(LS_ERROR) << "Stats report does not exist for ssrc " << ssrc;
+ continue;
+ }
+
+ // The same ssrc can be used by both local and remote audio tracks.
+ const StatsReport::Value* v =
+ report->FindValue(StatsReport::kStatsValueNameTrackId);
+ if (!v || v->string_val() != track->id())
+ continue;
+
+ report->set_timestamp(stats_gathering_started_);
+ UpdateReportFromAudioTrack(track, report, has_remote_tracks);
+ }
+}
+
+// Adds the track's current input signal level and, if an audio processor
+// is attached, its APM statistics to `report`. `has_remote_tracks` is
+// forwarded so the processor can compute echo-related stats.
+void LegacyStatsCollector::UpdateReportFromAudioTrack(
+ AudioTrackInterface* track,
+ StatsReport* report,
+ bool has_remote_tracks) {
+ RTC_DCHECK_RUN_ON(pc_->signaling_thread());
+ RTC_DCHECK(track != NULL);
+
+ // Don't overwrite report values if they're not available.
+ int signal_level;
+ if (track->GetSignalLevel(&signal_level)) {
+ RTC_DCHECK_GE(signal_level, 0);
+ report->AddInt(StatsReport::kStatsValueNameAudioInputLevel, signal_level);
+ }
+
+ auto audio_processor(track->GetAudioProcessor());
+
+ if (audio_processor.get()) {
+ AudioProcessorInterface::AudioProcessorStatistics stats =
+ audio_processor->GetStats(has_remote_tracks);
+
+ SetAudioProcessingStats(report, stats.apm_statistics);
+ }
+}
+
+// Refreshes the timestamp on every cached track report so they carry the
+// time of the current gathering pass.
+void LegacyStatsCollector::UpdateTrackReports() {
+ RTC_DCHECK_RUN_ON(pc_->signaling_thread());
+
+ rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+
+ for (const auto& entry : track_ids_) {
+ StatsReport* report = entry.second;
+ report->set_timestamp(stats_gathering_started_);
+ }
+}
+
+// Resets the rate-limit timestamp so the next UpdateStats() call gathers
+// fresh stats regardless of the 50 ms minimum interval.
+void LegacyStatsCollector::InvalidateCache() {
+ RTC_DCHECK_RUN_ON(pc_->signaling_thread());
+ cache_timestamp_ms_ = 0;
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/legacy_stats_collector.h b/third_party/libwebrtc/pc/legacy_stats_collector.h
new file mode 100644
index 0000000000..e905b39d48
--- /dev/null
+++ b/third_party/libwebrtc/pc/legacy_stats_collector.h
@@ -0,0 +1,218 @@
+/*
+ * Copyright 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file contains a class used for gathering statistics from an ongoing
+// libjingle PeerConnection.
+
+#ifndef PC_LEGACY_STATS_COLLECTOR_H_
+#define PC_LEGACY_STATS_COLLECTOR_H_
+
+#include <stdint.h>
+
+#include <algorithm>
+#include <cstdint>
+#include <map>
+#include <memory>
+#include <string>
+#include <type_traits>
+#include <utility>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/field_trials_view.h"
+#include "api/legacy_stats_types.h"
+#include "api/media_stream_interface.h"
+#include "api/peer_connection_interface.h"
+#include "api/scoped_refptr.h"
+#include "p2p/base/connection_info.h"
+#include "p2p/base/port.h"
+#include "pc/legacy_stats_collector_interface.h"
+#include "pc/peer_connection_internal.h"
+#include "pc/rtp_transceiver.h"
+#include "pc/transport_stats.h"
+#include "rtc_base/network_constants.h"
+#include "rtc_base/ssl_certificate.h"
+#include "rtc_base/thread_annotations.h"
+
+namespace webrtc {
+
+// Conversion function to convert candidate type string to the corresponding one
+// from enum RTCStatsIceCandidateType.
+const char* IceCandidateTypeToStatsType(const std::string& candidate_type);
+
+// Conversion function to convert adapter type to report string which are more
+// fitting to the general style of http://w3c.github.io/webrtc-stats. This is
+// only used by stats collector.
+const char* AdapterTypeToStatsType(rtc::AdapterType type);
+
+// A mapping between track ids and their StatsReport.
+typedef std::map<std::string, StatsReport*> TrackIdMap;
+
class LegacyStatsCollector : public LegacyStatsCollectorInterface {
 public:
  // The caller is responsible for ensuring that the pc outlives the
  // LegacyStatsCollector instance.
  explicit LegacyStatsCollector(PeerConnectionInternal* pc);
  virtual ~LegacyStatsCollector();

  // Adds a MediaStream with tracks that can be used as a `selector` in a call
  // to GetStats.
  void AddStream(MediaStreamInterface* stream);
  void AddTrack(MediaStreamTrackInterface* track);

  // Adds a local audio track that is used for getting some voice statistics.
  void AddLocalAudioTrack(AudioTrackInterface* audio_track,
                          uint32_t ssrc) override;

  // Removes a local audio tracks that is used for getting some voice
  // statistics.
  void RemoveLocalAudioTrack(AudioTrackInterface* audio_track,
                             uint32_t ssrc) override;

  // Gather statistics from the session and store them for future use.
  void UpdateStats(PeerConnectionInterface::StatsOutputLevel level);

  // Gets a StatsReports of the last collected stats. Note that UpdateStats must
  // be called before this function to get the most recent stats. `selector` is
  // a track label or empty string. The most recent reports are stored in
  // `reports`.
  // TODO(tommi): Change this contract to accept a callback object instead
  // of filling in `reports`. As is, there's a requirement that the caller
  // uses `reports` immediately without allowing any async activity on
  // the thread (message handling etc) and then discard the results.
  void GetStats(MediaStreamTrackInterface* track,
                StatsReports* reports) override;

  // Prepare a local or remote SSRC report for the given ssrc. Used internally
  // in the ExtractStatsFromList template.
  StatsReport* PrepareReport(bool local,
                             uint32_t ssrc,
                             const std::string& track_id,
                             const StatsReport::Id& transport_id,
                             StatsReport::Direction direction);

  // Prepares the report for the audio device module (ADM).
  StatsReport* PrepareADMReport();

  // A track is invalid if there is no report data for it.
  bool IsValidTrack(const std::string& track_id);

  // Reset the internal cache timestamp to force an update of the stats next
  // time UpdateStats() is called. This call needs to be made on the signaling
  // thread and should be made every time configuration changes that affect
  // stats have been made.
  void InvalidateCache();

  // Whether byte counters follow the standardized (payload-only) definition;
  // cached from `use_standard_bytes_stats_` at construction.
  bool UseStandardBytesStats() const { return use_standard_bytes_stats_; }

 private:
  friend class LegacyStatsCollectorTest;

  // Struct that's populated on the network thread and carries the values to
  // the signaling thread where the stats are added to the stats reports.
  struct TransportStats {
    TransportStats() = default;
    TransportStats(std::string transport_name,
                   cricket::TransportStats transport_stats)
        : name(std::move(transport_name)), stats(std::move(transport_stats)) {}
    TransportStats(TransportStats&&) = default;
    TransportStats(const TransportStats&) = delete;

    std::string name;
    cricket::TransportStats stats;
    std::unique_ptr<rtc::SSLCertificateStats> local_cert_stats;
    std::unique_ptr<rtc::SSLCertificateStats> remote_cert_stats;
  };

  // Session-level stats gathered on the network thread; move-only.
  struct SessionStats {
    SessionStats() = default;
    SessionStats(SessionStats&&) = default;
    SessionStats(const SessionStats&) = delete;

    SessionStats& operator=(SessionStats&&) = default;
    SessionStats& operator=(SessionStats&) = delete;

    cricket::CandidateStatsList candidate_stats;
    std::vector<TransportStats> transport_stats;
    std::map<std::string, std::string> transport_names_by_mid;
  };

  // Overridden in unit tests to fake timing.
  virtual double GetTimeNow();

  bool CopySelectedReports(const std::string& selector, StatsReports* reports);

  // Helper method for creating IceCandidate report. `is_local` indicates
  // whether this candidate is local or remote.
  StatsReport* AddCandidateReport(
      const cricket::CandidateStats& candidate_stats,
      bool local);

  // Adds a report for this certificate and every certificate in its chain, and
  // returns the leaf certificate's report (`cert_stats`'s report).
  StatsReport* AddCertificateReports(
      std::unique_ptr<rtc::SSLCertificateStats> cert_stats);

  StatsReport* AddConnectionInfoReport(const std::string& content_name,
                                       int component,
                                       int connection_id,
                                       const StatsReport::Id& channel_report_id,
                                       const cricket::ConnectionInfo& info);

  // Populates data-channel reports; runs on the network thread (suffix `_n`).
  void ExtractDataInfo_n(StatsCollection* reports);

  // Returns the `transport_names_by_mid` member from the SessionStats as
  // gathered and used to populate the stats. Contains one synchronous hop
  // to the network thread to get this information along with querying data
  // channel stats at the same time and populating `reports_`.
  std::map<std::string, std::string> ExtractSessionAndDataInfo();

  void ExtractBweInfo();
  void ExtractMediaInfo(
      const std::map<std::string, std::string>& transport_names_by_mid);
  void ExtractSenderInfo();
  webrtc::StatsReport* GetReport(const StatsReport::StatsType& type,
                                 const std::string& id,
                                 StatsReport::Direction direction);

  // Helper method to get stats from the local audio tracks.
  void UpdateStatsFromExistingLocalAudioTracks(bool has_remote_tracks);
  void UpdateReportFromAudioTrack(AudioTrackInterface* track,
                                  StatsReport* report,
                                  bool has_remote_tracks);

  // Helper method to update the timestamp of track records.
  void UpdateTrackReports();

  // Gathers session info on the network thread (`_n`); consumed on the
  // signaling thread by ExtractSessionInfo_s (`_s`).
  SessionStats ExtractSessionInfo_n(
      const std::vector<rtc::scoped_refptr<
          RtpTransceiverProxyWithInternal<RtpTransceiver>>>& transceivers,
      absl::optional<std::string> sctp_transport_name,
      absl::optional<std::string> sctp_mid);
  void ExtractSessionInfo_s(SessionStats& session_stats);

  // A collection for all of our stats reports.
  StatsCollection reports_;
  // Maps track ids to their StatsReport; used to refresh report timestamps.
  TrackIdMap track_ids_;
  // Raw pointer to the peer connection the statistics are gathered from.
  PeerConnectionInternal* const pc_;
  // Timestamp of the last cached stats gathering; 0 forces a refresh.
  int64_t cache_timestamp_ms_ RTC_GUARDED_BY(pc_->signaling_thread()) = 0;
  // Time (from GetTimeNow) stamped onto reports for the current pass.
  double stats_gathering_started_;
  const bool use_standard_bytes_stats_;

  // TODO(tommi): We appear to be holding on to raw pointers to reference
  // counted objects? We should be using scoped_refptr here.
  typedef std::vector<std::pair<AudioTrackInterface*, uint32_t>>
      LocalAudioTrackVector;
  LocalAudioTrackVector local_audio_tracks_;
};
+
+} // namespace webrtc
+
+#endif // PC_LEGACY_STATS_COLLECTOR_H_
diff --git a/third_party/libwebrtc/pc/legacy_stats_collector_interface.h b/third_party/libwebrtc/pc/legacy_stats_collector_interface.h
new file mode 100644
index 0000000000..a0c6f3bd65
--- /dev/null
+++ b/third_party/libwebrtc/pc/legacy_stats_collector_interface.h
@@ -0,0 +1,43 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file contains an interface for the (obsolete) StatsCollector class that
+// is used by compilation units that do not wish to depend on the StatsCollector
+// implementation.
+
+#ifndef PC_LEGACY_STATS_COLLECTOR_INTERFACE_H_
+#define PC_LEGACY_STATS_COLLECTOR_INTERFACE_H_
+
+#include <stdint.h>
+
+#include "api/legacy_stats_types.h"
+#include "api/media_stream_interface.h"
+
+namespace webrtc {
+
// Minimal interface for the legacy stats collector so that compilation units
// can depend on it without pulling in the full implementation.
class LegacyStatsCollectorInterface {
 public:
  virtual ~LegacyStatsCollectorInterface() {}

  // Adds a local audio track that is used for getting some voice statistics.
  virtual void AddLocalAudioTrack(AudioTrackInterface* audio_track,
                                  uint32_t ssrc) = 0;

  // Removes a local audio tracks that is used for getting some voice
  // statistics.
  virtual void RemoveLocalAudioTrack(AudioTrackInterface* audio_track,
                                     uint32_t ssrc) = 0;
  // Fills `reports` with the most recently gathered stats for `track`
  // (callers pass null to request all reports — see the implementation).
  virtual void GetStats(MediaStreamTrackInterface* track,
                        StatsReports* reports) = 0;
};
+
+} // namespace webrtc
+
+#endif // PC_LEGACY_STATS_COLLECTOR_INTERFACE_H_
diff --git a/third_party/libwebrtc/pc/legacy_stats_collector_unittest.cc b/third_party/libwebrtc/pc/legacy_stats_collector_unittest.cc
new file mode 100644
index 0000000000..3099d1188a
--- /dev/null
+++ b/third_party/libwebrtc/pc/legacy_stats_collector_unittest.cc
@@ -0,0 +1,1964 @@
+/*
+ * Copyright 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/legacy_stats_collector.h"
+
+#include <stdio.h>
+
+#include <cstdint>
+
+#include "absl/algorithm/container.h"
+#include "absl/types/optional.h"
+#include "api/audio_codecs/audio_encoder.h"
+#include "api/candidate.h"
+#include "api/data_channel_interface.h"
+#include "api/media_stream_track.h"
+#include "api/media_types.h"
+#include "api/rtp_sender_interface.h"
+#include "api/scoped_refptr.h"
+#include "call/call.h"
+#include "media/base/media_channel.h"
+#include "modules/audio_processing/include/audio_processing_statistics.h"
+#include "p2p/base/ice_transport_internal.h"
+#include "pc/media_stream.h"
+#include "pc/rtp_receiver.h"
+#include "pc/rtp_sender.h"
+#include "pc/sctp_data_channel.h"
+#include "pc/test/fake_peer_connection_for_stats.h"
+#include "pc/test/fake_video_track_source.h"
+#include "pc/test/mock_rtp_receiver_internal.h"
+#include "pc/test/mock_rtp_sender_internal.h"
+#include "pc/transport_stats.h"
+#include "pc/video_track.h"
+#include "rtc_base/fake_ssl_identity.h"
+#include "rtc_base/message_digest.h"
+#include "rtc_base/net_helper.h"
+#include "rtc_base/null_socket_server.h"
+#include "rtc_base/rtc_certificate.h"
+#include "rtc_base/socket_address.h"
+#include "rtc_base/ssl_identity.h"
+#include "rtc_base/ssl_stream_adapter.h"
+#include "rtc_base/string_encode.h"
+#include "rtc_base/third_party/base64/base64.h"
+#include "rtc_base/thread.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+using cricket::ConnectionInfo;
+using cricket::SsrcReceiverInfo;
+using cricket::TransportChannelStats;
+using cricket::VideoMediaInfo;
+using cricket::VideoReceiverInfo;
+using cricket::VideoSenderInfo;
+using cricket::VoiceMediaInfo;
+using cricket::VoiceReceiverInfo;
+using cricket::VoiceSenderInfo;
+using ::testing::_;
+using ::testing::AtMost;
+using ::testing::Return;
+using ::testing::UnorderedElementsAre;
+
+namespace webrtc {
+
+namespace internal {
+// This value comes from openssl/tls1.h
+static const int TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA = 0xC014;
+} // namespace internal
+
+// Error return values
+const char kNotFound[] = "NOT FOUND";
+
+// Constant names for track identification.
+const char kLocalTrackId[] = "local_track_id";
+const char kRemoteTrackId[] = "remote_track_id";
+const uint32_t kSsrcOfTrack = 1234;
+
// Test double for AudioProcessorInterface. Returns fixed echo/delay stats
// only when receive streams exist; otherwise returns default (unset) stats.
class FakeAudioProcessor : public AudioProcessorInterface {
 public:
  FakeAudioProcessor() {}
  ~FakeAudioProcessor() {}

 private:
  AudioProcessorInterface::AudioProcessorStatistics GetStats(
      bool has_recv_streams) override {
    AudioProcessorStatistics stats;
    if (has_recv_streams) {
      stats.apm_statistics.echo_return_loss = 2.0;
      stats.apm_statistics.echo_return_loss_enhancement = 3.0;
      stats.apm_statistics.delay_median_ms = 4;
      stats.apm_statistics.delay_standard_deviation_ms = 5;
    }
    return stats;
  }
};
+
// Audio track test double: reports a fixed signal level of 1 and exposes a
// FakeAudioProcessor as its audio processor. Source and sinks are no-ops.
class FakeAudioTrack : public MediaStreamTrack<AudioTrackInterface> {
 public:
  explicit FakeAudioTrack(const std::string& id)
      : MediaStreamTrack<AudioTrackInterface>(id),
        processor_(rtc::make_ref_counted<FakeAudioProcessor>()) {}
  std::string kind() const override { return "audio"; }
  AudioSourceInterface* GetSource() const override { return NULL; }
  void AddSink(AudioTrackSinkInterface* sink) override {}
  void RemoveSink(AudioTrackSinkInterface* sink) override {}
  bool GetSignalLevel(int* level) override {
    *level = 1;
    return true;
  }
  rtc::scoped_refptr<AudioProcessorInterface> GetAudioProcessor() override {
    return processor_;
  }

 private:
  rtc::scoped_refptr<FakeAudioProcessor> processor_;
};
+
// This fake audio processor is used to verify that the undesired initial values
// (-1) will be filtered out. It always returns default-constructed (unset)
// statistics regardless of receive streams.
class FakeAudioProcessorWithInitValue : public AudioProcessorInterface {
 public:
  FakeAudioProcessorWithInitValue() {}
  ~FakeAudioProcessorWithInitValue() {}

 private:
  AudioProcessorInterface::AudioProcessorStatistics GetStats(
      bool /*has_recv_streams*/) override {
    AudioProcessorStatistics stats;
    return stats;
  }
};
+
// Same shape as FakeAudioTrack but backed by FakeAudioProcessorWithInitValue,
// i.e. its processor reports only unset statistics.
class FakeAudioTrackWithInitValue
    : public MediaStreamTrack<AudioTrackInterface> {
 public:
  explicit FakeAudioTrackWithInitValue(const std::string& id)
      : MediaStreamTrack<AudioTrackInterface>(id),
        processor_(rtc::make_ref_counted<FakeAudioProcessorWithInitValue>()) {}
  std::string kind() const override { return "audio"; }
  AudioSourceInterface* GetSource() const override { return NULL; }
  void AddSink(AudioTrackSinkInterface* sink) override {}
  void RemoveSink(AudioTrackSinkInterface* sink) override {}
  bool GetSignalLevel(int* level) override {
    *level = 1;
    return true;
  }
  rtc::scoped_refptr<AudioProcessorInterface> GetAudioProcessor() override {
    return processor_;
  }

 private:
  rtc::scoped_refptr<FakeAudioProcessorWithInitValue> processor_;
};
+
+bool GetValue(const StatsReport* report,
+ StatsReport::StatsValueName name,
+ std::string* value) {
+ const StatsReport::Value* v = report->FindValue(name);
+ if (!v)
+ return false;
+ *value = v->ToString();
+ return true;
+}
+
+std::string ExtractStatsValue(const StatsReport::StatsType& type,
+ const StatsReports& reports,
+ StatsReport::StatsValueName name) {
+ for (const auto* r : reports) {
+ std::string ret;
+ if (r->type() == type && GetValue(r, name, &ret))
+ return ret;
+ }
+
+ return kNotFound;
+}
+
// Parses a StatsReport id string of the form "<prefix>_<value>" back into a
// typed StatsReport::Id of `type`. EXPECTs on (and returns a null id for)
// empty or malformed input, and EXPECTs that the rebuilt id round-trips to
// the original string.
StatsReport::Id TypedIdFromIdString(StatsReport::StatsType type,
                                    const std::string& value) {
  EXPECT_FALSE(value.empty());
  StatsReport::Id id;
  if (value.empty())
    return id;

  // This has assumptions about how the ID is constructed. As is, this is
  // OK since this is for testing purposes only, but if we ever need this
  // in production, we should add a generic method that does this.
  size_t index = value.find('_');
  EXPECT_NE(index, std::string::npos);
  if (index == std::string::npos || index == (value.length() - 1))
    return id;

  id = StatsReport::NewTypedId(type, value.substr(index + 1));
  EXPECT_EQ(id->ToString(), value);
  return id;
}
+
// Convenience wrapper: parses a certificate report id string into a typed id.
StatsReport::Id IdFromCertIdString(const std::string& cert_id) {
  return TypedIdFromIdString(StatsReport::kStatsReportTypeCertificate, cert_id);
}
+
+// Finds the `n`-th report of type `type` in `reports`.
+// `n` starts from 1 for finding the first report.
+const StatsReport* FindNthReportByType(const StatsReports& reports,
+ const StatsReport::StatsType& type,
+ int n) {
+ for (size_t i = 0; i < reports.size(); ++i) {
+ if (reports[i]->type() == type) {
+ n--;
+ if (n == 0)
+ return reports[i];
+ }
+ }
+ return nullptr;
+}
+
+// Returns the value of the stat identified by `name` in the `n`-th report of
+// type `type` in `reports`.
+// `n` starts from 1 for finding the first report.
+// If either the `n`-th report is not found, or the stat is not present in that
+// report, then nullopt is returned.
+absl::optional<std::string> GetValueInNthReportByType(
+ const StatsReports& reports,
+ StatsReport::StatsType type,
+ StatsReport::StatsValueName name,
+ int n) {
+ const StatsReport* report = FindNthReportByType(reports, type, n);
+ if (!report) {
+ return absl::nullopt;
+ }
+ std::string value;
+ if (!GetValue(report, name, &value)) {
+ return absl::nullopt;
+ }
+ return value;
+}
+
+std::vector<const StatsReport*> GetReportsByType(const StatsReports& reports,
+ StatsReport::StatsType type) {
+ std::vector<const StatsReport*> filtered_reports;
+ for (const StatsReport* report : reports) {
+ if (report->type() == type) {
+ filtered_reports.push_back(report);
+ }
+ }
+ return filtered_reports;
+}
+
+const StatsReport* FindReportById(const StatsReports& reports,
+ const StatsReport::Id& id) {
+ for (const auto* r : reports) {
+ if (r->id()->Equals(id))
+ return r;
+ }
+ return nullptr;
+}
+
// Shorthand for extracting stat `name` from the first matching ssrc report.
std::string ExtractSsrcStatsValue(const StatsReports& reports,
                                  StatsReport::StatsValueName name) {
  return ExtractStatsValue(StatsReport::kStatsReportTypeSsrc, reports, name);
}
+
// Shorthand for extracting stat `name` from the first matching BWE report.
std::string ExtractBweStatsValue(const StatsReports& reports,
                                 StatsReport::StatsValueName name) {
  return ExtractStatsValue(StatsReport::kStatsReportTypeBwe, reports, name);
}
+
// Wraps a DER-encoded certificate blob in PEM ("CERTIFICATE") armor.
std::string DerToPem(const std::string& der) {
  return rtc::SSLIdentity::DerToPem(
      rtc::kPemTypeCertificate,
      reinterpret_cast<const unsigned char*>(der.c_str()), der.length());
}
+
+std::vector<std::string> DersToPems(const std::vector<std::string>& ders) {
+ std::vector<std::string> pems(ders.size());
+ absl::c_transform(ders, pems.begin(), DerToPem);
+ return pems;
+}
+
// Walks the certificate chain of reports starting at `start_id`, verifying
// for each link that the DER bytes match `ders`, that the fingerprint
// algorithm is SHA-1 (what FakeSSLCertificate uses), and that a fingerprint
// is present. Finally checks that the chain length equals ders.size().
void CheckCertChainReports(const StatsReports& reports,
                           const std::vector<std::string>& ders,
                           const StatsReport::Id& start_id) {
  StatsReport::Id cert_id;
  const StatsReport::Id* certificate_id = &start_id;
  size_t i = 0;
  while (true) {
    const StatsReport* report = FindReportById(reports, *certificate_id);
    ASSERT_TRUE(report != NULL);

    std::string der_base64;
    EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameDer, &der_base64));
    std::string der = rtc::Base64::Decode(der_base64, rtc::Base64::DO_STRICT);
    EXPECT_EQ(ders[i], der);

    std::string fingerprint_algorithm;
    EXPECT_TRUE(GetValue(report,
                         StatsReport::kStatsValueNameFingerprintAlgorithm,
                         &fingerprint_algorithm));
    // The digest algorithm for a FakeSSLCertificate is always SHA-1.
    std::string sha_1_str = rtc::DIGEST_SHA_1;
    EXPECT_EQ(sha_1_str, fingerprint_algorithm);

    std::string fingerprint;
    EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameFingerprint,
                         &fingerprint));
    EXPECT_FALSE(fingerprint.empty());

    ++i;
    // Follow the issuer link to the next certificate; the chain ends when a
    // report has no issuer id.
    std::string issuer_id;
    if (!GetValue(report, StatsReport::kStatsValueNameIssuerId, &issuer_id)) {
      break;
    }

    cert_id = IdFromCertIdString(issuer_id);
    certificate_id = &cert_id;
  }
  EXPECT_EQ(ders.size(), i);
}
+
// Checks that every receiver-side voice stat in `report` matches the
// corresponding field of `info`. Note BytesReceived is the sum of payload
// and header/padding bytes, matching how the collector reports it.
void VerifyVoiceReceiverInfoReport(const StatsReport* report,
                                   const cricket::VoiceReceiverInfo& info) {
  std::string value_in_report;
  EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameAudioOutputLevel,
                       &value_in_report));
  EXPECT_EQ(rtc::ToString(info.audio_level), value_in_report);
  EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameBytesReceived,
                       &value_in_report));
  EXPECT_EQ(rtc::ToString(info.payload_bytes_received +
                          info.header_and_padding_bytes_received),
            value_in_report);
  EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameJitterReceived,
                       &value_in_report));
  EXPECT_EQ(rtc::ToString(info.jitter_ms), value_in_report);
  EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameJitterBufferMs,
                       &value_in_report));
  EXPECT_EQ(rtc::ToString(info.jitter_buffer_ms), value_in_report);
  EXPECT_TRUE(GetValue(report,
                       StatsReport::kStatsValueNamePreferredJitterBufferMs,
                       &value_in_report));
  EXPECT_EQ(rtc::ToString(info.jitter_buffer_preferred_ms), value_in_report);
  EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameCurrentDelayMs,
                       &value_in_report));
  EXPECT_EQ(rtc::ToString(info.delay_estimate_ms), value_in_report);
  EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameExpandRate,
                       &value_in_report));
  EXPECT_EQ(rtc::ToString(info.expand_rate), value_in_report);
  EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameSpeechExpandRate,
                       &value_in_report));
  EXPECT_EQ(rtc::ToString(info.speech_expand_rate), value_in_report);
  EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameAccelerateRate,
                       &value_in_report));
  EXPECT_EQ(rtc::ToString(info.accelerate_rate), value_in_report);
  EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNamePreemptiveExpandRate,
                       &value_in_report));
  EXPECT_EQ(rtc::ToString(info.preemptive_expand_rate), value_in_report);
  EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameSecondaryDecodedRate,
                       &value_in_report));
  EXPECT_EQ(rtc::ToString(info.secondary_decoded_rate), value_in_report);
  EXPECT_TRUE(GetValue(report,
                       StatsReport::kStatsValueNameSecondaryDiscardedRate,
                       &value_in_report));
  EXPECT_EQ(rtc::ToString(info.secondary_discarded_rate), value_in_report);
  EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNamePacketsReceived,
                       &value_in_report));
  EXPECT_EQ(rtc::ToString(info.packets_received), value_in_report);
  EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameDecodingCTSG,
                       &value_in_report));
  EXPECT_EQ(rtc::ToString(info.decoding_calls_to_silence_generator),
            value_in_report);
  EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameDecodingCTN,
                       &value_in_report));
  EXPECT_EQ(rtc::ToString(info.decoding_calls_to_neteq), value_in_report);
  EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameDecodingNormal,
                       &value_in_report));
  EXPECT_EQ(rtc::ToString(info.decoding_normal), value_in_report);
  EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameDecodingPLC,
                       &value_in_report));
  EXPECT_EQ(rtc::ToString(info.decoding_plc), value_in_report);
  EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameDecodingCodecPLC,
                       &value_in_report));
  EXPECT_EQ(rtc::ToString(info.decoding_codec_plc), value_in_report);
  EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameDecodingCNG,
                       &value_in_report));
  EXPECT_EQ(rtc::ToString(info.decoding_cng), value_in_report);
  EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameDecodingPLCCNG,
                       &value_in_report));
  EXPECT_EQ(rtc::ToString(info.decoding_plc_cng), value_in_report);
  EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameDecodingMutedOutput,
                       &value_in_report));
  EXPECT_EQ(rtc::ToString(info.decoding_muted_output), value_in_report);
  // Only presence is checked for the codec name; its value is not compared.
  EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameCodecName,
                       &value_in_report));
}
+
+void VerifyVoiceSenderInfoReport(const StatsReport* report,
+ const cricket::VoiceSenderInfo& sinfo) {
+ std::string value_in_report;
+ EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameCodecName,
+ &value_in_report));
+ EXPECT_EQ(sinfo.codec_name, value_in_report);
+ EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameBytesSent,
+ &value_in_report));
+ EXPECT_EQ(rtc::ToString(sinfo.payload_bytes_sent +
+ sinfo.header_and_padding_bytes_sent),
+ value_in_report);
+ EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNamePacketsSent,
+ &value_in_report));
+ EXPECT_EQ(rtc::ToString(sinfo.packets_sent), value_in_report);
+ EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNamePacketsLost,
+ &value_in_report));
+ EXPECT_EQ(rtc::ToString(sinfo.packets_lost), value_in_report);
+ EXPECT_TRUE(
+ GetValue(report, StatsReport::kStatsValueNameRtt, &value_in_report));
+ EXPECT_EQ(rtc::ToString(sinfo.rtt_ms), value_in_report);
+ EXPECT_TRUE(
+ GetValue(report, StatsReport::kStatsValueNameRtt, &value_in_report));
+ EXPECT_EQ(rtc::ToString(sinfo.rtt_ms), value_in_report);
+ EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameJitterReceived,
+ &value_in_report));
+ EXPECT_EQ(rtc::ToString(sinfo.jitter_ms), value_in_report);
+ if (sinfo.apm_statistics.delay_median_ms) {
+ EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameEchoDelayMedian,
+ &value_in_report));
+ EXPECT_EQ(rtc::ToString(*sinfo.apm_statistics.delay_median_ms),
+ value_in_report);
+ } else {
+ EXPECT_FALSE(GetValue(report, StatsReport::kStatsValueNameEchoDelayMedian,
+ &value_in_report));
+ }
+ if (sinfo.apm_statistics.delay_standard_deviation_ms) {
+ EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameEchoDelayStdDev,
+ &value_in_report));
+ EXPECT_EQ(rtc::ToString(*sinfo.apm_statistics.delay_standard_deviation_ms),
+ value_in_report);
+ } else {
+ EXPECT_FALSE(GetValue(report, StatsReport::kStatsValueNameEchoDelayStdDev,
+ &value_in_report));
+ }
+ if (sinfo.apm_statistics.echo_return_loss) {
+ EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameEchoReturnLoss,
+ &value_in_report));
+ EXPECT_EQ(rtc::ToString(*sinfo.apm_statistics.echo_return_loss),
+ value_in_report);
+ } else {
+ EXPECT_FALSE(GetValue(report, StatsReport::kStatsValueNameEchoReturnLoss,
+ &value_in_report));
+ }
+ if (sinfo.apm_statistics.echo_return_loss_enhancement) {
+ EXPECT_TRUE(GetValue(report,
+ StatsReport::kStatsValueNameEchoReturnLossEnhancement,
+ &value_in_report));
+ EXPECT_EQ(rtc::ToString(*sinfo.apm_statistics.echo_return_loss_enhancement),
+ value_in_report);
+ } else {
+ EXPECT_FALSE(GetValue(report,
+ StatsReport::kStatsValueNameEchoReturnLossEnhancement,
+ &value_in_report));
+ }
+ if (sinfo.apm_statistics.residual_echo_likelihood) {
+ EXPECT_TRUE(GetValue(report,
+ StatsReport::kStatsValueNameResidualEchoLikelihood,
+ &value_in_report));
+ EXPECT_EQ(rtc::ToString(*sinfo.apm_statistics.residual_echo_likelihood),
+ value_in_report);
+ } else {
+ EXPECT_FALSE(GetValue(report,
+ StatsReport::kStatsValueNameResidualEchoLikelihood,
+ &value_in_report));
+ }
+ if (sinfo.apm_statistics.residual_echo_likelihood_recent_max) {
+ EXPECT_TRUE(GetValue(
+ report, StatsReport::kStatsValueNameResidualEchoLikelihoodRecentMax,
+ &value_in_report));
+ EXPECT_EQ(rtc::ToString(
+ *sinfo.apm_statistics.residual_echo_likelihood_recent_max),
+ value_in_report);
+ } else {
+ EXPECT_FALSE(GetValue(
+ report, StatsReport::kStatsValueNameResidualEchoLikelihoodRecentMax,
+ &value_in_report));
+ }
+ EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameAudioInputLevel,
+ &value_in_report));
+ EXPECT_EQ(rtc::ToString(sinfo.audio_level), value_in_report);
+ EXPECT_TRUE(GetValue(report,
+ StatsReport::kStatsValueNameAnaBitrateActionCounter,
+ &value_in_report));
+ ASSERT_TRUE(sinfo.ana_statistics.bitrate_action_counter);
+ EXPECT_EQ(rtc::ToString(*sinfo.ana_statistics.bitrate_action_counter),
+ value_in_report);
+ EXPECT_TRUE(GetValue(report,
+ StatsReport::kStatsValueNameAnaChannelActionCounter,
+ &value_in_report));
+ ASSERT_TRUE(sinfo.ana_statistics.channel_action_counter);
+ EXPECT_EQ(rtc::ToString(*sinfo.ana_statistics.channel_action_counter),
+ value_in_report);
+ EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameAnaDtxActionCounter,
+ &value_in_report));
+ ASSERT_TRUE(sinfo.ana_statistics.dtx_action_counter);
+ EXPECT_EQ(rtc::ToString(*sinfo.ana_statistics.dtx_action_counter),
+ value_in_report);
+ EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameAnaFecActionCounter,
+ &value_in_report));
+ ASSERT_TRUE(sinfo.ana_statistics.fec_action_counter);
+ EXPECT_EQ(rtc::ToString(*sinfo.ana_statistics.fec_action_counter),
+ value_in_report);
+ EXPECT_TRUE(GetValue(
+ report, StatsReport::kStatsValueNameAnaFrameLengthIncreaseCounter,
+ &value_in_report));
+ ASSERT_TRUE(sinfo.ana_statistics.frame_length_increase_counter);
+ EXPECT_EQ(rtc::ToString(*sinfo.ana_statistics.frame_length_increase_counter),
+ value_in_report);
+ EXPECT_TRUE(GetValue(
+ report, StatsReport::kStatsValueNameAnaFrameLengthDecreaseCounter,
+ &value_in_report));
+ ASSERT_TRUE(sinfo.ana_statistics.frame_length_decrease_counter);
+ EXPECT_EQ(rtc::ToString(*sinfo.ana_statistics.frame_length_decrease_counter),
+ value_in_report);
+ EXPECT_TRUE(GetValue(report,
+ StatsReport::kStatsValueNameAnaUplinkPacketLossFraction,
+ &value_in_report));
+ ASSERT_TRUE(sinfo.ana_statistics.uplink_packet_loss_fraction);
+ EXPECT_EQ(rtc::ToString(*sinfo.ana_statistics.uplink_packet_loss_fraction),
+ value_in_report);
+}
+
// Helper methods to avoid duplication of code.
// Fills `voice_sender_info` with distinct, recognizable values so tests can
// verify each stat is propagated independently. `ssrc` defaults to
// kSsrcOfTrack.
void InitVoiceSenderInfo(cricket::VoiceSenderInfo* voice_sender_info,
                         uint32_t ssrc = kSsrcOfTrack) {
  voice_sender_info->add_ssrc(ssrc);
  voice_sender_info->codec_name = "fake_codec";
  voice_sender_info->payload_bytes_sent = 88;
  voice_sender_info->header_and_padding_bytes_sent = 12;
  voice_sender_info->packets_sent = 101;
  voice_sender_info->rtt_ms = 102;
  voice_sender_info->fraction_lost = 103;
  voice_sender_info->jitter_ms = 104;
  voice_sender_info->packets_lost = 105;
  voice_sender_info->audio_level = 107;
  voice_sender_info->apm_statistics.echo_return_loss = 108;
  voice_sender_info->apm_statistics.echo_return_loss_enhancement = 109;
  voice_sender_info->apm_statistics.delay_median_ms = 110;
  voice_sender_info->apm_statistics.delay_standard_deviation_ms = 111;
  voice_sender_info->ana_statistics.bitrate_action_counter = 112;
  voice_sender_info->ana_statistics.channel_action_counter = 113;
  voice_sender_info->ana_statistics.dtx_action_counter = 114;
  voice_sender_info->ana_statistics.fec_action_counter = 115;
  voice_sender_info->ana_statistics.frame_length_increase_counter = 116;
  voice_sender_info->ana_statistics.frame_length_decrease_counter = 117;
  voice_sender_info->ana_statistics.uplink_packet_loss_fraction = 118.0;
}
+
// Overwrites `voice_sender_info`'s audio level and APM statistics with the
// values the given track reports, mirroring what the collector itself does
// in UpdateReportFromAudioTrack.
void UpdateVoiceSenderInfoFromAudioTrack(
    AudioTrackInterface* audio_track,
    cricket::VoiceSenderInfo* voice_sender_info,
    bool has_remote_tracks) {
  audio_track->GetSignalLevel(&voice_sender_info->audio_level);
  AudioProcessorInterface::AudioProcessorStatistics audio_processor_stats =
      audio_track->GetAudioProcessor()->GetStats(has_remote_tracks);
  voice_sender_info->apm_statistics = audio_processor_stats.apm_statistics;
}
+
// Fills `voice_receiver_info` with distinct, recognizable values (ssrc fixed
// to kSsrcOfTrack) so tests can verify each stat is propagated independently.
void InitVoiceReceiverInfo(cricket::VoiceReceiverInfo* voice_receiver_info) {
  voice_receiver_info->add_ssrc(kSsrcOfTrack);
  voice_receiver_info->payload_bytes_received = 98;
  voice_receiver_info->header_and_padding_bytes_received = 12;
  voice_receiver_info->packets_received = 111;
  voice_receiver_info->packets_lost = 114;
  voice_receiver_info->jitter_ms = 116;
  voice_receiver_info->jitter_buffer_ms = 117;
  voice_receiver_info->jitter_buffer_preferred_ms = 118;
  voice_receiver_info->delay_estimate_ms = 119;
  voice_receiver_info->audio_level = 120;
  voice_receiver_info->expand_rate = 121;
  voice_receiver_info->speech_expand_rate = 122;
  voice_receiver_info->secondary_decoded_rate = 123;
  voice_receiver_info->accelerate_rate = 124;
  voice_receiver_info->preemptive_expand_rate = 125;
  voice_receiver_info->secondary_discarded_rate = 126;
  voice_receiver_info->decoding_codec_plc = 127;
}
+
+// LegacyStatsCollector whose clock is frozen at a fixed value so report
+// timestamps are deterministic and can be compared with EXPECT_EQ.
+class LegacyStatsCollectorForTest : public LegacyStatsCollector {
+ public:
+  explicit LegacyStatsCollectorForTest(PeerConnectionInternal* pc)
+      : LegacyStatsCollector(pc), time_now_(19477) {}
+
+  // Returns the fixed fake time instead of the real wall clock.
+  double GetTimeNow() override { return time_now_; }
+
+ private:
+  double time_now_;  // Arbitrary constant timestamp (19477).
+};
+
+// Base fixture for the legacy (callback-based GetStats) stats collector
+// tests. Provides a fake PeerConnection, a collector with a frozen clock,
+// and shared verification helpers.
+class LegacyStatsCollectorTest : public ::testing::Test {
+ protected:
+  rtc::scoped_refptr<FakePeerConnectionForStats> CreatePeerConnection() {
+    return rtc::make_ref_counted<FakePeerConnectionForStats>();
+  }
+
+  std::unique_ptr<LegacyStatsCollectorForTest> CreateStatsCollector(
+      PeerConnectionInternal* pc) {
+    return std::make_unique<LegacyStatsCollectorForTest>(pc);
+  }
+
+  // Updates the collector, then checks that the ssrc report for `audio_track`
+  // carries the expected track/ssrc ids and the sender/receiver values from
+  // `voice_info`. The check runs twice: once via a global GetStats(nullptr)
+  // query, and once by passing the track itself to GetStats().
+  void VerifyAudioTrackStats(FakeAudioTrack* audio_track,
+                             LegacyStatsCollectorForTest* stats,
+                             const VoiceMediaInfo& voice_info,
+                             StatsReports* reports) {
+    stats->UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+    // Invalidate the cache so GetStats() below reflects the update above.
+    stats->InvalidateCache();
+    stats->GetStats(nullptr, reports);
+
+    // Verify the existence of the track report.
+    const StatsReport* report =
+        FindNthReportByType(*reports, StatsReport::kStatsReportTypeSsrc, 1);
+    ASSERT_TRUE(report);
+    EXPECT_EQ(stats->GetTimeNow(), report->timestamp());
+    std::string track_id =
+        ExtractSsrcStatsValue(*reports, StatsReport::kStatsValueNameTrackId);
+    EXPECT_EQ(audio_track->id(), track_id);
+    std::string ssrc_id =
+        ExtractSsrcStatsValue(*reports, StatsReport::kStatsValueNameSsrc);
+    EXPECT_EQ(rtc::ToString(kSsrcOfTrack), ssrc_id);
+
+    std::string media_type =
+        ExtractSsrcStatsValue(*reports, StatsReport::kStatsValueNameMediaType);
+    EXPECT_EQ("audio", media_type);
+
+    // Verifies the values in the track report.
+    if (!voice_info.senders.empty()) {
+      VerifyVoiceSenderInfoReport(report, voice_info.senders[0]);
+    }
+    if (!voice_info.receivers.empty()) {
+      VerifyVoiceReceiverInfoReport(report, voice_info.receivers[0]);
+    }
+
+    // Verify we get the same result by passing a track to GetStats().
+    StatsReports track_reports;  // returned values.
+    stats->GetStats(audio_track, &track_reports);
+    const StatsReport* track_report = FindNthReportByType(
+        track_reports, StatsReport::kStatsReportTypeSsrc, 1);
+    ASSERT_TRUE(track_report);
+    EXPECT_EQ(stats->GetTimeNow(), track_report->timestamp());
+    track_id = ExtractSsrcStatsValue(track_reports,
+                                     StatsReport::kStatsValueNameTrackId);
+    EXPECT_EQ(audio_track->id(), track_id);
+    ssrc_id =
+        ExtractSsrcStatsValue(track_reports, StatsReport::kStatsValueNameSsrc);
+    EXPECT_EQ(rtc::ToString(kSsrcOfTrack), ssrc_id);
+    if (!voice_info.senders.empty()) {
+      VerifyVoiceSenderInfoReport(track_report, voice_info.senders[0]);
+    }
+    if (!voice_info.receivers.empty()) {
+      VerifyVoiceReceiverInfoReport(track_report, voice_info.receivers[0]);
+    }
+  }
+
+  // Installs `local_identity`/`remote_identity` on a fake transport and
+  // verifies that the component report links certificate-chain reports whose
+  // DERs match `local_ders`/`remote_ders` (no link expected when the vector
+  // is empty), and that the negotiated DTLS/SRTP cipher names are reported.
+  void TestCertificateReports(const rtc::FakeSSLIdentity& local_identity,
+                              const std::vector<std::string>& local_ders,
+                              const rtc::FakeSSLIdentity& remote_identity,
+                              const std::vector<std::string>& remote_ders) {
+    const std::string kTransportName = "transport";
+
+    auto pc = CreatePeerConnection();
+    auto stats = CreateStatsCollector(pc.get());
+
+    pc->AddVoiceChannel("audio", kTransportName);
+
+    // Fake stats to process.
+    TransportChannelStats channel_stats;
+    channel_stats.component = 1;
+    channel_stats.srtp_crypto_suite = rtc::kSrtpAes128CmSha1_80;
+    channel_stats.ssl_cipher_suite =
+        internal::TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA;
+    pc->SetTransportStats(kTransportName, channel_stats);
+
+    // Fake certificate to report.
+    rtc::scoped_refptr<rtc::RTCCertificate> local_certificate(
+        rtc::RTCCertificate::Create(local_identity.Clone()));
+    pc->SetLocalCertificate(kTransportName, local_certificate);
+    pc->SetRemoteCertChain(kTransportName,
+                           remote_identity.cert_chain().Clone());
+
+    stats->UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+
+    StatsReports reports;
+    stats->GetStats(nullptr, &reports);
+
+    const StatsReport* channel_report =
+        FindNthReportByType(reports, StatsReport::kStatsReportTypeComponent, 1);
+    EXPECT_TRUE(channel_report);
+
+    // Check local certificate chain.
+    std::string local_certificate_id =
+        ExtractStatsValue(StatsReport::kStatsReportTypeComponent, reports,
+                          StatsReport::kStatsValueNameLocalCertificateId);
+    if (local_ders.size() > 0) {
+      EXPECT_NE(kNotFound, local_certificate_id);
+      StatsReport::Id id(IdFromCertIdString(local_certificate_id));
+      CheckCertChainReports(reports, local_ders, id);
+    } else {
+      EXPECT_EQ(kNotFound, local_certificate_id);
+    }
+
+    // Check remote certificate chain.
+    std::string remote_certificate_id =
+        ExtractStatsValue(StatsReport::kStatsReportTypeComponent, reports,
+                          StatsReport::kStatsValueNameRemoteCertificateId);
+    if (remote_ders.size() > 0) {
+      EXPECT_NE(kNotFound, remote_certificate_id);
+      StatsReport::Id id(IdFromCertIdString(remote_certificate_id));
+      CheckCertChainReports(reports, remote_ders, id);
+    } else {
+      EXPECT_EQ(kNotFound, remote_certificate_id);
+    }
+
+    // Check negotiated ciphers.
+    std::string dtls_cipher_suite =
+        ExtractStatsValue(StatsReport::kStatsReportTypeComponent, reports,
+                          StatsReport::kStatsValueNameDtlsCipher);
+    EXPECT_EQ(rtc::SSLStreamAdapter::SslCipherSuiteToName(
+                  internal::TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA),
+              dtls_cipher_suite);
+    std::string srtp_crypto_suite =
+        ExtractStatsValue(StatsReport::kStatsReportTypeComponent, reports,
+                          StatsReport::kStatsValueNameSrtpCipher);
+    EXPECT_EQ(rtc::SrtpCryptoSuiteToName(rtc::kSrtpAes128CmSha1_80),
+              srtp_crypto_suite);
+  }
+
+ private:
+  rtc::AutoThread main_thread_;  // Makes a current rtc::Thread available.
+};
+
+// Builds a mock RTP sender bound to `track` and `ssrc`. media_type() is
+// derived from the track's kind; lifecycle calls (SetMediaChannel,
+// SetTransceiverAsStopped, Stop) are stubbed with loose cardinalities.
+static rtc::scoped_refptr<MockRtpSenderInternal> CreateMockSender(
+    rtc::scoped_refptr<MediaStreamTrackInterface> track,
+    uint32_t ssrc) {
+  auto sender = rtc::make_ref_counted<MockRtpSenderInternal>();
+  EXPECT_CALL(*sender, track()).WillRepeatedly(Return(track));
+  EXPECT_CALL(*sender, ssrc()).WillRepeatedly(Return(ssrc));
+  EXPECT_CALL(*sender, media_type())
+      .WillRepeatedly(
+          Return(track->kind() == MediaStreamTrackInterface::kAudioKind
+                     ? cricket::MEDIA_TYPE_AUDIO
+                     : cricket::MEDIA_TYPE_VIDEO));
+  EXPECT_CALL(*sender, SetMediaChannel(_)).Times(AtMost(2));
+  EXPECT_CALL(*sender, SetTransceiverAsStopped()).Times(AtMost(1));
+  EXPECT_CALL(*sender, Stop());
+  return sender;
+}
+
+// Builds a mock RTP receiver bound to `track` and `ssrc`, mirroring
+// CreateMockSender() above but for the receive side.
+static rtc::scoped_refptr<MockRtpReceiverInternal> CreateMockReceiver(
+    rtc::scoped_refptr<MediaStreamTrackInterface> track,
+    uint32_t ssrc) {
+  auto receiver = rtc::make_ref_counted<MockRtpReceiverInternal>();
+  EXPECT_CALL(*receiver, track()).WillRepeatedly(Return(track));
+  EXPECT_CALL(*receiver, ssrc()).WillRepeatedly(Return(ssrc));
+  EXPECT_CALL(*receiver, media_type())
+      .WillRepeatedly(
+          Return(track->kind() == MediaStreamTrackInterface::kAudioKind
+                     ? cricket::MEDIA_TYPE_AUDIO
+                     : cricket::MEDIA_TYPE_VIDEO));
+  EXPECT_CALL(*receiver, SetMediaChannel(_)).WillRepeatedly(Return());
+  EXPECT_CALL(*receiver, Stop()).WillRepeatedly(Return());
+  return receiver;
+}
+
+// Parameterized fixture: GetParam() == true exercises the stream-based
+// registration path (AddStream), false the track-based path (AddTrack).
+class StatsCollectorTrackTest : public LegacyStatsCollectorTest,
+                                public ::testing::WithParamInterface<bool> {
+ public:
+  // Adds an outgoing video track with a given SSRC into the stats.
+  // If GetParam() returns true, the track is also inserted into the local
+  // stream, which is created if necessary.
+  void AddOutgoingVideoTrack(FakePeerConnectionForStats* pc,
+                             LegacyStatsCollectorForTest* stats) {
+    video_track_ = VideoTrack::Create(
+        kLocalTrackId, FakeVideoTrackSource::Create(), rtc::Thread::Current());
+    if (GetParam()) {
+      if (!stream_)
+        stream_ = MediaStream::Create("streamid");
+      stream_->AddTrack(video_track());
+      stats->AddStream(stream_.get());
+    } else {
+      stats->AddTrack(video_track_.get());
+    }
+    pc->AddSender(CreateMockSender(video_track_, kSsrcOfTrack));
+  }
+
+  // Adds an incoming video track with a given SSRC into the stats.
+  // NOTE(review): unlike the other helpers, `stream_` is recreated
+  // unconditionally here -- confirm no test relies on reusing an existing
+  // stream when adding an incoming video track.
+  void AddIncomingVideoTrack(FakePeerConnectionForStats* pc,
+                             LegacyStatsCollectorForTest* stats) {
+    video_track_ = VideoTrack::Create(
+        kRemoteTrackId, FakeVideoTrackSource::Create(), rtc::Thread::Current());
+    if (GetParam()) {
+      stream_ = MediaStream::Create("streamid");
+      stream_->AddTrack(video_track());
+      stats->AddStream(stream_.get());
+    } else {
+      stats->AddTrack(video_track_.get());
+    }
+    pc->AddReceiver(CreateMockReceiver(video_track_, kSsrcOfTrack));
+  }
+
+  // Adds an outgoing audio track with a given SSRC into the stats,
+  // and register it into the stats object.
+  // If GetParam() returns true, the track is also inserted into the local
+  // stream, which is created if necessary.
+  rtc::scoped_refptr<RtpSenderInterface> AddOutgoingAudioTrack(
+      FakePeerConnectionForStats* pc,
+      LegacyStatsCollectorForTest* stats) {
+    audio_track_ = rtc::make_ref_counted<FakeAudioTrack>(kLocalTrackId);
+    if (GetParam()) {
+      if (!stream_)
+        stream_ = MediaStream::Create("streamid");
+      stream_->AddTrack(audio_track());
+      stats->AddStream(stream_.get());
+    } else {
+      stats->AddTrack(audio_track_.get());
+    }
+    return pc->AddSender(CreateMockSender(audio_track_, kSsrcOfTrack));
+  }
+
+  // Adds an incoming audio track with a given SSRC into the stats.
+  void AddIncomingAudioTrack(FakePeerConnectionForStats* pc,
+                             LegacyStatsCollectorForTest* stats) {
+    audio_track_ = rtc::make_ref_counted<FakeAudioTrack>(kRemoteTrackId);
+    if (GetParam()) {
+      if (stream_ == nullptr)
+        stream_ = MediaStream::Create("streamid");
+      stream_->AddTrack(audio_track());
+      stats->AddStream(stream_.get());
+    } else {
+      stats->AddTrack(audio_track_.get());
+    }
+    pc->AddReceiver(CreateMockReceiver(audio_track_, kSsrcOfTrack));
+  }
+
+  rtc::scoped_refptr<AudioTrackInterface> audio_track() { return audio_track_; }
+  rtc::scoped_refptr<VideoTrackInterface> video_track() { return video_track_; }
+
+  rtc::scoped_refptr<MediaStream> stream_;
+  rtc::scoped_refptr<VideoTrack> video_track_;
+  rtc::scoped_refptr<FakeAudioTrack> audio_track_;
+};
+
+// Verifies that a StatsCollection's internal container can be detached and
+// merged back, with the same report instance findable afterwards.
+TEST(StatsCollectionTest, DetachAndMerge) {
+  StatsCollection collection;
+  ASSERT_EQ(collection.size(), 0u);
+
+  // Create a new report with some information.
+  StatsReport::Id id(
+      StatsReport::NewTypedId(StatsReport::kStatsReportTypeTrack, "track_id"));
+  StatsReport* report = collection.ReplaceOrAddNew(id);
+  // Check the pointer before dereferencing it; the original test called
+  // AddString() first, which would crash instead of failing cleanly if
+  // ReplaceOrAddNew() ever returned null.
+  ASSERT_TRUE(report);
+  report->AddString(StatsReport::kStatsValueNameTrackId, "track_id");
+  // Check that looking it up, yields the same report.
+  ASSERT_EQ(report, collection.FindOrAddNew(id));
+  // There should be one report now.
+  ASSERT_EQ(collection.size(), 1u);
+
+  // Detach the internal container from the StatsCollection.
+  StatsCollection::Container container = collection.DetachCollection();
+  EXPECT_EQ(container.size(), 1u);
+  EXPECT_EQ(collection.size(), 0u);
+  EXPECT_EQ(nullptr, collection.Find(id));
+
+  // Merge it back and test if we find the same report.
+  collection.MergeCollection(std::move(container));
+  EXPECT_EQ(collection.size(), 1u);
+  EXPECT_EQ(report, collection.Find(id));
+}
+
+// Similar to `DetachAndMerge` above but detaches on one thread, merges on
+// another to test that we don't trigger sequence checker.
+TEST(StatsCollectionTest, DetachAndMergeThreaded) {
+  rtc::Thread new_thread(std::make_unique<rtc::NullSocketServer>());
+  new_thread.Start();
+
+  StatsReport::Id id(
+      StatsReport::NewTypedId(StatsReport::kStatsReportTypeTrack, "track_id"));
+
+  StatsReport* expected_report = nullptr;
+
+  // Build and detach the collection on the helper thread; the collection
+  // object itself never leaves that thread, only the detached container does.
+  StatsCollection::Container container = new_thread.BlockingCall([&] {
+    StatsCollection collection;
+    expected_report = collection.ReplaceOrAddNew(id);
+    expected_report->AddString(StatsReport::kStatsValueNameTrackId, "track_id");
+    return collection.DetachCollection();
+  });
+
+  // Merge on the main thread and verify the original report instance is found.
+  StatsCollection collection;
+  collection.MergeCollection(std::move(container));
+  EXPECT_EQ(collection.size(), 1u);
+  EXPECT_EQ(expected_report, collection.Find(id));
+
+  new_thread.Stop();
+}
+
+// A data channel created without a negotiated id presumably has an invalid
+// (negative) id; the data-channel report must then omit googDataChannelId.
+// (Also restores the blank-line separation between tests used throughout
+// this file.)
+TEST_F(LegacyStatsCollectorTest, FilterOutNegativeDataChannelId) {
+  auto pc = CreatePeerConnection();
+  auto stats = CreateStatsCollector(pc.get());
+
+  pc->AddSctpDataChannel("hacks");
+
+  stats->UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+  StatsReports reports;
+  stats->GetStats(nullptr, &reports);
+
+  const StatsReport* report =
+      FindNthReportByType(reports, StatsReport::kStatsReportTypeDataChannel, 1);
+  // Guard against a null report before handing it to GetValue(); the
+  // original test passed the pointer through unchecked.
+  ASSERT_TRUE(report);
+
+  std::string value_in_report;
+  EXPECT_FALSE(GetValue(report, StatsReport::kStatsValueNameDataChannelId,
+                        &value_in_report));
+}
+
+// Verify that ExtractDataInfo populates reports: the data-channel report's
+// id, timestamp, label, id, state and protocol must match the channel that
+// was added.
+TEST_F(LegacyStatsCollectorTest, ExtractDataInfo) {
+  const std::string kDataChannelLabel = "hacks";
+  constexpr int kDataChannelId = 31337;
+  const std::string kConnectingString = DataChannelInterface::DataStateString(
+      DataChannelInterface::DataState::kConnecting);
+
+  auto pc = CreatePeerConnection();
+  auto stats = CreateStatsCollector(pc.get());
+
+  InternalDataChannelInit init;
+  init.id = kDataChannelId;
+  pc->AddSctpDataChannel(kDataChannelLabel, init);
+
+  stats->UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+  StatsReports reports;
+  stats->GetStats(nullptr, &reports);
+
+  const StatsReport* report =
+      FindNthReportByType(reports, StatsReport::kStatsReportTypeDataChannel, 1);
+  // Fail cleanly instead of crashing on report->id() below if the report is
+  // missing (the original test dereferenced the pointer unchecked).
+  ASSERT_TRUE(report);
+
+  StatsReport::Id report_id = StatsReport::NewTypedIntId(
+      StatsReport::kStatsReportTypeDataChannel, kDataChannelId);
+
+  EXPECT_TRUE(report_id->Equals(report->id()));
+
+  EXPECT_EQ(stats->GetTimeNow(), report->timestamp());
+  EXPECT_EQ(kDataChannelLabel,
+            ExtractStatsValue(StatsReport::kStatsReportTypeDataChannel, reports,
+                              StatsReport::kStatsValueNameLabel));
+  EXPECT_EQ(rtc::ToString(kDataChannelId),
+            ExtractStatsValue(StatsReport::kStatsReportTypeDataChannel, reports,
+                              StatsReport::kStatsValueNameDataChannelId));
+  EXPECT_EQ(kConnectingString,
+            ExtractStatsValue(StatsReport::kStatsReportTypeDataChannel, reports,
+                              StatsReport::kStatsValueNameState));
+  EXPECT_EQ("",
+            ExtractStatsValue(StatsReport::kStatsReportTypeDataChannel, reports,
+                              StatsReport::kStatsValueNameProtocol));
+}
+
+// This test verifies that 64-bit counters are passed successfully.
+TEST_P(StatsCollectorTrackTest, BytesCounterHandles64Bits) {
+  // The number of bytes must be larger than 0xFFFFFFFF for this test.
+  constexpr int64_t kBytesSent = 12345678901234LL;
+
+  auto pc = CreatePeerConnection();
+  auto stats = CreateStatsCollector(pc.get());
+
+  VideoSenderInfo video_sender_info;
+  video_sender_info.add_ssrc(1234);
+  video_sender_info.payload_bytes_sent = kBytesSent;
+  video_sender_info.header_and_padding_bytes_sent = 0;
+  VideoMediaInfo video_info;
+  video_info.aggregated_senders.push_back(video_sender_info);
+
+  pc->AddVideoChannel("video", "transport", video_info);
+
+  AddOutgoingVideoTrack(pc.get(), stats.get());
+
+  stats->UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+  StatsReports reports;
+  stats->GetStats(nullptr, &reports);
+
+  // The reported bytesSent string must round-trip the full 64-bit value.
+  EXPECT_EQ(
+      rtc::ToString(kBytesSent),
+      ExtractSsrcStatsValue(reports, StatsReport::kStatsValueNameBytesSent));
+}
+
+// Test that audio BWE information is reported via stats.
+TEST_P(StatsCollectorTrackTest, AudioBandwidthEstimationInfoIsReported) {
+  // Set up an SSRC just to test that we get both kinds of stats back: SSRC and
+  // BWE.
+  constexpr int64_t kBytesSent = 12345678901234LL;
+  constexpr int kSendBandwidth = 1234567;
+  constexpr int kRecvBandwidth = 12345678;
+  constexpr int kPacerDelay = 123;
+
+  auto pc = CreatePeerConnection();
+  auto stats = CreateStatsCollector(pc.get());
+
+  // payload + header/padding together equal kBytesSent, which is what the
+  // bytesSent stat should aggregate to.
+  VoiceSenderInfo voice_sender_info;
+  voice_sender_info.add_ssrc(1234);
+  voice_sender_info.payload_bytes_sent = kBytesSent - 12;
+  voice_sender_info.header_and_padding_bytes_sent = 12;
+  VoiceMediaInfo voice_info;
+  voice_info.senders.push_back(voice_sender_info);
+
+  // AddVoiceChannel returns a pair of (send, receive) media channels; feed
+  // the same fake stats to both.
+  auto voice_media_channels = pc->AddVoiceChannel("audio", "transport");
+  voice_media_channels.first->SetStats(voice_info);
+  voice_media_channels.second->SetStats(voice_info);
+
+  AddOutgoingAudioTrack(pc.get(), stats.get());
+
+  Call::Stats call_stats;
+  call_stats.send_bandwidth_bps = kSendBandwidth;
+  call_stats.recv_bandwidth_bps = kRecvBandwidth;
+  call_stats.pacer_delay_ms = kPacerDelay;
+  pc->SetCallStats(call_stats);
+
+  stats->UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+  StatsReports reports;
+  stats->GetStats(nullptr, &reports);
+
+  EXPECT_EQ(
+      rtc::ToString(kBytesSent),
+      ExtractSsrcStatsValue(reports, StatsReport::kStatsValueNameBytesSent));
+  EXPECT_EQ(rtc::ToString(kSendBandwidth),
+            ExtractBweStatsValue(
+                reports, StatsReport::kStatsValueNameAvailableSendBandwidth));
+  EXPECT_EQ(
+      rtc::ToString(kRecvBandwidth),
+      ExtractBweStatsValue(
+          reports, StatsReport::kStatsValueNameAvailableReceiveBandwidth));
+  EXPECT_EQ(
+      rtc::ToString(kPacerDelay),
+      ExtractBweStatsValue(reports, StatsReport::kStatsValueNameBucketDelay));
+}
+
+// Test that video BWE information is reported via stats. Mirrors the audio
+// BWE test above, but goes through the video channel path.
+TEST_P(StatsCollectorTrackTest, VideoBandwidthEstimationInfoIsReported) {
+  // Set up an SSRC just to test that we get both kinds of stats back: SSRC and
+  // BWE.
+  constexpr int64_t kBytesSent = 12345678901234LL;
+  constexpr int kSendBandwidth = 1234567;
+  constexpr int kRecvBandwidth = 12345678;
+  constexpr int kPacerDelay = 123;
+
+  auto pc = CreatePeerConnection();
+  auto stats = CreateStatsCollector(pc.get());
+
+  // payload + header/padding together equal kBytesSent, which is what the
+  // bytesSent stat should aggregate to.
+  VideoSenderInfo video_sender_info;
+  video_sender_info.add_ssrc(1234);
+  video_sender_info.payload_bytes_sent = kBytesSent - 12;
+  video_sender_info.header_and_padding_bytes_sent = 12;
+
+  VideoMediaInfo video_info;
+  video_info.aggregated_senders.push_back(video_sender_info);
+
+  pc->AddVideoChannel("video", "transport", video_info);
+
+  AddOutgoingVideoTrack(pc.get(), stats.get());
+
+  Call::Stats call_stats;
+  call_stats.send_bandwidth_bps = kSendBandwidth;
+  call_stats.recv_bandwidth_bps = kRecvBandwidth;
+  call_stats.pacer_delay_ms = kPacerDelay;
+  pc->SetCallStats(call_stats);
+
+  stats->UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+  StatsReports reports;
+  stats->GetStats(nullptr, &reports);
+
+  EXPECT_EQ(
+      rtc::ToString(kBytesSent),
+      ExtractSsrcStatsValue(reports, StatsReport::kStatsValueNameBytesSent));
+  EXPECT_EQ(rtc::ToString(kSendBandwidth),
+            ExtractBweStatsValue(
+                reports, StatsReport::kStatsValueNameAvailableSendBandwidth));
+  EXPECT_EQ(
+      rtc::ToString(kRecvBandwidth),
+      ExtractBweStatsValue(
+          reports, StatsReport::kStatsValueNameAvailableReceiveBandwidth));
+  EXPECT_EQ(
+      rtc::ToString(kPacerDelay),
+      ExtractBweStatsValue(reports, StatsReport::kStatsValueNameBucketDelay));
+}
+
+// A "googSession" report must always be present in the returned stats,
+// even for a freshly created peer connection with no media.
+TEST_F(LegacyStatsCollectorTest, SessionObjectExists) {
+  auto pc = CreatePeerConnection();
+  auto stats = CreateStatsCollector(pc.get());
+
+  stats->UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+  StatsReports reports;
+  stats->GetStats(nullptr, &reports);
+
+  const StatsReport* session_report =
+      FindNthReportByType(reports, StatsReport::kStatsReportTypeSession, 1);
+  EXPECT_TRUE(session_report != nullptr);
+}
+
+// This test verifies that only one object of type "googSession" exists
+// in the returned stats.
+TEST_F(LegacyStatsCollectorTest, OnlyOneSessionObjectExists) {
+  auto pc = CreatePeerConnection();
+  auto stats = CreateStatsCollector(pc.get());
+
+  // Update twice to verify the session report is not duplicated.
+  stats->UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+  stats->UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+  StatsReports reports;
+  stats->GetStats(nullptr, &reports);
+
+  // Exactly one session report: the first lookup succeeds, a second fails.
+  EXPECT_TRUE(
+      FindNthReportByType(reports, StatsReport::kStatsReportTypeSession, 1));
+  EXPECT_FALSE(
+      FindNthReportByType(reports, StatsReport::kStatsReportTypeSession, 2));
+}
+
+// This test verifies that the empty track report exists in the returned stats
+// without calling StatsCollector::UpdateStats.
+TEST_P(StatsCollectorTrackTest, TrackObjectExistsWithoutUpdateStats) {
+  auto pc = CreatePeerConnection();
+  auto stats = CreateStatsCollector(pc.get());
+
+  pc->AddVideoChannel("video", "transport");
+  AddOutgoingVideoTrack(pc.get(), stats.get());
+
+  // Verifies the existence of the track report. Since UpdateStats() was
+  // never called, the only report is the empty track one with timestamp 0.
+  StatsReports reports;
+  stats->GetStats(nullptr, &reports);
+  ASSERT_EQ(1u, reports.size());
+  EXPECT_EQ(StatsReport::kStatsReportTypeTrack, reports[0]->type());
+  EXPECT_EQ(0, reports[0]->timestamp());
+
+  std::string trackValue =
+      ExtractStatsValue(StatsReport::kStatsReportTypeTrack, reports,
+                        StatsReport::kStatsValueNameTrackId);
+  EXPECT_EQ(kLocalTrackId, trackValue);
+}
+
+// This test verifies that the empty track report exists in the returned stats
+// when StatsCollector::UpdateStats is called with ssrc stats.
+TEST_P(StatsCollectorTrackTest, TrackAndSsrcObjectExistAfterUpdateSsrcStats) {
+  constexpr int64_t kBytesSent = 12345678901234LL;
+
+  auto pc = CreatePeerConnection();
+  auto stats = CreateStatsCollector(pc.get());
+
+  VideoSenderInfo video_sender_info;
+  video_sender_info.add_ssrc(1234);
+  video_sender_info.payload_bytes_sent = kBytesSent - 12;
+  video_sender_info.header_and_padding_bytes_sent = 12;
+  VideoMediaInfo video_info;
+  video_info.aggregated_senders.push_back(video_sender_info);
+
+  pc->AddVideoChannel("video", "transport", video_info);
+
+  AddOutgoingVideoTrack(pc.get(), stats.get());
+
+  stats->UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+  StatsReports reports;
+  stats->GetStats(nullptr, &reports);
+
+  // `reports` should contain at least one session report, one track report,
+  // and one ssrc report.
+  EXPECT_LE(3u, reports.size());
+  const StatsReport* track_report =
+      FindNthReportByType(reports, StatsReport::kStatsReportTypeTrack, 1);
+  EXPECT_TRUE(track_report);
+
+  // Get report for the specific `track`.
+  reports.clear();
+  stats->GetStats(video_track_.get(), &reports);
+  // `reports` should contain at least one session report, one track report,
+  // and one ssrc report.
+  EXPECT_LE(3u, reports.size());
+  track_report =
+      FindNthReportByType(reports, StatsReport::kStatsReportTypeTrack, 1);
+  ASSERT_TRUE(track_report);
+  EXPECT_EQ(stats->GetTimeNow(), track_report->timestamp());
+
+  // The ssrc report must reference the expected ssrc, track id and media
+  // type of the outgoing video track added above.
+  std::string ssrc_id =
+      ExtractSsrcStatsValue(reports, StatsReport::kStatsValueNameSsrc);
+  EXPECT_EQ(rtc::ToString(kSsrcOfTrack), ssrc_id);
+
+  std::string track_id =
+      ExtractSsrcStatsValue(reports, StatsReport::kStatsValueNameTrackId);
+  EXPECT_EQ(kLocalTrackId, track_id);
+
+  std::string media_type =
+      ExtractSsrcStatsValue(reports, StatsReport::kStatsValueNameMediaType);
+  EXPECT_EQ("video", media_type);
+}
+
+// This test verifies that an SSRC object has the identifier of a Transport
+// stats object, and that this transport stats object exists in stats.
+TEST_P(StatsCollectorTrackTest, TransportObjectLinkedFromSsrcObject) {
+  constexpr int64_t kBytesSent = 12345678901234LL;
+
+  auto pc = CreatePeerConnection();
+  auto stats = CreateStatsCollector(pc.get());
+
+  VideoSenderInfo video_sender_info;
+  video_sender_info.add_ssrc(1234);
+  video_sender_info.payload_bytes_sent = kBytesSent - 12;
+  video_sender_info.header_and_padding_bytes_sent = 12;
+  VideoMediaInfo video_info;
+  video_info.aggregated_senders.push_back(video_sender_info);
+
+  pc->AddVideoChannel("video", "transport", video_info);
+
+  AddOutgoingVideoTrack(pc.get(), stats.get());
+
+  stats->UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+  StatsReports reports;
+  stats->GetStats(nullptr, &reports);
+
+  std::string transport_id =
+      ExtractStatsValue(StatsReport::kStatsReportTypeSsrc, reports,
+                        StatsReport::kStatsValueNameTransportId);
+  ASSERT_NE(kNotFound, transport_id);
+
+  // Transport id component ID will always be 1.
+  // This has assumptions about how the ID is constructed.  As is, this is
+  // OK since this is for testing purposes only, but if we ever need this
+  // in production, we should add a generic method that does this.
+  // Strip the leading "<type>-" prefix and the trailing "-<component>"
+  // suffix to recover the transport name embedded in the id string.
+  size_t index = transport_id.find('-');
+  ASSERT_NE(std::string::npos, index);
+  std::string content = transport_id.substr(index + 1);
+  index = content.rfind('-');
+  ASSERT_NE(std::string::npos, index);
+  content = content.substr(0, index);
+  // Rebuilding the component id from the extracted name must reproduce the
+  // exact id string, and that id must resolve to an existing report.
+  StatsReport::Id id(StatsReport::NewComponentId(content, 1));
+  ASSERT_EQ(transport_id, id->ToString());
+  const StatsReport* transport_report = FindReportById(reports, id);
+  ASSERT_TRUE(transport_report);
+}
+
+// When the media channel returns no remote stats for an outgoing SSRC, no
+// remote-SSRC report may appear in the collected stats.
+TEST_P(StatsCollectorTrackTest, RemoteSsrcInfoIsAbsent) {
+  auto pc = CreatePeerConnection();
+  auto stats = CreateStatsCollector(pc.get());
+
+  pc->AddVideoChannel("video", "transport");
+  AddOutgoingVideoTrack(pc.get(), stats.get());
+
+  stats->UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+  StatsReports reports;
+  stats->GetStats(nullptr, &reports);
+
+  // Not even a first remote-SSRC report should have been generated.
+  EXPECT_FALSE(FindNthReportByType(reports,
+                                   StatsReport::kStatsReportTypeRemoteSsrc, 1));
+}
+
+// This test verifies that a remote stats object will be created for
+// an outgoing SSRC where stats are returned.
+TEST_P(StatsCollectorTrackTest, RemoteSsrcInfoIsPresent) {
+  auto pc = CreatePeerConnection();
+  auto stats = CreateStatsCollector(pc.get());
+
+  // Attach remote stats (with their own timestamp) to the sender info.
+  SsrcReceiverInfo remote_ssrc_stats;
+  remote_ssrc_stats.timestamp = 12345.678;
+  remote_ssrc_stats.ssrc = kSsrcOfTrack;
+  VideoSenderInfo video_sender_info;
+  video_sender_info.add_ssrc(kSsrcOfTrack);
+  video_sender_info.remote_stats.push_back(remote_ssrc_stats);
+  VideoMediaInfo video_info;
+  video_info.aggregated_senders.push_back(video_sender_info);
+
+  pc->AddVideoChannel("video", "transport", video_info);
+
+  AddOutgoingVideoTrack(pc.get(), stats.get());
+
+  stats->UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+  StatsReports reports;
+  stats->GetStats(nullptr, &reports);
+
+  // The remote report must exist and carry the remote stats' own timestamp,
+  // not the collector's local time.
+  const StatsReport* remote_report =
+      FindNthReportByType(reports, StatsReport::kStatsReportTypeRemoteSsrc, 1);
+  ASSERT_TRUE(remote_report);
+  EXPECT_EQ(12345.678, remote_report->timestamp());
+}
+
+// This test verifies that the empty track report exists in the returned stats
+// when StatsCollector::UpdateStats is called with ssrc stats.
+TEST_P(StatsCollectorTrackTest, ReportsFromRemoteTrack) {
+  constexpr int64_t kNumOfPacketsConcealed = 54321;
+
+  auto pc = CreatePeerConnection();
+  auto stats = CreateStatsCollector(pc.get());
+
+  VideoReceiverInfo video_receiver_info;
+  video_receiver_info.add_ssrc(1234);
+  video_receiver_info.packets_concealed = kNumOfPacketsConcealed;
+  VideoMediaInfo video_info;
+  video_info.receivers.push_back(video_receiver_info);
+
+  pc->AddVideoChannel("video", "transport", video_info);
+
+  // Incoming (remote) track, hence kRemoteTrackId is expected below.
+  AddIncomingVideoTrack(pc.get(), stats.get());
+
+  stats->UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+  StatsReports reports;
+  stats->GetStats(nullptr, &reports);
+
+  // `reports` should contain at least one session report, one track report,
+  // and one ssrc report.
+  EXPECT_LE(3u, reports.size());
+  const StatsReport* track_report =
+      FindNthReportByType(reports, StatsReport::kStatsReportTypeTrack, 1);
+  ASSERT_TRUE(track_report);
+  EXPECT_EQ(stats->GetTimeNow(), track_report->timestamp());
+
+  std::string ssrc_id =
+      ExtractSsrcStatsValue(reports, StatsReport::kStatsValueNameSsrc);
+  EXPECT_EQ(rtc::ToString(kSsrcOfTrack), ssrc_id);
+
+  std::string track_id =
+      ExtractSsrcStatsValue(reports, StatsReport::kStatsValueNameTrackId);
+  EXPECT_EQ(kRemoteTrackId, track_id);
+}
+
+// This test verifies the Ice Candidate report should contain the correct
+// information from local/remote candidates.
+TEST_F(LegacyStatsCollectorTest, IceCandidateReport) {
+  const std::string kTransportName = "transport";
+  const rtc::AdapterType kNetworkType = rtc::ADAPTER_TYPE_ETHERNET;
+  constexpr uint32_t kPriority = 1000;
+
+  constexpr int kLocalPort = 2000;
+  const std::string kLocalIp = "192.168.0.1";
+  const rtc::SocketAddress kLocalAddress(kLocalIp, kLocalPort);
+
+  constexpr int kRemotePort = 2001;
+  const std::string kRemoteIp = "192.168.0.2";
+  const rtc::SocketAddress kRemoteAddress(kRemoteIp, kRemotePort);
+
+  auto pc = CreatePeerConnection();
+  auto stats = CreateStatsCollector(pc.get());
+
+  // Candidates get an auto-generated id on construction; the report ids
+  // below are derived from it ("Cand-" + id).
+  cricket::Candidate local;
+  EXPECT_GT(local.id().length(), 0u);
+  local.set_type(cricket::LOCAL_PORT_TYPE);
+  local.set_protocol(cricket::UDP_PROTOCOL_NAME);
+  local.set_address(kLocalAddress);
+  local.set_priority(kPriority);
+  local.set_network_type(kNetworkType);
+
+  cricket::Candidate remote;
+  EXPECT_GT(remote.id().length(), 0u);
+  remote.set_type(cricket::PRFLX_PORT_TYPE);
+  remote.set_protocol(cricket::UDP_PROTOCOL_NAME);
+  remote.set_address(kRemoteAddress);
+  remote.set_priority(kPriority);
+  remote.set_network_type(kNetworkType);
+
+  // Wire both candidates into a single connection on the fake transport.
+  ConnectionInfo connection_info;
+  connection_info.local_candidate = local;
+  connection_info.remote_candidate = remote;
+  TransportChannelStats channel_stats;
+  channel_stats.ice_transport_stats.connection_infos.push_back(connection_info);
+
+  pc->AddVoiceChannel("audio", kTransportName);
+  pc->SetTransportStats(kTransportName, channel_stats);
+
+  stats->UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+  StatsReports reports;
+  stats->GetStats(nullptr, &reports);
+
+  // Verify the local candidate report is populated correctly.
+  EXPECT_EQ(
+      "Cand-" + local.id(),
+      ExtractStatsValue(StatsReport::kStatsReportTypeCandidatePair, reports,
+                        StatsReport::kStatsValueNameLocalCandidateId));
+  EXPECT_EQ(
+      kLocalIp,
+      ExtractStatsValue(StatsReport::kStatsReportTypeIceLocalCandidate, reports,
+                        StatsReport::kStatsValueNameCandidateIPAddress));
+  EXPECT_EQ(
+      rtc::ToString(kLocalPort),
+      ExtractStatsValue(StatsReport::kStatsReportTypeIceLocalCandidate, reports,
+                        StatsReport::kStatsValueNameCandidatePortNumber));
+  EXPECT_EQ(
+      cricket::UDP_PROTOCOL_NAME,
+      ExtractStatsValue(StatsReport::kStatsReportTypeIceLocalCandidate, reports,
+                        StatsReport::kStatsValueNameCandidateTransportType));
+  EXPECT_EQ(
+      rtc::ToString(kPriority),
+      ExtractStatsValue(StatsReport::kStatsReportTypeIceLocalCandidate, reports,
+                        StatsReport::kStatsValueNameCandidatePriority));
+  EXPECT_EQ(
+      IceCandidateTypeToStatsType(cricket::LOCAL_PORT_TYPE),
+      ExtractStatsValue(StatsReport::kStatsReportTypeIceLocalCandidate, reports,
+                        StatsReport::kStatsValueNameCandidateType));
+  EXPECT_EQ(
+      AdapterTypeToStatsType(kNetworkType),
+      ExtractStatsValue(StatsReport::kStatsReportTypeIceLocalCandidate, reports,
+                        StatsReport::kStatsValueNameCandidateNetworkType));
+
+  // Verify the remote candidate report is populated correctly.
+  EXPECT_EQ(
+      "Cand-" + remote.id(),
+      ExtractStatsValue(StatsReport::kStatsReportTypeCandidatePair, reports,
+                        StatsReport::kStatsValueNameRemoteCandidateId));
+  EXPECT_EQ(kRemoteIp,
+            ExtractStatsValue(StatsReport::kStatsReportTypeIceRemoteCandidate,
+                              reports,
+                              StatsReport::kStatsValueNameCandidateIPAddress));
+  EXPECT_EQ(rtc::ToString(kRemotePort),
+            ExtractStatsValue(StatsReport::kStatsReportTypeIceRemoteCandidate,
+                              reports,
+                              StatsReport::kStatsValueNameCandidatePortNumber));
+  EXPECT_EQ(cricket::UDP_PROTOCOL_NAME,
+            ExtractStatsValue(
+                StatsReport::kStatsReportTypeIceRemoteCandidate, reports,
+                StatsReport::kStatsValueNameCandidateTransportType));
+  EXPECT_EQ(rtc::ToString(kPriority),
+            ExtractStatsValue(StatsReport::kStatsReportTypeIceRemoteCandidate,
+                              reports,
+                              StatsReport::kStatsValueNameCandidatePriority));
+  EXPECT_EQ(
+      IceCandidateTypeToStatsType(cricket::PRFLX_PORT_TYPE),
+      ExtractStatsValue(StatsReport::kStatsReportTypeIceRemoteCandidate,
+                        reports, StatsReport::kStatsValueNameCandidateType));
+  // Network type is only reported for local candidates, so it must be
+  // absent for the remote one.
+  EXPECT_EQ(kNotFound,
+            ExtractStatsValue(
+                StatsReport::kStatsReportTypeIceRemoteCandidate, reports,
+                StatsReport::kStatsValueNameCandidateNetworkType));
+}
+
+// This test verifies that all chained certificates are correctly
+// reported.
+TEST_F(LegacyStatsCollectorTest, ChainedCertificateReportsCreated) {
+ // Build local certificate chain.
+ std::vector<std::string> local_ders(5);
+ local_ders[0] = "These";
+ local_ders[1] = "are";
+ local_ders[2] = "some";
+ local_ders[3] = "der";
+ local_ders[4] = "values";
+ rtc::FakeSSLIdentity local_identity(DersToPems(local_ders));
+
+ // Build remote certificate chain
+ std::vector<std::string> remote_ders(4);
+ remote_ders[0] = "A";
+ remote_ders[1] = "non-";
+ remote_ders[2] = "intersecting";
+ remote_ders[3] = "set";
+ rtc::FakeSSLIdentity remote_identity(DersToPems(remote_ders));
+
+ TestCertificateReports(local_identity, local_ders, remote_identity,
+ remote_ders);
+}
+
+// This test verifies that all certificates without chains are correctly
+// reported.
+TEST_F(LegacyStatsCollectorTest, ChainlessCertificateReportsCreated) {
+ // Build local certificate.
+ std::string local_der = "This is the local der.";
+ rtc::FakeSSLIdentity local_identity(DerToPem(local_der));
+
+ // Build remote certificate.
+ std::string remote_der = "This is somebody else's der.";
+ rtc::FakeSSLIdentity remote_identity(DerToPem(remote_der));
+
+ TestCertificateReports(local_identity, std::vector<std::string>(1, local_der),
+ remote_identity,
+ std::vector<std::string>(1, remote_der));
+}
+
+// This test verifies that the stats are generated correctly when no
+// transport is present.
+TEST_F(LegacyStatsCollectorTest, NoTransport) {
+ auto pc = CreatePeerConnection();
+ auto stats = CreateStatsCollector(pc.get());
+
+ // This will cause the fake PeerConnection to generate a TransportStats entry
+ // but with only a single dummy TransportChannelStats.
+ pc->AddVoiceChannel("audio", "transport");
+
+ stats->UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+ StatsReports reports;
+ stats->GetStats(nullptr, &reports);
+
+ // Check that the local certificate is absent.
+ std::string local_certificate_id =
+ ExtractStatsValue(StatsReport::kStatsReportTypeComponent, reports,
+ StatsReport::kStatsValueNameLocalCertificateId);
+ ASSERT_EQ(kNotFound, local_certificate_id);
+
+ // Check that the remote certificate is absent.
+ std::string remote_certificate_id =
+ ExtractStatsValue(StatsReport::kStatsReportTypeComponent, reports,
+ StatsReport::kStatsValueNameRemoteCertificateId);
+ ASSERT_EQ(kNotFound, remote_certificate_id);
+
+ // Check that the negotiated ciphers are absent.
+ std::string dtls_cipher_suite =
+ ExtractStatsValue(StatsReport::kStatsReportTypeComponent, reports,
+ StatsReport::kStatsValueNameDtlsCipher);
+ ASSERT_EQ(kNotFound, dtls_cipher_suite);
+ std::string srtp_crypto_suite =
+ ExtractStatsValue(StatsReport::kStatsReportTypeComponent, reports,
+ StatsReport::kStatsValueNameSrtpCipher);
+ ASSERT_EQ(kNotFound, srtp_crypto_suite);
+}
+
+// This test verifies that a remote certificate with an unsupported digest
+// algorithm is correctly ignored.
+TEST_F(LegacyStatsCollectorTest, UnsupportedDigestIgnored) {
+ // Build a local certificate.
+ std::string local_der = "This is the local der.";
+ rtc::FakeSSLIdentity local_identity(DerToPem(local_der));
+
+ // Build a remote certificate with an unsupported digest algorithm.
+ std::string remote_der = "This is somebody else's der.";
+ rtc::FakeSSLCertificate remote_cert(DerToPem(remote_der));
+ remote_cert.set_digest_algorithm("foobar");
+ rtc::FakeSSLIdentity remote_identity(remote_cert);
+
+ TestCertificateReports(local_identity, std::vector<std::string>(1, local_der),
+ remote_identity, std::vector<std::string>());
+}
+
+// This test verifies that the audio/video related stats which are -1 initially
+// will be filtered out.
+TEST_P(StatsCollectorTrackTest, FilterOutNegativeInitialValues) {
+  // This test exercises streams and is only meaningful when streams are used.
+ if (!GetParam()) {
+ return;
+ }
+
+ auto pc = CreatePeerConnection();
+ auto stats = CreateStatsCollector(pc.get());
+
+  // Create a local stream with a local audio track and add it to the stats.
+ stream_ = MediaStream::Create("streamid");
+ auto local_track =
+ rtc::make_ref_counted<FakeAudioTrackWithInitValue>(kLocalTrackId);
+ stream_->AddTrack(rtc::scoped_refptr<AudioTrackInterface>(local_track.get()));
+ pc->AddSender(CreateMockSender(local_track, kSsrcOfTrack));
+ if (GetParam()) {
+ stats->AddStream(stream_.get());
+ }
+ stats->AddLocalAudioTrack(local_track.get(), kSsrcOfTrack);
+
+  // Create a remote stream with a remote audio track and add it to the stats.
+ rtc::scoped_refptr<MediaStream> remote_stream(
+ MediaStream::Create("remotestreamid"));
+ rtc::scoped_refptr<AudioTrackInterface> remote_track =
+ rtc::make_ref_counted<FakeAudioTrackWithInitValue>(kRemoteTrackId);
+ remote_stream->AddTrack(remote_track);
+ pc->AddReceiver(CreateMockReceiver(remote_track, kSsrcOfTrack));
+ if (GetParam()) {
+ stats->AddStream(remote_stream.get());
+ }
+
+ VoiceSenderInfo voice_sender_info;
+ voice_sender_info.add_ssrc(kSsrcOfTrack);
+ // These values are set to -1 initially in audio_send_stream.
+ // The voice_sender_info will read the values from audio_send_stream.
+ voice_sender_info.rtt_ms = -1;
+ voice_sender_info.packets_lost = -1;
+ voice_sender_info.jitter_ms = -1;
+
+ // Some of the contents in `voice_sender_info` needs to be updated from the
+ // `audio_track_`.
+ UpdateVoiceSenderInfoFromAudioTrack(local_track.get(), &voice_sender_info,
+ true);
+
+ VoiceReceiverInfo voice_receiver_info;
+ voice_receiver_info.add_ssrc(kSsrcOfTrack);
+ voice_receiver_info.capture_start_ntp_time_ms = -1;
+ voice_receiver_info.audio_level = -1;
+
+ // Constructs an ssrc stats update.
+ VoiceMediaInfo voice_info;
+ voice_info.senders.push_back(voice_sender_info);
+ voice_info.receivers.push_back(voice_receiver_info);
+
+ auto voice_media_channels = pc->AddVoiceChannel("voice", "transport");
+ voice_media_channels.first->SetStats(voice_info);
+ voice_media_channels.second->SetStats(voice_info);
+
+ stats->UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+
+ // Get stats for the local track.
+ StatsReports reports;
+ stats->GetStats(local_track.get(), &reports);
+ const StatsReport* report =
+ FindNthReportByType(reports, StatsReport::kStatsReportTypeSsrc, 1);
+ ASSERT_TRUE(report);
+ // The -1 will not be added to the stats report.
+ std::string value_in_report;
+ EXPECT_FALSE(
+ GetValue(report, StatsReport::kStatsValueNameRtt, &value_in_report));
+ EXPECT_FALSE(GetValue(report, StatsReport::kStatsValueNamePacketsLost,
+ &value_in_report));
+ EXPECT_FALSE(GetValue(report, StatsReport::kStatsValueNameJitterReceived,
+ &value_in_report));
+ EXPECT_FALSE(GetValue(report, StatsReport::kStatsValueNameEchoDelayMedian,
+ &value_in_report));
+ EXPECT_FALSE(GetValue(report, StatsReport::kStatsValueNameEchoDelayStdDev,
+ &value_in_report));
+
+ // Get stats for the remote track.
+ reports.clear();
+ stats->GetStats(remote_track.get(), &reports);
+ report = FindNthReportByType(reports, StatsReport::kStatsReportTypeSsrc, 1);
+ ASSERT_TRUE(report);
+ EXPECT_FALSE(GetValue(report,
+ StatsReport::kStatsValueNameCaptureStartNtpTimeMs,
+ &value_in_report));
+ EXPECT_FALSE(GetValue(report, StatsReport::kStatsValueNameAudioInputLevel,
+ &value_in_report));
+}
+
+// This test verifies that a local stats object can get statistics via
+// AudioTrackInterface::GetStats() method.
+TEST_P(StatsCollectorTrackTest, GetStatsFromLocalAudioTrack) {
+ auto pc = CreatePeerConnection();
+ auto stats = CreateStatsCollector(pc.get());
+
+ AddOutgoingAudioTrack(pc.get(), stats.get());
+ stats->AddLocalAudioTrack(audio_track_.get(), kSsrcOfTrack);
+
+ VoiceSenderInfo voice_sender_info;
+ InitVoiceSenderInfo(&voice_sender_info);
+ UpdateVoiceSenderInfoFromAudioTrack(audio_track_.get(), &voice_sender_info,
+ false);
+ VoiceMediaInfo voice_info;
+ voice_info.senders.push_back(voice_sender_info);
+
+ auto voice_media_channels = pc->AddVoiceChannel("audio", "transport");
+ voice_media_channels.first->SetStats(voice_info);
+ voice_media_channels.second->SetStats(voice_info);
+
+ StatsReports reports; // returned values.
+ VerifyAudioTrackStats(audio_track_.get(), stats.get(), voice_info, &reports);
+
+ // Verify that there is no remote report for the local audio track because
+ // we did not set it up.
+ const StatsReport* remote_report =
+ FindNthReportByType(reports, StatsReport::kStatsReportTypeRemoteSsrc, 1);
+ EXPECT_TRUE(remote_report == NULL);
+}
+
+// This test verifies that audio receive streams populate stats reports
+// correctly.
+TEST_P(StatsCollectorTrackTest, GetStatsFromRemoteStream) {
+ auto pc = CreatePeerConnection();
+ auto stats = CreateStatsCollector(pc.get());
+
+ AddIncomingAudioTrack(pc.get(), stats.get());
+
+ VoiceReceiverInfo voice_receiver_info;
+ InitVoiceReceiverInfo(&voice_receiver_info);
+ voice_receiver_info.codec_name = "fake_codec";
+ VoiceMediaInfo voice_info;
+ voice_info.receivers.push_back(voice_receiver_info);
+
+ auto voice_media_channels = pc->AddVoiceChannel("audio", "transport");
+ voice_media_channels.first->SetStats(voice_info);
+ voice_media_channels.second->SetStats(voice_info);
+
+ StatsReports reports; // returned values.
+ VerifyAudioTrackStats(audio_track_.get(), stats.get(), voice_info, &reports);
+}
+
+// This test verifies that a local stats object won't update its statistics
+// after a RemoveLocalAudioTrack() call.
+TEST_P(StatsCollectorTrackTest, GetStatsAfterRemoveAudioStream) {
+ auto pc = CreatePeerConnection();
+ auto stats = CreateStatsCollector(pc.get());
+
+ AddOutgoingAudioTrack(pc.get(), stats.get());
+ stats->AddLocalAudioTrack(audio_track_.get(), kSsrcOfTrack);
+
+ VoiceSenderInfo voice_sender_info;
+ InitVoiceSenderInfo(&voice_sender_info);
+ VoiceMediaInfo voice_info;
+ voice_info.senders.push_back(voice_sender_info);
+
+ auto voice_media_channels = pc->AddVoiceChannel("audio", "transport");
+ voice_media_channels.first->SetStats(voice_info);
+ voice_media_channels.second->SetStats(voice_info);
+
+ stats->RemoveLocalAudioTrack(audio_track_.get(), kSsrcOfTrack);
+
+ stats->UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+ StatsReports reports;
+ stats->GetStats(nullptr, &reports);
+
+ // The report will exist since we don't remove them in RemoveStream().
+ const StatsReport* report =
+ FindNthReportByType(reports, StatsReport::kStatsReportTypeSsrc, 1);
+ ASSERT_TRUE(report);
+ EXPECT_EQ(stats->GetTimeNow(), report->timestamp());
+ std::string track_id =
+ ExtractSsrcStatsValue(reports, StatsReport::kStatsValueNameTrackId);
+ EXPECT_EQ(kLocalTrackId, track_id);
+ std::string ssrc_id =
+ ExtractSsrcStatsValue(reports, StatsReport::kStatsValueNameSsrc);
+ EXPECT_EQ(rtc::ToString(kSsrcOfTrack), ssrc_id);
+
+  // Verify the values in the track report; no value is changed by
+  // AudioTrackInterface::GetSignalValue() or
+  // AudioProcessorInterface::GetStats().
+ VerifyVoiceSenderInfoReport(report, voice_sender_info);
+}
+
+// This test verifies that when outgoing and incoming audio tracks are using
+// the same ssrc, they populate stats reports correctly.
+TEST_P(StatsCollectorTrackTest, LocalAndRemoteTracksWithSameSsrc) {
+ auto pc = CreatePeerConnection();
+ auto stats = CreateStatsCollector(pc.get());
+
+  // Create a local stream with a local audio track and add it to the stats.
+ AddOutgoingAudioTrack(pc.get(), stats.get());
+ stats->AddLocalAudioTrack(audio_track_.get(), kSsrcOfTrack);
+
+  // Create a remote stream with a remote audio track and add it to the stats.
+ rtc::scoped_refptr<MediaStream> remote_stream(
+ MediaStream::Create("remotestreamid"));
+ rtc::scoped_refptr<AudioTrackInterface> remote_track =
+ rtc::make_ref_counted<FakeAudioTrack>(kRemoteTrackId);
+ pc->AddReceiver(CreateMockReceiver(remote_track, kSsrcOfTrack));
+ remote_stream->AddTrack(remote_track);
+ stats->AddStream(remote_stream.get());
+
+ VoiceSenderInfo voice_sender_info;
+ InitVoiceSenderInfo(&voice_sender_info);
+ // Some of the contents in `voice_sender_info` needs to be updated from the
+ // `audio_track_`.
+ UpdateVoiceSenderInfoFromAudioTrack(audio_track_.get(), &voice_sender_info,
+ true);
+
+ VoiceReceiverInfo voice_receiver_info;
+ InitVoiceReceiverInfo(&voice_receiver_info);
+
+ // Constructs an ssrc stats update.
+ VoiceMediaInfo voice_info;
+ voice_info.senders.push_back(voice_sender_info);
+ voice_info.receivers.push_back(voice_receiver_info);
+
+ // Instruct the session to return stats containing the transport channel.
+ auto voice_media_channels = pc->AddVoiceChannel("audio", "transport");
+ voice_media_channels.first->SetStats(voice_info);
+ voice_media_channels.second->SetStats(voice_info);
+
+ stats->UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+
+ // Get stats for the local track.
+ StatsReports reports; // returned values.
+ stats->GetStats(audio_track_.get(), &reports);
+
+ const StatsReport* track_report =
+ FindNthReportByType(reports, StatsReport::kStatsReportTypeSsrc, 1);
+ ASSERT_TRUE(track_report);
+ EXPECT_EQ(stats->GetTimeNow(), track_report->timestamp());
+ std::string track_id =
+ ExtractSsrcStatsValue(reports, StatsReport::kStatsValueNameTrackId);
+ EXPECT_EQ(kLocalTrackId, track_id);
+ VerifyVoiceSenderInfoReport(track_report, voice_sender_info);
+
+ // Get stats for the remote track.
+ reports.clear();
+ stats->GetStats(remote_track.get(), &reports);
+ track_report =
+ FindNthReportByType(reports, StatsReport::kStatsReportTypeSsrc, 1);
+ ASSERT_TRUE(track_report);
+ EXPECT_EQ(stats->GetTimeNow(), track_report->timestamp());
+ track_id =
+ ExtractSsrcStatsValue(reports, StatsReport::kStatsValueNameTrackId);
+ EXPECT_EQ(kRemoteTrackId, track_id);
+ VerifyVoiceReceiverInfoReport(track_report, voice_receiver_info);
+}
+
+// This test verifies that when two outgoing audio tracks are using the same
+// ssrc at different times, they populate stats reports correctly.
+// TODO(xians): Figure out if it is possible to encapsulate the setup and
+// avoid duplication of code in test cases.
+TEST_P(StatsCollectorTrackTest, TwoLocalTracksWithSameSsrc) {
+ // This test only makes sense when we're using streams.
+ if (!GetParam()) {
+ return;
+ }
+
+ auto pc = CreatePeerConnection();
+ auto stats = CreateStatsCollector(pc.get());
+
+  // Create a local stream with a local audio track and add it to the stats.
+ auto sender = AddOutgoingAudioTrack(pc.get(), stats.get());
+ stats->AddLocalAudioTrack(audio_track_.get(), kSsrcOfTrack);
+
+ VoiceSenderInfo voice_sender_info;
+ InitVoiceSenderInfo(&voice_sender_info);
+ UpdateVoiceSenderInfoFromAudioTrack(audio_track_.get(), &voice_sender_info,
+ false);
+ voice_sender_info.add_ssrc(kSsrcOfTrack);
+ VoiceMediaInfo voice_info;
+ voice_info.senders.push_back(voice_sender_info);
+
+ auto voice_media_channels = pc->AddVoiceChannel("voice", "transport");
+ voice_media_channels.first->SetStats(voice_info);
+ voice_media_channels.second->SetStats(voice_info);
+
+ StatsReports reports; // returned values.
+ VerifyAudioTrackStats(audio_track_.get(), stats.get(), voice_info, &reports);
+
+ // Remove the previous audio track from the stream.
+ stream_->RemoveTrack(audio_track());
+ stats->RemoveLocalAudioTrack(audio_track_.get(), kSsrcOfTrack);
+ pc->RemoveSender(sender);
+
+  // Create a new audio track and add it to the stream and stats.
+ static const std::string kNewTrackId = "new_track_id";
+ auto new_audio_track = rtc::make_ref_counted<FakeAudioTrack>(kNewTrackId);
+ pc->AddSender(CreateMockSender(new_audio_track, kSsrcOfTrack));
+ stream_->AddTrack(
+ rtc::scoped_refptr<AudioTrackInterface>(new_audio_track.get()));
+
+ stats->AddLocalAudioTrack(new_audio_track.get(), kSsrcOfTrack);
+ stats->InvalidateCache();
+
+ VoiceSenderInfo new_voice_sender_info;
+ InitVoiceSenderInfo(&new_voice_sender_info);
+ UpdateVoiceSenderInfoFromAudioTrack(new_audio_track.get(),
+ &new_voice_sender_info, false);
+ VoiceMediaInfo new_voice_info;
+ new_voice_info.senders.push_back(new_voice_sender_info);
+ voice_media_channels.first->SetStats(new_voice_info);
+ voice_media_channels.second->SetStats(new_voice_info);
+
+ reports.clear();
+ VerifyAudioTrackStats(new_audio_track.get(), stats.get(), new_voice_info,
+ &reports);
+}
+
+// Test that if there are two local senders with the same track then two SSRC
+// reports will be created, one for each sender, with the same track ID and one
+// track report will be created for the shared track.
+TEST_P(StatsCollectorTrackTest, TwoLocalSendersWithSameTrack) {
+ constexpr uint32_t kFirstSsrc = 22;
+ constexpr uint32_t kSecondSsrc = 33;
+
+ auto pc = CreatePeerConnection();
+ auto stats = CreateStatsCollector(pc.get());
+
+ auto local_track =
+ rtc::make_ref_counted<FakeAudioTrackWithInitValue>(kLocalTrackId);
+ pc->AddSender(CreateMockSender(local_track, kFirstSsrc));
+ stats->AddLocalAudioTrack(local_track.get(), kFirstSsrc);
+ pc->AddSender(CreateMockSender(local_track, kSecondSsrc));
+ stats->AddLocalAudioTrack(local_track.get(), kSecondSsrc);
+
+ VoiceSenderInfo first_sender_info;
+ InitVoiceSenderInfo(&first_sender_info, kFirstSsrc);
+ UpdateVoiceSenderInfoFromAudioTrack(local_track.get(), &first_sender_info,
+ false);
+
+ VoiceSenderInfo second_sender_info;
+ InitVoiceSenderInfo(&second_sender_info, kSecondSsrc);
+ UpdateVoiceSenderInfoFromAudioTrack(local_track.get(), &second_sender_info,
+ false);
+
+ VoiceMediaInfo voice_info;
+ voice_info.senders.push_back(first_sender_info);
+ voice_info.senders.push_back(second_sender_info);
+
+ auto voice_media_channels = pc->AddVoiceChannel("voice", "transport");
+ voice_media_channels.first->SetStats(voice_info);
+ voice_media_channels.second->SetStats(voice_info);
+
+ stats->UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+
+ StatsReports reports;
+ stats->GetStats(local_track.get(), &reports);
+
+ // Both SSRC reports have the same track ID.
+ EXPECT_EQ(kLocalTrackId, GetValueInNthReportByType(
+ reports, StatsReport::kStatsReportTypeSsrc,
+ StatsReport::kStatsValueNameTrackId, 1));
+ EXPECT_EQ(kLocalTrackId, GetValueInNthReportByType(
+ reports, StatsReport::kStatsReportTypeSsrc,
+ StatsReport::kStatsValueNameTrackId, 2));
+
+ // The SSRC in each SSRC report is different and correspond to the sender
+ // SSRC.
+ std::vector<absl::optional<std::string>> ssrcs = {
+ GetValueInNthReportByType(reports, StatsReport::kStatsReportTypeSsrc,
+ StatsReport::kStatsValueNameSsrc, 1),
+ GetValueInNthReportByType(reports, StatsReport::kStatsReportTypeSsrc,
+ StatsReport::kStatsValueNameSsrc, 2)};
+ EXPECT_THAT(ssrcs, UnorderedElementsAre(rtc::ToString(kFirstSsrc),
+ rtc::ToString(kSecondSsrc)));
+
+ // There is one track report with the same track ID as the SSRC reports.
+ EXPECT_EQ(
+ 1u, GetReportsByType(reports, StatsReport::kStatsReportTypeTrack).size());
+ EXPECT_EQ(kLocalTrackId, GetValueInNthReportByType(
+ reports, StatsReport::kStatsReportTypeTrack,
+ StatsReport::kStatsValueNameTrackId, 1));
+}
+
+// This test verifies that stats are correctly set in video send ssrc stats.
+TEST_P(StatsCollectorTrackTest, VerifyVideoSendSsrcStats) {
+ auto pc = CreatePeerConnection();
+ auto stats = CreateStatsCollector(pc.get());
+
+ AddOutgoingVideoTrack(pc.get(), stats.get());
+
+ VideoSenderInfo video_sender_info;
+ video_sender_info.add_ssrc(1234);
+ video_sender_info.frames_encoded = 10;
+ video_sender_info.qp_sum = 11;
+ VideoMediaInfo video_info;
+ video_info.aggregated_senders.push_back(video_sender_info);
+
+ pc->AddVideoChannel("video", "transport", video_info);
+
+ stats->UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+ StatsReports reports;
+ stats->GetStats(nullptr, &reports);
+
+ EXPECT_EQ(rtc::ToString(video_sender_info.frames_encoded),
+ ExtractSsrcStatsValue(reports,
+ StatsReport::kStatsValueNameFramesEncoded));
+ EXPECT_EQ(rtc::ToString(*video_sender_info.qp_sum),
+ ExtractSsrcStatsValue(reports, StatsReport::kStatsValueNameQpSum));
+}
+
+// This test verifies that stats are correctly set in video receive ssrc stats.
+TEST_P(StatsCollectorTrackTest, VerifyVideoReceiveSsrcStatsNew) {
+ auto pc = CreatePeerConnection();
+ auto stats = CreateStatsCollector(pc.get());
+
+ AddIncomingVideoTrack(pc.get(), stats.get());
+
+ VideoReceiverInfo video_receiver_info;
+ video_receiver_info.add_ssrc(1234);
+ video_receiver_info.frames_decoded = 10;
+ video_receiver_info.qp_sum = 11;
+ VideoMediaInfo video_info;
+ video_info.receivers.push_back(video_receiver_info);
+
+ pc->AddVideoChannel("video", "transport", video_info);
+
+ stats->UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+ StatsReports reports;
+ stats->GetStats(nullptr, &reports);
+
+ EXPECT_EQ(rtc::ToString(video_receiver_info.frames_decoded),
+ ExtractSsrcStatsValue(reports,
+ StatsReport::kStatsValueNameFramesDecoded));
+ EXPECT_EQ(rtc::ToString(*video_receiver_info.qp_sum),
+ ExtractSsrcStatsValue(reports, StatsReport::kStatsValueNameQpSum));
+}
+
+INSTANTIATE_TEST_SUITE_P(HasStream, StatsCollectorTrackTest, ::testing::Bool());
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/local_audio_source.cc b/third_party/libwebrtc/pc/local_audio_source.cc
new file mode 100644
index 0000000000..51949f7f4d
--- /dev/null
+++ b/third_party/libwebrtc/pc/local_audio_source.cc
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/local_audio_source.h"
+
+using webrtc::MediaSourceInterface;
+
+namespace webrtc {
+
+rtc::scoped_refptr<LocalAudioSource> LocalAudioSource::Create(
+ const cricket::AudioOptions* audio_options) {
+ auto source = rtc::make_ref_counted<LocalAudioSource>();
+ source->Initialize(audio_options);
+ return source;
+}
+
+void LocalAudioSource::Initialize(const cricket::AudioOptions* audio_options) {
+ if (!audio_options)
+ return;
+
+ options_ = *audio_options;
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/local_audio_source.h b/third_party/libwebrtc/pc/local_audio_source.h
new file mode 100644
index 0000000000..587ce10809
--- /dev/null
+++ b/third_party/libwebrtc/pc/local_audio_source.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_LOCAL_AUDIO_SOURCE_H_
+#define PC_LOCAL_AUDIO_SOURCE_H_
+
+#include "api/audio_options.h"
+#include "api/media_stream_interface.h"
+#include "api/notifier.h"
+#include "api/scoped_refptr.h"
+
+// LocalAudioSource implements AudioSourceInterface.
+// This contains settings for switching audio processing on and off.
+
+namespace webrtc {
+
+class LocalAudioSource : public Notifier<AudioSourceInterface> {
+ public:
+ // Creates an instance of LocalAudioSource.
+ static rtc::scoped_refptr<LocalAudioSource> Create(
+ const cricket::AudioOptions* audio_options);
+
+ SourceState state() const override { return kLive; }
+ bool remote() const override { return false; }
+
+ const cricket::AudioOptions options() const override { return options_; }
+
+ void AddSink(AudioTrackSinkInterface* sink) override {}
+ void RemoveSink(AudioTrackSinkInterface* sink) override {}
+
+ protected:
+ LocalAudioSource() {}
+ ~LocalAudioSource() override {}
+
+ private:
+ void Initialize(const cricket::AudioOptions* audio_options);
+
+ cricket::AudioOptions options_;
+};
+
+} // namespace webrtc
+
+#endif // PC_LOCAL_AUDIO_SOURCE_H_
diff --git a/third_party/libwebrtc/pc/local_audio_source_unittest.cc b/third_party/libwebrtc/pc/local_audio_source_unittest.cc
new file mode 100644
index 0000000000..76d3b366c3
--- /dev/null
+++ b/third_party/libwebrtc/pc/local_audio_source_unittest.cc
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/local_audio_source.h"
+
+#include "absl/types/optional.h"
+#include "test/gtest.h"
+
+using webrtc::LocalAudioSource;
+
+TEST(LocalAudioSourceTest, InitWithAudioOptions) {
+ cricket::AudioOptions audio_options;
+ audio_options.highpass_filter = true;
+ rtc::scoped_refptr<LocalAudioSource> source =
+ LocalAudioSource::Create(&audio_options);
+ EXPECT_EQ(true, source->options().highpass_filter);
+}
+
+TEST(LocalAudioSourceTest, InitWithNoOptions) {
+ rtc::scoped_refptr<LocalAudioSource> source =
+ LocalAudioSource::Create(nullptr);
+ EXPECT_EQ(absl::nullopt, source->options().highpass_filter);
+}
diff --git a/third_party/libwebrtc/pc/media_protocol_names.cc b/third_party/libwebrtc/pc/media_protocol_names.cc
new file mode 100644
index 0000000000..52d676daf5
--- /dev/null
+++ b/third_party/libwebrtc/pc/media_protocol_names.cc
@@ -0,0 +1,105 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/media_protocol_names.h"
+
+#include <ctype.h>
+#include <stddef.h>
+
+#include <string>
+
+namespace cricket {
+
+// The official registry of RTP parameters is at
+// http://www.iana.org/assignments/rtp-parameters/rtp-parameters.xml
+// The UDP/DTLS and TCP/DTLS prefixes are not registered there.
+
+// There are multiple variants of the RTP protocol stack, including
+// UDP/TLS/RTP/SAVPF (WebRTC default), RTP/AVP, RTP/AVPF, RTP/SAVPF,
+// TCP/DTLS/RTP/SAVPF and so on. We accept anything that has RTP/
+// embedded in it somewhere as being an RTP protocol.
+const char kMediaProtocolRtpPrefix[] = "RTP/";
+
+// Protocol names generated by WebRTC
+const char kMediaProtocolSctp[] = "SCTP";
+const char kMediaProtocolUdpDtlsSctp[] = "UDP/DTLS/SCTP";
+const char kMediaProtocolDtlsSctp[] = "DTLS/SCTP";
+const char kMediaProtocolTcpDtlsSctp[] = "TCP/DTLS/SCTP";
+// RFC5124
+const char kMediaProtocolDtlsSavpf[] = "UDP/TLS/RTP/SAVPF";
+const char kMediaProtocolSavpf[] = "RTP/SAVPF";
+const char kMediaProtocolAvpf[] = "RTP/AVPF";
+
+namespace {
+
+// Protocol names that we tolerate, but do not generate.
+// We always generate offers with "UDP/TLS/RTP/SAVPF" when using DTLS-SRTP,
+// but we tolerate "RTP/SAVPF" and "RTP/SAVP" and the "UDP/TLS" and "TCP/TLS"
+// prefixes in offers we receive, for compatibility.
+// RFC4585
+const char kMediaProtocolSavp[] = "RTP/SAVP";
+const char kMediaProtocolAvp[] = "RTP/AVP";
+
+const char kMediaProtocolTcpTlsSavpf[] = "TCP/TLS/RTP/SAVPF";
+const char kMediaProtocolUdpTlsSavpf[] = "UDP/TLS/RTP/SAVPF";
+const char kMediaProtocolTcpTlsSavp[] = "TCP/TLS/RTP/SAVP";
+const char kMediaProtocolUdpTlsSavp[] = "UDP/TLS/RTP/SAVP";
+
+} // namespace
+
+bool IsDtlsSctp(absl::string_view protocol) {
+ return protocol == kMediaProtocolDtlsSctp ||
+ protocol == kMediaProtocolUdpDtlsSctp ||
+ protocol == kMediaProtocolTcpDtlsSctp;
+}
+
+bool IsPlainSctp(absl::string_view protocol) {
+ return protocol == kMediaProtocolSctp;
+}
+
+bool IsSctpProtocol(absl::string_view protocol) {
+ return IsPlainSctp(protocol) || IsDtlsSctp(protocol);
+}
+
+bool IsRtpProtocol(absl::string_view protocol) {
+ if (protocol.empty()) {
+ return true;
+ }
+ size_t pos = protocol.find(cricket::kMediaProtocolRtpPrefix);
+ if (pos == std::string::npos) {
+ return false;
+ }
+  // "RTP/" must be at the start or not preceded by an alphabetic character.
+ if (pos == 0 || !isalpha(static_cast<unsigned char>(protocol[pos - 1]))) {
+ return true;
+ }
+ return false;
+}
+
+// Note that the below functions support some protocol strings purely for
+// legacy compatibility, as required by JSEP in Section 5.1.2, Profile Names
+// and Interoperability.
+
+bool IsDtlsRtp(absl::string_view protocol) {
+ // Most-likely values first.
+ return protocol == kMediaProtocolDtlsSavpf ||
+ protocol == kMediaProtocolTcpTlsSavpf ||
+ protocol == kMediaProtocolUdpTlsSavpf ||
+ protocol == kMediaProtocolUdpTlsSavp ||
+ protocol == kMediaProtocolTcpTlsSavp;
+}
+
+bool IsPlainRtp(absl::string_view protocol) {
+ // Most-likely values first.
+ return protocol == kMediaProtocolSavpf || protocol == kMediaProtocolAvpf ||
+ protocol == kMediaProtocolSavp || protocol == kMediaProtocolAvp;
+}
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/pc/media_protocol_names.h b/third_party/libwebrtc/pc/media_protocol_names.h
new file mode 100644
index 0000000000..989c1dab6c
--- /dev/null
+++ b/third_party/libwebrtc/pc/media_protocol_names.h
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_MEDIA_PROTOCOL_NAMES_H_
+#define PC_MEDIA_PROTOCOL_NAMES_H_
+
+#include "absl/strings/string_view.h"
+
+namespace cricket {
+
+// Names or name prefixes of protocols as defined by SDP specifications,
+// and generated in SDP produced by WebRTC.
+extern const char kMediaProtocolSctp[];
+extern const char kMediaProtocolUdpDtlsSctp[];
+extern const char kMediaProtocolDtlsSavpf[];
+extern const char kMediaProtocolSavpf[];
+extern const char kMediaProtocolAvpf[];
+
+// Exported for testing only
+extern const char kMediaProtocolTcpDtlsSctp[];
+extern const char kMediaProtocolDtlsSctp[];
+
+// Returns true if the given media section protocol indicates use of RTP.
+bool IsRtpProtocol(absl::string_view protocol);
+// Returns true if the given media section protocol indicates use of SCTP.
+bool IsSctpProtocol(absl::string_view protocol);
+
+// Returns true if the given media protocol is unencrypted SCTP
+bool IsPlainSctp(absl::string_view protocol);
+// Returns true if the given media protocol is encrypted SCTP
+bool IsDtlsSctp(absl::string_view protocol);
+
+// Returns true if the given media protocol is unencrypted RTP
+bool IsPlainRtp(absl::string_view protocol);
+// Returns true if the given media protocol is encrypted RTP
+bool IsDtlsRtp(absl::string_view protocol);
+
+} // namespace cricket
+
+#endif // PC_MEDIA_PROTOCOL_NAMES_H_
diff --git a/third_party/libwebrtc/pc/media_session.cc b/third_party/libwebrtc/pc/media_session.cc
new file mode 100644
index 0000000000..a763919c16
--- /dev/null
+++ b/third_party/libwebrtc/pc/media_session.cc
@@ -0,0 +1,3145 @@
+/*
+ * Copyright 2004 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/media_session.h"
+
+#include <stddef.h>
+
+#include <algorithm>
+#include <map>
+#include <string>
+#include <unordered_map>
+#include <utility>
+
+#include "absl/algorithm/container.h"
+#include "absl/strings/match.h"
+#include "absl/strings/string_view.h"
+#include "absl/types/optional.h"
+#include "api/crypto_params.h"
+#include "media/base/codec.h"
+#include "media/base/media_constants.h"
+#include "media/base/media_engine.h"
+#include "media/base/sdp_video_format_utils.h"
+#include "media/sctp/sctp_transport_internal.h"
+#include "p2p/base/p2p_constants.h"
+#include "pc/media_protocol_names.h"
+#include "pc/rtp_media_utils.h"
+#include "pc/used_ids.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/helpers.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/ssl_stream_adapter.h"
+#include "rtc_base/string_encode.h"
+#include "rtc_base/third_party/base64/base64.h"
+#include "rtc_base/unique_id_generator.h"
+
+namespace {
+
+using rtc::UniqueRandomIdGenerator;
+using webrtc::RTCError;
+using webrtc::RTCErrorType;
+using webrtc::RtpTransceiverDirection;
+
+const char kInline[] = "inline:";
+
+// Runs `func` to enumerate the numeric SRTP suite ids allowed by
+// `crypto_options`, then appends each suite's SDES name to `names`.
+void GetSupportedSdesCryptoSuiteNames(
+    void (*func)(const webrtc::CryptoOptions&, std::vector<int>*),
+    const webrtc::CryptoOptions& crypto_options,
+    std::vector<std::string>* names) {
+  std::vector<int> suite_ids;
+  func(crypto_options, &suite_ids);
+  for (int suite_id : suite_ids) {
+    names->push_back(rtc::SrtpCryptoSuiteToName(suite_id));
+  }
+}
+
+// Converts a header-extension capability into a concrete RtpExtension.
+webrtc::RtpExtension RtpExtensionFromCapability(
+    const webrtc::RtpHeaderExtensionCapability& capability) {
+  // When the capability carries no preferred id, fall back to id 1.
+  const int id = capability.preferred_id.value_or(1);
+  return webrtc::RtpExtension(capability.uri, id);
+}
+
+// Maps every capability in `capabilities` to an RtpExtension, preserving
+// order.
+cricket::RtpHeaderExtensions RtpHeaderExtensionsFromCapabilities(
+    const std::vector<webrtc::RtpHeaderExtensionCapability>& capabilities) {
+  cricket::RtpHeaderExtensions extensions;
+  extensions.reserve(capabilities.size());
+  for (const auto& capability : capabilities) {
+    extensions.push_back(RtpExtensionFromCapability(capability));
+  }
+  return extensions;
+}
+
+// Returns `capabilities` with every kStopped entry removed; relative order of
+// the remaining entries is preserved.
+std::vector<webrtc::RtpHeaderExtensionCapability>
+UnstoppedRtpHeaderExtensionCapabilities(
+    std::vector<webrtc::RtpHeaderExtensionCapability> capabilities) {
+  auto is_stopped =
+      [](const webrtc::RtpHeaderExtensionCapability& capability) {
+        return capability.direction == RtpTransceiverDirection::kStopped;
+      };
+  capabilities.erase(
+      std::remove_if(capabilities.begin(), capabilities.end(), is_stopped),
+      capabilities.end());
+  return capabilities;
+}
+
+// A capability is "present" when some extension in `extensions` shares its
+// URI.
+bool IsCapabilityPresent(const webrtc::RtpHeaderExtensionCapability& capability,
+                         const cricket::RtpHeaderExtensions& extensions) {
+  return absl::c_any_of(extensions,
+                        [&capability](const webrtc::RtpExtension& extension) {
+                          return capability.uri == extension.uri;
+                        });
+}
+
+// Keeps a capability if it is still active, or if it was already negotiated
+// in either the unencrypted or the encrypted extension list; the kept
+// capabilities are converted to concrete extensions.
+cricket::RtpHeaderExtensions UnstoppedOrPresentRtpHeaderExtensions(
+    const std::vector<webrtc::RtpHeaderExtensionCapability>& capabilities,
+    const cricket::RtpHeaderExtensions& unencrypted,
+    const cricket::RtpHeaderExtensions& encrypted) {
+  cricket::RtpHeaderExtensions result;
+  for (const auto& capability : capabilities) {
+    const bool stopped =
+        capability.direction == RtpTransceiverDirection::kStopped;
+    if (!stopped || IsCapabilityPresent(capability, unencrypted) ||
+        IsCapabilityPresent(capability, encrypted)) {
+      result.push_back(RtpExtensionFromCapability(capability));
+    }
+  }
+  return result;
+}
+
+} // namespace
+
+namespace cricket {
+
+namespace {
+
+// RTX retransmission codecs are identified by name, case-insensitively.
+bool IsRtxCodec(const Codec& codec) {
+  const bool is_rtx = absl::EqualsIgnoreCase(codec.name, kRtxCodecName);
+  return is_rtx;
+}
+
+// Capability overload: same case-insensitive RTX name check.
+bool IsRtxCodec(const webrtc::RtpCodecCapability& capability) {
+  const bool is_rtx = absl::EqualsIgnoreCase(capability.name, kRtxCodecName);
+  return is_rtx;
+}
+
+// True when at least one codec in `codecs` is an RTX codec.
+bool ContainsRtxCodec(const std::vector<Codec>& codecs) {
+  return absl::c_any_of(codecs,
+                        [](const Codec& codec) { return IsRtxCodec(codec); });
+}
+
+// RED (redundant coding) codecs are identified by name, ignoring case.
+bool IsRedCodec(const Codec& codec) {
+  const bool is_red = absl::EqualsIgnoreCase(codec.name, kRedCodecName);
+  return is_red;
+}
+
+// Capability overload: same case-insensitive RED name check.
+bool IsRedCodec(const webrtc::RtpCodecCapability& capability) {
+  const bool is_red = absl::EqualsIgnoreCase(capability.name, kRedCodecName);
+  return is_red;
+}
+
+// FlexFEC codecs are identified by name, ignoring case.
+bool IsFlexfecCodec(const Codec& codec) {
+  const bool is_flexfec = absl::EqualsIgnoreCase(codec.name, kFlexfecCodecName);
+  return is_flexfec;
+}
+
+// True when at least one codec in `codecs` is a FlexFEC codec.
+bool ContainsFlexfecCodec(const std::vector<Codec>& codecs) {
+  return absl::c_any_of(
+      codecs, [](const Codec& codec) { return IsFlexfecCodec(codec); });
+}
+
+// ULPFEC codecs are identified by name, ignoring case.
+bool IsUlpfecCodec(const Codec& codec) {
+  const bool is_ulpfec = absl::EqualsIgnoreCase(codec.name, kUlpfecCodecName);
+  return is_ulpfec;
+}
+
+// Comfort-noise (CN) codecs are identified by name, ignoring case.
+bool IsComfortNoiseCodec(const Codec& codec) {
+  const bool is_cn =
+      absl::EqualsIgnoreCase(codec.name, kComfortNoiseCodecName);
+  return is_cn;
+}
+
+// Computes the answer direction: we answer "send" only if the offer can
+// receive and we want to send, and "recv" only if the offer can send and we
+// want to receive.
+RtpTransceiverDirection NegotiateRtpTransceiverDirection(
+    RtpTransceiverDirection offer,
+    RtpTransceiverDirection wants) {
+  const bool answer_send = webrtc::RtpTransceiverDirectionHasRecv(offer) &&
+                           webrtc::RtpTransceiverDirectionHasSend(wants);
+  const bool answer_recv = webrtc::RtpTransceiverDirectionHasSend(offer) &&
+                           webrtc::RtpTransceiverDirectionHasRecv(wants);
+  return webrtc::RtpTransceiverDirectionFromSendRecv(answer_send, answer_recv);
+}
+
+// True when `content` exists, has a media description, and that description
+// is of `media_type`. Null content matches nothing.
+bool IsMediaContentOfType(const ContentInfo* content, MediaType media_type) {
+  const MediaContentDescription* description =
+      content ? content->media_description() : nullptr;
+  return description != nullptr && description->type() == media_type;
+}
+
+// Fills `crypto_out` with a new SDES crypto description: the given `tag` and
+// `cipher` plus a freshly generated random master key||salt, base64-encoded
+// into the "inline:" key parameter. Returns false if the cipher name is
+// unknown or random data cannot be generated.
+bool CreateCryptoParams(int tag,
+                        const std::string& cipher,
+                        CryptoParams* crypto_out) {
+  int key_len;
+  int salt_len;
+  // Look up key/salt sizes for the suite; fails for unrecognized ciphers.
+  if (!rtc::GetSrtpKeyAndSaltLengths(rtc::SrtpCryptoSuiteFromName(cipher),
+                                     &key_len, &salt_len)) {
+    return false;
+  }
+
+  int master_key_len = key_len + salt_len;
+  std::string master_key;
+  if (!rtc::CreateRandomData(master_key_len, &master_key)) {
+    return false;
+  }
+
+  RTC_CHECK_EQ(master_key_len, master_key.size());
+  std::string key = rtc::Base64::Encode(master_key);
+
+  crypto_out->tag = tag;
+  crypto_out->crypto_suite = cipher;
+  // Key params take the SDP form "inline:<base64(key||salt)>".
+  crypto_out->key_params = kInline;
+  crypto_out->key_params += key;
+  return true;
+}
+
+// Appends a new crypto entry whose tag equals its index in `cryptos_out`,
+// then populates it in place. Returns false if key generation fails.
+bool AddCryptoParams(const std::string& crypto_suite,
+                     CryptoParamsVec* cryptos_out) {
+  const int tag = static_cast<int>(cryptos_out->size());
+  cryptos_out->emplace_back();
+  return CreateCryptoParams(tag, crypto_suite, &cryptos_out->back());
+}
+
+// Copies every crypto parameter set in `cryptos` into the media description.
+void AddMediaCryptos(const CryptoParamsVec& cryptos,
+                     MediaContentDescription* media) {
+  for (const CryptoParams& params : cryptos) {
+    media->AddCrypto(params);
+  }
+}
+
+// Builds one freshly keyed CryptoParams per requested suite and attaches them
+// to `media`. Bails out on the first key-generation failure without touching
+// `media`.
+bool CreateMediaCryptos(const std::vector<std::string>& crypto_suites,
+                        MediaContentDescription* media) {
+  CryptoParamsVec cryptos;
+  for (const std::string& suite : crypto_suites) {
+    if (!AddCryptoParams(suite, &cryptos)) {
+      return false;
+    }
+  }
+  AddMediaCryptos(cryptos, media);
+  return true;
+}
+
+// Returns the cryptos of `content`, or nullptr when there is no content or
+// no media description.
+const CryptoParamsVec* GetCryptos(const ContentInfo* content) {
+  if (content == nullptr || content->media_description() == nullptr) {
+    return nullptr;
+  }
+  return &content->media_description()->cryptos();
+}
+
+// Copies into `crypto_out` the first entry of `cryptos` that `crypto`
+// matches; returns false when no entry matches.
+bool FindMatchingCrypto(const CryptoParamsVec& cryptos,
+                        const CryptoParams& crypto,
+                        CryptoParams* crypto_out) {
+  for (const CryptoParams& candidate : cryptos) {
+    if (crypto.Matches(candidate)) {
+      *crypto_out = candidate;
+      return true;
+    }
+  }
+  return false;
+}
+
+// For audio, HMAC 32 (if enabled) is preferred over HMAC 80 because of the
+// low overhead.
+void GetSupportedAudioSdesCryptoSuites(
+    const webrtc::CryptoOptions& crypto_options,
+    std::vector<int>* crypto_suites) {
+  if (crypto_options.srtp.enable_aes128_sha1_32_crypto_cipher) {
+    crypto_suites->push_back(rtc::kSrtpAes128CmSha1_32);
+  }
+  crypto_suites->push_back(rtc::kSrtpAes128CmSha1_80);
+  // GCM suites are only offered when explicitly enabled in the options.
+  if (crypto_options.srtp.enable_gcm_crypto_suites) {
+    crypto_suites->push_back(rtc::kSrtpAeadAes256Gcm);
+    crypto_suites->push_back(rtc::kSrtpAeadAes128Gcm);
+  }
+}
+
+// Name-based variant: enumerates the audio suites, then maps ids to names.
+void GetSupportedAudioSdesCryptoSuiteNames(
+    const webrtc::CryptoOptions& crypto_options,
+    std::vector<std::string>* crypto_suite_names) {
+  GetSupportedSdesCryptoSuiteNames(&GetSupportedAudioSdesCryptoSuites,
+                                   crypto_options, crypto_suite_names);
+}
+
+// Video always offers 80-bit SHA1; the GCM suites are opt-in.
+void GetSupportedVideoSdesCryptoSuites(
+    const webrtc::CryptoOptions& crypto_options,
+    std::vector<int>* crypto_suites) {
+  crypto_suites->push_back(rtc::kSrtpAes128CmSha1_80);
+  if (!crypto_options.srtp.enable_gcm_crypto_suites) {
+    return;
+  }
+  crypto_suites->push_back(rtc::kSrtpAeadAes256Gcm);
+  crypto_suites->push_back(rtc::kSrtpAeadAes128Gcm);
+}
+
+// Name-based variant: enumerates the video suites, then maps ids to names.
+void GetSupportedVideoSdesCryptoSuiteNames(
+    const webrtc::CryptoOptions& crypto_options,
+    std::vector<std::string>* crypto_suite_names) {
+  GetSupportedSdesCryptoSuiteNames(&GetSupportedVideoSdesCryptoSuites,
+                                   crypto_options, crypto_suite_names);
+}
+
+// Support any GCM cipher (if enabled through options). For video support only
+// 80-bit SHA1 HMAC. For audio 32-bit HMAC is tolerated (if enabled) unless
+// bundle is enabled because it is low overhead.
+// Pick the crypto in the list that is supported.
+bool SelectCrypto(const MediaContentDescription* offer,
+                  bool bundle,
+                  const webrtc::CryptoOptions& crypto_options,
+                  CryptoParams* crypto_out) {
+  bool audio = offer->type() == MEDIA_TYPE_AUDIO;
+  const CryptoParamsVec& cryptos = offer->cryptos();
+
+  // Scan the offered cryptos in order and answer with the first acceptable
+  // suite; returns false when none of the offered suites is acceptable under
+  // the current options.
+  for (const CryptoParams& crypto : cryptos) {
+    if ((crypto_options.srtp.enable_gcm_crypto_suites &&
+         rtc::IsGcmCryptoSuiteName(crypto.crypto_suite)) ||
+        rtc::kCsAesCm128HmacSha1_80 == crypto.crypto_suite ||
+        (rtc::kCsAesCm128HmacSha1_32 == crypto.crypto_suite && audio &&
+         !bundle && crypto_options.srtp.enable_aes128_sha1_32_crypto_cipher)) {
+      // CreateCryptoParams generates a fresh random key; only the tag and
+      // suite name are taken from the offered crypto.
+      return CreateCryptoParams(crypto.tag, crypto.crypto_suite, crypto_out);
+    }
+  }
+  return false;
+}
+
+// Finds all StreamParams of all media types and attach them to stream_params.
+StreamParamsVec GetCurrentStreamParams(
+    const std::vector<const ContentInfo*>& active_local_contents) {
+  // Flatten the streams of every active local content into one vector.
+  StreamParamsVec stream_params;
+  for (const ContentInfo* content : active_local_contents) {
+    const MediaContentDescription* description = content->media_description();
+    for (const StreamParams& params : description->streams()) {
+      stream_params.push_back(params);
+    }
+  }
+  return stream_params;
+}
+
+// Builds the StreamParams for a new sender that signals its layers with SSRCs
+// (legacy simulcast). Generates the primary SSRCs plus, when requested and
+// permitted, RTX and FlexFEC SSRCs, and stamps the shared RTCP cname and the
+// sender's stream ids onto the result.
+StreamParams CreateStreamParamsForNewSenderWithSsrcs(
+    const SenderOptions& sender,
+    const std::string& rtcp_cname,
+    bool include_rtx_streams,
+    bool include_flexfec_stream,
+    UniqueRandomIdGenerator* ssrc_generator,
+    const webrtc::FieldTrialsView& field_trials) {
+  StreamParams result;
+  result.id = sender.track_id;
+
+  // TODO(brandtr): Update when we support multistream protection.
+  if (include_flexfec_stream && sender.num_sim_layers > 1) {
+    include_flexfec_stream = false;
+    RTC_LOG(LS_WARNING)
+        << "Our FlexFEC implementation only supports protecting "
+           "a single media streams. This session has multiple "
+           "media streams however, so no FlexFEC SSRC will be generated.";
+  }
+  // FlexFEC is additionally gated on the WebRTC-FlexFEC-03 field trial.
+  if (include_flexfec_stream && !field_trials.IsEnabled("WebRTC-FlexFEC-03")) {
+    include_flexfec_stream = false;
+    RTC_LOG(LS_WARNING)
+        << "WebRTC-FlexFEC trial is not enabled, not sending FlexFEC";
+  }
+
+  result.GenerateSsrcs(sender.num_sim_layers, include_rtx_streams,
+                       include_flexfec_stream, ssrc_generator);
+
+  result.cname = rtcp_cname;
+  result.set_stream_ids(sender.stream_ids);
+
+  return result;
+}
+
+// Every simulcast layer must reference a rid that was actually offered in
+// `rids`; returns false on the first layer without a matching rid.
+bool ValidateSimulcastLayers(const std::vector<RidDescription>& rids,
+                             const SimulcastLayerList& simulcast_layers) {
+  for (const SimulcastLayer& layer : simulcast_layers.GetAllLayers()) {
+    const bool rid_found =
+        absl::c_any_of(rids, [&layer](const RidDescription& rid) {
+          return rid.rid == layer.rid;
+        });
+    if (!rid_found) {
+      return false;
+    }
+  }
+  return true;
+}
+
+// Builds the StreamParams for a new sender that signals its layers with RIDs
+// (spec-compliant simulcast). No SSRCs are generated; only the track id,
+// RTCP cname, stream ids and (when there is more than one) the rids are set.
+StreamParams CreateStreamParamsForNewSenderWithRids(
+    const SenderOptions& sender,
+    const std::string& rtcp_cname) {
+  RTC_DCHECK(!sender.rids.empty());
+  RTC_DCHECK_EQ(sender.num_sim_layers, 0)
+      << "RIDs are the compliant way to indicate simulcast.";
+  RTC_DCHECK(ValidateSimulcastLayers(sender.rids, sender.simulcast_layers));
+  StreamParams result;
+  result.id = sender.track_id;
+  result.cname = rtcp_cname;
+  result.set_stream_ids(sender.stream_ids);
+
+  // More than one rid should be signaled.
+  if (sender.rids.size() > 1) {
+    result.set_rids(sender.rids);
+  }
+
+  return result;
+}
+
+// Adds SimulcastDescription if indicated by the media description options.
+// MediaContentDescription should already be set up with the send rids.
+void AddSimulcastToMediaDescription(
+    const MediaDescriptionOptions& media_description_options,
+    MediaContentDescription* description) {
+  RTC_DCHECK(description);
+
+  // Check if we are using RIDs in this scenario.
+  if (absl::c_all_of(description->streams(), [](const StreamParams& params) {
+        return !params.has_rids();
+      })) {
+    return;
+  }
+
+  RTC_DCHECK_EQ(1, description->streams().size())
+      << "RIDs are only supported in Unified Plan semantics.";
+  RTC_DCHECK_EQ(1, media_description_options.sender_options.size());
+  RTC_DCHECK(description->type() == MediaType::MEDIA_TYPE_AUDIO ||
+             description->type() == MediaType::MEDIA_TYPE_VIDEO);
+
+  // One RID or less indicates that simulcast is not needed.
+  if (description->streams()[0].rids().size() <= 1) {
+    return;
+  }
+
+  // Only negotiate the send layers.
+  // Receive layers are not populated here; only the sender's layer list is
+  // copied into the simulcast description.
+  SimulcastDescription simulcast;
+  simulcast.send_layers() =
+      media_description_options.sender_options[0].simulcast_layers;
+  description->set_simulcast_description(simulcast);
+}
+
+// Adds a StreamParams for each SenderOptions in `sender_options` to
+// content_description.
+// `current_params` - All currently known StreamParams of any media type.
+// New senders get freshly generated SSRC-based params (or RID-based params
+// when rids are present); already-known senders are re-added with their
+// existing params so SSRCs stay stable across renegotiation. Always returns
+// true in the current implementation.
+bool AddStreamParams(const std::vector<SenderOptions>& sender_options,
+                     const std::string& rtcp_cname,
+                     UniqueRandomIdGenerator* ssrc_generator,
+                     StreamParamsVec* current_streams,
+                     MediaContentDescription* content_description,
+                     const webrtc::FieldTrialsView& field_trials) {
+  // SCTP streams are not negotiated using SDP/ContentDescriptions.
+  if (IsSctpProtocol(content_description->protocol())) {
+    return true;
+  }
+
+  // RTX/FlexFEC SSRCs are only generated when the respective codec is in the
+  // description's codec list.
+  const bool include_rtx_streams =
+      ContainsRtxCodec(content_description->codecs());
+
+  const bool include_flexfec_stream =
+      ContainsFlexfecCodec(content_description->codecs());
+
+  for (const SenderOptions& sender : sender_options) {
+    StreamParams* param = GetStreamByIds(*current_streams, sender.track_id);
+    if (!param) {
+      // This is a new sender.
+      StreamParams stream_param =
+          sender.rids.empty()
+              ?
+              // Signal SSRCs and legacy simulcast (if requested).
+              CreateStreamParamsForNewSenderWithSsrcs(
+                  sender, rtcp_cname, include_rtx_streams,
+                  include_flexfec_stream, ssrc_generator, field_trials)
+              :
+              // Signal RIDs and spec-compliant simulcast (if requested).
+              CreateStreamParamsForNewSenderWithRids(sender, rtcp_cname);
+
+      content_description->AddStream(stream_param);
+
+      // Store the new StreamParams in current_streams.
+      // This is necessary so that we can use the CNAME for other media types.
+      current_streams->push_back(stream_param);
+    } else {
+      // Use existing generated SSRCs/groups, but update the sync_label if
+      // necessary. This may be needed if a MediaStreamTrack was moved from one
+      // MediaStream to another.
+      param->set_stream_ids(sender.stream_ids);
+      content_description->AddStream(*param);
+    }
+  }
+  return true;
+}
+
+// Updates the transport infos of the `sdesc` according to the given
+// `bundle_group`. The transport infos of the content names within the
+// `bundle_group` should be updated to use the ufrag, pwd and DTLS role of the
+// first content within the `bundle_group`.
+// Returns false when the bundle group is empty or its first content has no
+// transport info in `sdesc`.
+bool UpdateTransportInfoForBundle(const ContentGroup& bundle_group,
+                                  SessionDescription* sdesc) {
+  // The bundle should not be empty.
+  if (!sdesc || !bundle_group.FirstContentName()) {
+    return false;
+  }
+
+  // We should definitely have a transport for the first content.
+  const std::string& selected_content_name = *bundle_group.FirstContentName();
+  const TransportInfo* selected_transport_info =
+      sdesc->GetTransportInfoByName(selected_content_name);
+  if (!selected_transport_info) {
+    return false;
+  }
+
+  // Set the other contents to use the same ICE credentials.
+  const std::string& selected_ufrag =
+      selected_transport_info->description.ice_ufrag;
+  const std::string& selected_pwd =
+      selected_transport_info->description.ice_pwd;
+  ConnectionRole selected_connection_role =
+      selected_transport_info->description.connection_role;
+  for (TransportInfo& transport_info : sdesc->transport_infos()) {
+    // Only bundled contents other than the selected one are rewritten.
+    if (bundle_group.HasContentName(transport_info.content_name) &&
+        transport_info.content_name != selected_content_name) {
+      transport_info.description.ice_ufrag = selected_ufrag;
+      transport_info.description.ice_pwd = selected_pwd;
+      transport_info.description.connection_role = selected_connection_role;
+    }
+  }
+  return true;
+}
+
+// Gets the CryptoParamsVec of the given `content_name` from `sdesc`, and
+// sets it to `cryptos`. Returns false on null arguments or when the content
+// (or its media description) is absent.
+bool GetCryptosByName(const SessionDescription* sdesc,
+                      const std::string& content_name,
+                      CryptoParamsVec* cryptos) {
+  if (sdesc == nullptr || cryptos == nullptr) {
+    return false;
+  }
+  // GetCryptos handles both the missing-content and the missing-description
+  // cases by returning nullptr.
+  const CryptoParamsVec* found =
+      GetCryptos(sdesc->GetContentByName(content_name));
+  if (found == nullptr) {
+    return false;
+  }
+  *cryptos = *found;
+  return true;
+}
+
+// Prunes the `target_cryptos` by removing the crypto params (crypto_suite)
+// which are not available in `filter`.
+void PruneCryptos(const CryptoParamsVec& filter,
+                  CryptoParamsVec* target_cryptos) {
+  if (!target_cryptos) {
+    return;
+  }
+  // Keep only entries whose crypto_suite also appears somewhere in `filter`.
+  auto suite_in_filter = [&filter](const CryptoParams& crypto) {
+    return absl::c_any_of(filter, [&crypto](const CryptoParams& entry) {
+      return entry.crypto_suite == crypto.crypto_suite;
+    });
+  };
+  target_cryptos->erase(
+      std::remove_if(target_cryptos->begin(), target_cryptos->end(),
+                     [&suite_in_filter](const CryptoParams& crypto) {
+                       return !suite_in_filter(crypto);
+                     }),
+      target_cryptos->end());
+}
+
+bool IsRtpContent(SessionDescription* sdesc, const std::string& content_name) {
+ bool is_rtp = false;
+ ContentInfo* content = sdesc->GetContentByName(content_name);
+ if (content && content->media_description()) {
+ is_rtp = IsRtpProtocol(content->media_description()->protocol());
+ }
+ return is_rtp;
+}
+
+// Updates the crypto parameters of the `sdesc` according to the given
+// `bundle_group`. The crypto parameters of all the contents within the
+// `bundle_group` should be updated to use the common subset of the
+// available cryptos.
+bool UpdateCryptoParamsForBundle(const ContentGroup& bundle_group,
+                                 SessionDescription* sdesc) {
+  // The bundle should not be empty.
+  if (!sdesc || !bundle_group.FirstContentName()) {
+    return false;
+  }
+
+  bool common_cryptos_needed = false;
+  // Get the common cryptos.
+  const ContentNames& content_names = bundle_group.content_names();
+  CryptoParamsVec common_cryptos;
+  bool first = true;
+  for (const std::string& content_name : content_names) {
+    if (!IsRtpContent(sdesc, content_name)) {
+      continue;
+    }
+    // The common cryptos are needed if any of the content does not have DTLS
+    // enabled.
+    if (!sdesc->GetTransportInfoByName(content_name)->description.secure()) {
+      common_cryptos_needed = true;
+    }
+    if (first) {
+      first = false;
+      // Initialize the common_cryptos with the first content in the bundle
+      // group.
+      if (!GetCryptosByName(sdesc, content_name, &common_cryptos)) {
+        return false;
+      }
+      if (common_cryptos.empty()) {
+        // If there's no crypto params, we should just return.
+        return true;
+      }
+    } else {
+      // Intersect: drop common cryptos whose suite this content doesn't offer.
+      CryptoParamsVec cryptos;
+      if (!GetCryptosByName(sdesc, content_name, &cryptos)) {
+        return false;
+      }
+      PruneCryptos(cryptos, &common_cryptos);
+    }
+  }
+
+  // An empty intersection is only fatal when SDES cryptos are actually needed.
+  if (common_cryptos.empty() && common_cryptos_needed) {
+    return false;
+  }
+
+  // Update to use the common cryptos.
+  for (const std::string& content_name : content_names) {
+    if (!IsRtpContent(sdesc, content_name)) {
+      continue;
+    }
+    ContentInfo* content = sdesc->GetContentByName(content_name);
+    if (IsMediaContent(content)) {
+      MediaContentDescription* media_desc = content->media_description();
+      if (!media_desc) {
+        return false;
+      }
+      media_desc->set_cryptos(common_cryptos);
+    }
+  }
+  return true;
+}
+
+// Collects the contents of `description` that are neither rejected nor
+// stopped and whose name still matches the mid of the media description
+// options at the same index.
+std::vector<const ContentInfo*> GetActiveContents(
+    const SessionDescription& description,
+    const MediaSessionOptions& session_options) {
+  std::vector<const ContentInfo*> active_contents;
+  const size_t content_count = description.contents().size();
+  for (size_t i = 0; i < content_count; ++i) {
+    RTC_DCHECK_LT(i, session_options.media_description_options.size());
+    const ContentInfo& content = description.contents()[i];
+    const MediaDescriptionOptions& media_options =
+        session_options.media_description_options[i];
+    const bool active = !content.rejected && !media_options.stopped &&
+                        content.name == media_options.mid;
+    if (active) {
+      active_contents.push_back(&content);
+    }
+  }
+  return active_contents;
+}
+
+// Create a media content to be offered for the given `sender_options`,
+// according to the given options.rtcp_mux, session_options.is_muc, codecs,
+// secure_transport, crypto, and current_streams. If we don't currently have
+// crypto (in current_cryptos) and it is enabled (in secure_policy), crypto is
+// created (according to crypto_suites). The created content is added to the
+// offer.
+RTCError CreateContentOffer(
+    const MediaDescriptionOptions& media_description_options,
+    const MediaSessionOptions& session_options,
+    const SecurePolicy& secure_policy,
+    const CryptoParamsVec* current_cryptos,
+    const std::vector<std::string>& crypto_suites,
+    const RtpHeaderExtensions& rtp_extensions,
+    UniqueRandomIdGenerator* ssrc_generator,
+    StreamParamsVec* current_streams,
+    MediaContentDescription* offer) {
+  offer->set_rtcp_mux(session_options.rtcp_mux_enabled);
+  // Reduced-size RTCP is only enabled for video offers.
+  if (offer->type() == cricket::MEDIA_TYPE_VIDEO) {
+    offer->set_rtcp_reduced_size(true);
+  }
+
+  // Build the vector of header extensions with directions for this
+  // media_description's options.
+  RtpHeaderExtensions extensions;
+  for (auto extension_with_id : rtp_extensions) {
+    for (const auto& extension : media_description_options.header_extensions) {
+      if (extension_with_id.uri == extension.uri) {
+        // TODO(crbug.com/1051821): Configure the extension direction from
+        // the information in the media_description_options extension
+        // capability.
+        if (extension.direction != RtpTransceiverDirection::kStopped) {
+          extensions.push_back(extension_with_id);
+        }
+      }
+    }
+  }
+  offer->set_rtp_header_extensions(extensions);
+
+  AddSimulcastToMediaDescription(media_description_options, offer);
+
+  if (secure_policy != SEC_DISABLED) {
+    // Prefer carrying over the current cryptos; only generate fresh ones when
+    // nothing carries over.
+    if (current_cryptos) {
+      AddMediaCryptos(*current_cryptos, offer);
+    }
+    if (offer->cryptos().empty()) {
+      if (!CreateMediaCryptos(crypto_suites, offer)) {
+        LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR,
+                             "Failed to create crypto parameters");
+      }
+    }
+  }
+
+  if (secure_policy == SEC_REQUIRED && offer->cryptos().empty()) {
+    LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR,
+                         "Failed to create crypto parameters");
+  }
+  return RTCError::OK();
+}
+
+// Adds `codecs` and the stream parameters for the given sender options to
+// `offer`, then finishes the content via CreateContentOffer (rtcp-mux,
+// header extensions, simulcast and crypto parameters).
+RTCError CreateMediaContentOffer(
+    const MediaDescriptionOptions& media_description_options,
+    const MediaSessionOptions& session_options,
+    const std::vector<Codec>& codecs,
+    const SecurePolicy& secure_policy,
+    const CryptoParamsVec* current_cryptos,
+    const std::vector<std::string>& crypto_suites,
+    const RtpHeaderExtensions& rtp_extensions,
+    UniqueRandomIdGenerator* ssrc_generator,
+    StreamParamsVec* current_streams,
+    MediaContentDescription* offer,
+    const webrtc::FieldTrialsView& field_trials) {
+  offer->AddCodecs(codecs);
+  if (!AddStreamParams(media_description_options.sender_options,
+                       session_options.rtcp_cname, ssrc_generator,
+                       current_streams, offer, field_trials)) {
+    LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR,
+                         "Failed to add stream parameters");
+  }
+
+  return CreateContentOffer(media_description_options, session_options,
+                            secure_policy, current_cryptos, crypto_suites,
+                            rtp_extensions, ssrc_generator, current_streams,
+                            offer);
+}
+
+// Resolves the payload type ids in their respective codec lists; the
+// reference "matches" only when both ids resolve and the resolved codecs
+// match each other.
+bool ReferencedCodecsMatch(const std::vector<Codec>& codecs1,
+                           const int codec1_id,
+                           const std::vector<Codec>& codecs2,
+                           const int codec2_id,
+                           const webrtc::FieldTrialsView* field_trials) {
+  const Codec* codec1 = FindCodecById(codecs1, codec1_id);
+  if (codec1 == nullptr) {
+    return false;
+  }
+  const Codec* codec2 = FindCodecById(codecs2, codec2_id);
+  return codec2 != nullptr && codec1->Matches(*codec2, field_trials);
+}
+
+// The packetization parameter survives negotiation only when both sides
+// agree on it; otherwise it is cleared.
+void NegotiatePacketization(const Codec& local_codec,
+                            const Codec& remote_codec,
+                            Codec* negotiated_codec) {
+  if (local_codec.packetization == remote_codec.packetization) {
+    negotiated_codec->packetization = local_codec.packetization;
+  } else {
+    negotiated_codec->packetization = absl::nullopt;
+  }
+}
+
+// Finds a codec in `codecs2` that matches `codec_to_match`, which is
+// a member of `codecs1`. If `codec_to_match` is an RED or RTX codec, both
+// the codecs themselves and their associated codecs must match.
+absl::optional<Codec> FindMatchingCodec(
+    const std::vector<Codec>& codecs1,
+    const std::vector<Codec>& codecs2,
+    const Codec& codec_to_match,
+    const webrtc::FieldTrialsView* field_trials) {
+  // `codec_to_match` should be a member of `codecs1`, in order to look up
+  // RED/RTX codecs' associated codecs correctly. If not, that's a programming
+  // error.
+  RTC_DCHECK(absl::c_any_of(codecs1, [&codec_to_match](const Codec& codec) {
+    return &codec == &codec_to_match;
+  }));
+  for (const Codec& potential_match : codecs2) {
+    if (potential_match.Matches(codec_to_match, field_trials)) {
+      if (IsRtxCodec(codec_to_match)) {
+        // For RTX the referenced ("apt") codecs must also match.
+        int apt_value_1 = 0;
+        int apt_value_2 = 0;
+        if (!codec_to_match.GetParam(kCodecParamAssociatedPayloadType,
+                                     &apt_value_1) ||
+            !potential_match.GetParam(kCodecParamAssociatedPayloadType,
+                                      &apt_value_2)) {
+          RTC_LOG(LS_WARNING) << "RTX missing associated payload type.";
+          continue;
+        }
+        if (!ReferencedCodecsMatch(codecs1, apt_value_1, codecs2, apt_value_2,
+                                   field_trials)) {
+          continue;
+        }
+      } else if (IsRedCodec(codec_to_match)) {
+        auto red_parameters_1 =
+            codec_to_match.params.find(kCodecParamNotInNameValueFormat);
+        auto red_parameters_2 =
+            potential_match.params.find(kCodecParamNotInNameValueFormat);
+        bool has_parameters_1 = red_parameters_1 != codec_to_match.params.end();
+        bool has_parameters_2 =
+            red_parameters_2 != potential_match.params.end();
+        if (has_parameters_1 && has_parameters_2) {
+          // Mixed reference codecs (i.e. 111/112) are not supported.
+          // Different levels of redundancy between offer and answer are
+          // allowed since RED is considered to be declarative.
+          std::vector<absl::string_view> redundant_payloads_1 =
+              rtc::split(red_parameters_1->second, '/');
+          std::vector<absl::string_view> redundant_payloads_2 =
+              rtc::split(red_parameters_2->second, '/');
+          if (redundant_payloads_1.size() > 0 &&
+              redundant_payloads_2.size() > 0) {
+            // Each side must reference a single codec: all entries in its
+            // fmtp payload list must be identical.
+            bool consistent = true;
+            for (size_t i = 1; i < redundant_payloads_1.size(); i++) {
+              if (redundant_payloads_1[i] != redundant_payloads_1[0]) {
+                consistent = false;
+                break;
+              }
+            }
+            for (size_t i = 1; i < redundant_payloads_2.size(); i++) {
+              if (redundant_payloads_2[i] != redundant_payloads_2[0]) {
+                consistent = false;
+                break;
+              }
+            }
+            if (!consistent) {
+              continue;
+            }
+
+            int red_value_1;
+            int red_value_2;
+            if (rtc::FromString(redundant_payloads_1[0], &red_value_1) &&
+                rtc::FromString(redundant_payloads_2[0], &red_value_2)) {
+              if (!ReferencedCodecsMatch(codecs1, red_value_1, codecs2,
+                                         red_value_2, field_trials)) {
+                continue;
+              }
+            }
+          }
+        } else if (has_parameters_1 != has_parameters_2) {
+          continue;
+        }
+      }
+      return potential_match;
+    }
+  }
+  return absl::nullopt;
+}
+
+// Intersects `local_codecs` with `offered_codecs` and appends each match,
+// carrying over the offerer's payload type id/name plus negotiated
+// packetization, feedback params and RTX/RED/H264 parameters, to
+// `negotiated_codecs`. With `keep_offer_order`, the result is re-sorted to
+// follow the offer's codec order as recommended by RFC 3264.
+void NegotiateCodecs(const std::vector<Codec>& local_codecs,
+                     const std::vector<Codec>& offered_codecs,
+                     std::vector<Codec>* negotiated_codecs,
+                     bool keep_offer_order,
+                     const webrtc::FieldTrialsView* field_trials) {
+  for (const Codec& ours : local_codecs) {
+    absl::optional<Codec> theirs =
+        FindMatchingCodec(local_codecs, offered_codecs, ours, field_trials);
+    // Note that we intentionally only find one matching codec for each of our
+    // local codecs, in case the remote offer contains duplicate codecs.
+    if (theirs) {
+      Codec negotiated = ours;
+      NegotiatePacketization(ours, *theirs, &negotiated);
+      negotiated.IntersectFeedbackParams(*theirs);
+      if (IsRtxCodec(negotiated)) {
+        const auto apt_it =
+            theirs->params.find(kCodecParamAssociatedPayloadType);
+        // FindMatchingCodec shouldn't return something with no apt value.
+        RTC_DCHECK(apt_it != theirs->params.end());
+        negotiated.SetParam(kCodecParamAssociatedPayloadType, apt_it->second);
+
+        // We support parsing the declarative rtx-time parameter.
+        const auto rtx_time_it = theirs->params.find(kCodecParamRtxTime);
+        if (rtx_time_it != theirs->params.end()) {
+          negotiated.SetParam(kCodecParamRtxTime, rtx_time_it->second);
+        }
+      } else if (IsRedCodec(negotiated)) {
+        const auto red_it =
+            theirs->params.find(kCodecParamNotInNameValueFormat);
+        if (red_it != theirs->params.end()) {
+          negotiated.SetParam(kCodecParamNotInNameValueFormat, red_it->second);
+        }
+      }
+      if (absl::EqualsIgnoreCase(ours.name, kH264CodecName)) {
+        webrtc::H264GenerateProfileLevelIdForAnswer(ours.params, theirs->params,
+                                                    &negotiated.params);
+      }
+      // Answer with the offerer's payload type id and name spelling.
+      negotiated.id = theirs->id;
+      negotiated.name = theirs->name;
+      negotiated_codecs->push_back(std::move(negotiated));
+    }
+  }
+  if (keep_offer_order) {
+    // RFC3264: Although the answerer MAY list the formats in their desired
+    // order of preference, it is RECOMMENDED that unless there is a
+    // specific reason, the answerer list formats in the same relative order
+    // they were present in the offer.
+    // This can be skipped when the transceiver has any codec preferences.
+    std::unordered_map<int, int> payload_type_preferences;
+    int preference = static_cast<int>(offered_codecs.size() + 1);
+    for (const Codec& codec : offered_codecs) {
+      payload_type_preferences[codec.id] = preference--;
+    }
+    absl::c_sort(*negotiated_codecs, [&payload_type_preferences](
+                                         const Codec& a, const Codec& b) {
+      return payload_type_preferences[a.id] > payload_type_preferences[b.id];
+    });
+  }
+}
+
+// Find the codec in `codec_list` that `rtx_codec` is associated with.
+// The association is carried in the "apt" (associated payload type) fmtp
+// parameter of the RTX codec; returns nullptr (with a warning) when the
+// parameter is missing, unparsable, or references an unknown payload type.
+const Codec* GetAssociatedCodecForRtx(const std::vector<Codec>& codec_list,
+                                      const Codec& rtx_codec) {
+  std::string associated_pt_str;
+  if (!rtx_codec.GetParam(kCodecParamAssociatedPayloadType,
+                          &associated_pt_str)) {
+    RTC_LOG(LS_WARNING) << "RTX codec " << rtx_codec.id
+                        << " is missing an associated payload type.";
+    return nullptr;
+  }
+
+  int associated_pt;
+  if (!rtc::FromString(associated_pt_str, &associated_pt)) {
+    RTC_LOG(LS_WARNING) << "Couldn't convert payload type " << associated_pt_str
+                        << " of RTX codec " << rtx_codec.id
+                        << " to an integer.";
+    return nullptr;
+  }
+
+  // Find the associated codec for the RTX codec.
+  const Codec* associated_codec = FindCodecById(codec_list, associated_pt);
+  if (!associated_codec) {
+    RTC_LOG(LS_WARNING) << "Couldn't find associated codec with payload type "
+                        << associated_pt << " for RTX codec " << rtx_codec.id
+                        << ".";
+  }
+  return associated_codec;
+}
+
+// Find the codec in `codec_list` that `red_codec` is associated with.
+// The association is carried in the RED fmtp line ("<pt>/<pt>/..."); returns
+// nullptr when the parameter is missing, too short, unparsable, or references
+// an unknown payload type.
+const Codec* GetAssociatedCodecForRed(const std::vector<Codec>& codec_list,
+                                      const Codec& red_codec) {
+  std::string fmtp;
+  if (!red_codec.GetParam(kCodecParamNotInNameValueFormat, &fmtp)) {
+    // Don't log for video/RED where this is normal.
+    if (red_codec.type == Codec::Type::kAudio) {
+      RTC_LOG(LS_WARNING) << "RED codec " << red_codec.id
+                          << " is missing an associated payload type.";
+    }
+    return nullptr;
+  }
+
+  std::vector<absl::string_view> redundant_payloads = rtc::split(fmtp, '/');
+  // A valid RED fmtp references at least two payloads (primary + redundancy).
+  if (redundant_payloads.size() < 2) {
+    return nullptr;
+  }
+
+  absl::string_view associated_pt_str = redundant_payloads[0];
+  int associated_pt;
+  if (!rtc::FromString(associated_pt_str, &associated_pt)) {
+    RTC_LOG(LS_WARNING) << "Couldn't convert first payload type "
+                        << associated_pt_str << " of RED codec " << red_codec.id
+                        << " to an integer.";
+    return nullptr;
+  }
+
+  // Find the associated codec for the RED codec.
+  const Codec* associated_codec = FindCodecById(codec_list, associated_pt);
+  if (!associated_codec) {
+    RTC_LOG(LS_WARNING) << "Couldn't find associated codec with payload type "
+                        << associated_pt << " for RED codec " << red_codec.id
+                        << ".";
+  }
+  return associated_codec;
+}
+
+// Adds all codecs from `reference_codecs` to `offered_codecs` that don't
+// already exist in `offered_codecs` and ensure the payload types don't
+// collide. RTX/RED codecs are merged in a second pass so that their
+// "apt"/fmtp parameters can be rewritten to point at the (possibly remapped)
+// payload type their primary codec received in `offered_codecs`.
+void MergeCodecs(const std::vector<Codec>& reference_codecs,
+                 std::vector<Codec>* offered_codecs,
+                 UsedPayloadTypes* used_pltypes,
+                 const webrtc::FieldTrialsView* field_trials) {
+  // Add all new codecs that are not RTX/RED codecs.
+  // The two-pass splitting of the loops means preferring payload types
+  // of actual codecs with respect to collisions.
+  for (const Codec& reference_codec : reference_codecs) {
+    if (!IsRtxCodec(reference_codec) && !IsRedCodec(reference_codec) &&
+        !FindMatchingCodec(reference_codecs, *offered_codecs, reference_codec,
+                           field_trials)) {
+      Codec codec = reference_codec;
+      // May reassign codec.id if it collides with an already-used payload
+      // type; either way the id is marked as used.
+      used_pltypes->FindAndSetIdUsed(&codec);
+      offered_codecs->push_back(codec);
+    }
+  }
+
+  // Add all new RTX or RED codecs.
+  for (const Codec& reference_codec : reference_codecs) {
+    if (IsRtxCodec(reference_codec) &&
+        !FindMatchingCodec(reference_codecs, *offered_codecs, reference_codec,
+                           field_trials)) {
+      Codec rtx_codec = reference_codec;
+      const Codec* associated_codec =
+          GetAssociatedCodecForRtx(reference_codecs, rtx_codec);
+      if (!associated_codec) {
+        // An RTX codec without a resolvable primary codec is dropped.
+        continue;
+      }
+      // Find a codec in the offered list that matches the reference codec.
+      // Its payload type may be different than the reference codec.
+      absl::optional<Codec> matching_codec = FindMatchingCodec(
+          reference_codecs, *offered_codecs, *associated_codec, field_trials);
+      if (!matching_codec) {
+        RTC_LOG(LS_WARNING)
+            << "Couldn't find matching " << associated_codec->name << " codec.";
+        continue;
+      }
+
+      // Point the RTX "apt" parameter at the payload type the primary codec
+      // ended up with in the offered list.
+      rtx_codec.params[kCodecParamAssociatedPayloadType] =
+          rtc::ToString(matching_codec->id);
+      used_pltypes->FindAndSetIdUsed(&rtx_codec);
+      offered_codecs->push_back(rtx_codec);
+    } else if (IsRedCodec(reference_codec) &&
+               !FindMatchingCodec(reference_codecs, *offered_codecs,
+                                  reference_codec, field_trials)) {
+      Codec red_codec = reference_codec;
+      const Codec* associated_codec =
+          GetAssociatedCodecForRed(reference_codecs, red_codec);
+      if (associated_codec) {
+        absl::optional<Codec> matching_codec = FindMatchingCodec(
+            reference_codecs, *offered_codecs, *associated_codec, field_trials);
+        if (!matching_codec) {
+          RTC_LOG(LS_WARNING) << "Couldn't find matching "
+                              << associated_codec->name << " codec.";
+          continue;
+        }
+
+        // Rewrite the RED fmtp as "<pt>/<pt>" using the remapped primary
+        // payload type.
+        red_codec.params[kCodecParamNotInNameValueFormat] =
+            rtc::ToString(matching_codec->id) + "/" +
+            rtc::ToString(matching_codec->id);
+      }
+      // Note: unlike RTX, RED is still offered even when no associated codec
+      // was found (e.g. video/RED with no fmtp).
+      used_pltypes->FindAndSetIdUsed(&red_codec);
+      offered_codecs->push_back(red_codec);
+    }
+  }
+}
+
+// `codecs` is a full list of codecs with correct payload type mappings, which
+// don't conflict with mappings of the other media type; `supported_codecs` is
+// a list filtered for the media section`s direction but with default payload
+// types.
+//
+// Returns the subset of `codecs` matching `codec_preferences`, in preference
+// order. For each matched primary codec, the corresponding RTX codec (matched
+// via its "apt" parameter) and/or RED codec (matched via its fmtp) is appended
+// as well, but only if RTX/RED appear anywhere in the preferences. RED is
+// inserted at most once overall.
+std::vector<Codec> MatchCodecPreference(
+    const std::vector<webrtc::RtpCodecCapability>& codec_preferences,
+    const std::vector<Codec>& codecs,
+    const std::vector<Codec>& supported_codecs,
+    const webrtc::FieldTrialsView* field_trials) {
+  std::vector<Codec> filtered_codecs;
+  bool want_rtx = false;
+  bool want_red = false;
+
+  // First pass: note whether RTX/RED are wanted at all, so the main loop can
+  // skip the inner search when they are not.
+  for (const auto& codec_preference : codec_preferences) {
+    if (IsRtxCodec(codec_preference)) {
+      want_rtx = true;
+    } else if (IsRedCodec(codec_preference)) {
+      want_red = true;
+    }
+  }
+  bool red_was_added = false;
+  for (const auto& codec_preference : codec_preferences) {
+    // Match on every field of the capability except the payload type, which
+    // `supported_codecs` holds only as a default.
+    auto found_codec = absl::c_find_if(
+        supported_codecs, [&codec_preference](const Codec& codec) {
+          webrtc::RtpCodecParameters codec_parameters =
+              codec.ToCodecParameters();
+          return codec_parameters.name == codec_preference.name &&
+                 codec_parameters.kind == codec_preference.kind &&
+                 codec_parameters.num_channels ==
+                     codec_preference.num_channels &&
+                 codec_parameters.clock_rate == codec_preference.clock_rate &&
+                 codec_parameters.parameters == codec_preference.parameters;
+        });
+
+    if (found_codec != supported_codecs.end()) {
+      // Translate to the entry in `codecs`, which carries the correct
+      // (negotiated) payload type.
+      absl::optional<Codec> found_codec_with_correct_pt = FindMatchingCodec(
+          supported_codecs, codecs, *found_codec, field_trials);
+      if (found_codec_with_correct_pt) {
+        // RED may already have been added if its primary codec is before RED
+        // in the codec list.
+        bool is_red_codec = IsRedCodec(*found_codec_with_correct_pt);
+        if (!is_red_codec || !red_was_added) {
+          filtered_codecs.push_back(*found_codec_with_correct_pt);
+          red_was_added = is_red_codec ? true : red_was_added;
+        }
+        std::string id = rtc::ToString(found_codec_with_correct_pt->id);
+        // Search for the matching rtx or red codec.
+        if (want_red || want_rtx) {
+          for (const auto& codec : codecs) {
+            if (IsRtxCodec(codec)) {
+              const auto apt =
+                  codec.params.find(cricket::kCodecParamAssociatedPayloadType);
+              if (apt != codec.params.end() && apt->second == id) {
+                filtered_codecs.push_back(codec);
+                break;
+              }
+            } else if (IsRedCodec(codec)) {
+              // For RED, do not insert the codec again if it was already
+              // inserted. audio/red for opus gets enabled by having RED before
+              // the primary codec.
+              const auto fmtp =
+                  codec.params.find(cricket::kCodecParamNotInNameValueFormat);
+              if (fmtp != codec.params.end()) {
+                std::vector<absl::string_view> redundant_payloads =
+                    rtc::split(fmtp->second, '/');
+                if (!redundant_payloads.empty() &&
+                    redundant_payloads[0] == id) {
+                  if (!red_was_added) {
+                    filtered_codecs.push_back(codec);
+                    red_was_added = true;
+                  }
+                  break;
+                }
+              }
+            }
+          }
+        }
+      }
+    }
+  }
+
+  return filtered_codecs;
+}
+
+// Compute the union of `codecs1` and `codecs2`. Payload types from `codecs1`
+// take precedence; entries from `codecs2` are merged afterwards (via
+// MergeCodecs) and remapped if their payload types collide.
+std::vector<Codec> ComputeCodecsUnion(
+    const std::vector<Codec>& codecs1,
+    const std::vector<Codec>& codecs2,
+    const webrtc::FieldTrialsView* field_trials) {
+  std::vector<Codec> all_codecs;
+  UsedPayloadTypes used_payload_types;
+  for (const Codec& codec : codecs1) {
+    Codec codec_mutable = codec;
+    used_payload_types.FindAndSetIdUsed(&codec_mutable);
+    all_codecs.push_back(codec_mutable);
+  }
+
+  // Use MergeCodecs to merge the second half of our list as it already checks
+  // and fixes problems with duplicate payload types.
+  MergeCodecs(codecs2, &all_codecs, &used_payload_types, field_trials);
+
+  return all_codecs;
+}
+
+// Adds all extensions from `reference_extensions` to `offered_extensions` that
+// don't already exist in `offered_extensions` and ensure the IDs don't
+// collide. If an extension is added, it's also added to `regular_extensions` or
+// `encrypted_extensions`, and if the extension is in `regular_extensions` or
+// `encrypted_extensions`, its ID is marked as used in `used_ids`.
+// `offered_extensions` is for either audio or video while `regular_extensions`
+// and `encrypted_extensions` are used for both audio and video. There could be
+// overlap between audio extensions and video extensions.
+void MergeRtpHdrExts(const RtpHeaderExtensions& reference_extensions,
+                     RtpHeaderExtensions* offered_extensions,
+                     RtpHeaderExtensions* regular_extensions,
+                     RtpHeaderExtensions* encrypted_extensions,
+                     UsedRtpHeaderExtensionIds* used_ids) {
+  // `reference_extension` is taken by value since its ID may be rewritten by
+  // FindAndSetIdUsed below.
+  for (auto reference_extension : reference_extensions) {
+    if (!webrtc::RtpExtension::FindHeaderExtensionByUriAndEncryption(
+            *offered_extensions, reference_extension.uri,
+            reference_extension.encrypt)) {
+      if (reference_extension.encrypt) {
+        // Re-use the ID of an already-known encrypted extension with the same
+        // URI so audio and video stay consistent; otherwise claim a fresh ID.
+        const webrtc::RtpExtension* existing =
+            webrtc::RtpExtension::FindHeaderExtensionByUriAndEncryption(
+                *encrypted_extensions, reference_extension.uri,
+                reference_extension.encrypt);
+        if (existing) {
+          offered_extensions->push_back(*existing);
+        } else {
+          used_ids->FindAndSetIdUsed(&reference_extension);
+          encrypted_extensions->push_back(reference_extension);
+          offered_extensions->push_back(reference_extension);
+        }
+      } else {
+        // Same logic for non-encrypted extensions, tracked separately.
+        const webrtc::RtpExtension* existing =
+            webrtc::RtpExtension::FindHeaderExtensionByUriAndEncryption(
+                *regular_extensions, reference_extension.uri,
+                reference_extension.encrypt);
+        if (existing) {
+          offered_extensions->push_back(*existing);
+        } else {
+          used_ids->FindAndSetIdUsed(&reference_extension);
+          regular_extensions->push_back(reference_extension);
+          offered_extensions->push_back(reference_extension);
+        }
+      }
+    }
+  }
+}
+
+// For every non-encrypted extension in `offered_extensions` whose URI supports
+// encryption, appends an encrypted counterpart to `offered_extensions`,
+// re-using an existing entry from `encrypted_extensions` when one exists for
+// that URI (to keep IDs consistent across media sections) and otherwise
+// creating a new one with a freshly claimed ID from `used_ids`.
+void AddEncryptedVersionsOfHdrExts(RtpHeaderExtensions* offered_extensions,
+                                   RtpHeaderExtensions* encrypted_extensions,
+                                   UsedRtpHeaderExtensionIds* used_ids) {
+  // Collected separately and appended at the end so we don't mutate
+  // `offered_extensions` while iterating over it.
+  RtpHeaderExtensions encrypted_extensions_to_add;
+  for (const auto& extension : *offered_extensions) {
+    // Skip existing encrypted offered extension
+    if (extension.encrypt) {
+      continue;
+    }
+
+    // Skip if we cannot encrypt the extension
+    if (!webrtc::RtpExtension::IsEncryptionSupported(extension.uri)) {
+      continue;
+    }
+
+    // Skip if an encrypted extension with that URI already exists in the
+    // offered extensions.
+    const bool have_encrypted_extension =
+        webrtc::RtpExtension::FindHeaderExtensionByUriAndEncryption(
+            *offered_extensions, extension.uri, true);
+    if (have_encrypted_extension) {
+      continue;
+    }
+
+    // Determine if a shared encrypted extension with that URI already exists.
+    const webrtc::RtpExtension* shared_encrypted_extension =
+        webrtc::RtpExtension::FindHeaderExtensionByUriAndEncryption(
+            *encrypted_extensions, extension.uri, true);
+    if (shared_encrypted_extension) {
+      // Re-use the shared encrypted extension
+      encrypted_extensions_to_add.push_back(*shared_encrypted_extension);
+      continue;
+    }
+
+    // None exists. Create a new shared encrypted extension from the
+    // non-encrypted one.
+    webrtc::RtpExtension new_encrypted_extension(extension);
+    new_encrypted_extension.encrypt = true;
+    used_ids->FindAndSetIdUsed(&new_encrypted_extension);
+    encrypted_extensions->push_back(new_encrypted_extension);
+    encrypted_extensions_to_add.push_back(new_encrypted_extension);
+  }
+
+  // Append the additional encrypted extensions to be offered
+  offered_extensions->insert(offered_extensions->end(),
+                             encrypted_extensions_to_add.begin(),
+                             encrypted_extensions_to_add.end());
+}
+
+// Mostly identical to RtpExtension::FindHeaderExtensionByUri but discards any
+// encrypted extensions that this implementation cannot encrypt.
+// `filter` controls whether encrypted extensions are required, preferred or
+// discarded; it is downgraded to kDiscardEncryptedExtension when the URI has
+// no encryption support.
+const webrtc::RtpExtension* FindHeaderExtensionByUriDiscardUnsupported(
+    const std::vector<webrtc::RtpExtension>& extensions,
+    absl::string_view uri,
+    webrtc::RtpExtension::Filter filter) {
+  // Note: While it's technically possible to decrypt extensions that we don't
+  // encrypt, the symmetric API of libsrtp does not allow us to supply
+  // different IDs for encryption/decryption of header extensions depending on
+  // whether the packet is inbound or outbound. Thereby, we are limited to
+  // what we can send in encrypted form.
+  if (!webrtc::RtpExtension::IsEncryptionSupported(uri)) {
+    // If there's no encryption support and we only want encrypted extensions,
+    // there's no point in continuing the search here.
+    if (filter == webrtc::RtpExtension::kRequireEncryptedExtension) {
+      return nullptr;
+    }
+
+    // Instruct to only return non-encrypted extensions
+    filter = webrtc::RtpExtension::Filter::kDiscardEncryptedExtension;
+  }
+
+  return webrtc::RtpExtension::FindHeaderExtensionByUri(extensions, uri,
+                                                        filter);
+}
+
+// Produces the answered header-extension list in `negotiated_extensions`:
+// for each local extension also present in `offered_extensions`, the offerer's
+// entry (and thus the offerer's ID) is echoed back. Special cases below handle
+// transport-wide CC v1/v2, frame/dependency descriptors and absolute capture
+// time, which may be answered even when absent locally.
+void NegotiateRtpHeaderExtensions(const RtpHeaderExtensions& local_extensions,
+                                  const RtpHeaderExtensions& offered_extensions,
+                                  webrtc::RtpExtension::Filter filter,
+                                  RtpHeaderExtensions* negotiated_extensions) {
+  // TransportSequenceNumberV2 is not offered by default. The special logic for
+  // the TransportSequenceNumber extensions works as follows:
+  // Offer      Answer
+  // V1         V1 if in local_extensions.
+  // V1 and V2  V2 regardless of local_extensions.
+  // V2         V2 regardless of local_extensions.
+  const webrtc::RtpExtension* transport_sequence_number_v2_offer =
+      FindHeaderExtensionByUriDiscardUnsupported(
+          offered_extensions,
+          webrtc::RtpExtension::kTransportSequenceNumberV2Uri, filter);
+
+  bool frame_descriptor_in_local = false;
+  bool dependency_descriptor_in_local = false;
+  bool abs_capture_time_in_local = false;
+
+  for (const webrtc::RtpExtension& ours : local_extensions) {
+    if (ours.uri == webrtc::RtpExtension::kGenericFrameDescriptorUri00)
+      frame_descriptor_in_local = true;
+    else if (ours.uri == webrtc::RtpExtension::kDependencyDescriptorUri)
+      dependency_descriptor_in_local = true;
+    else if (ours.uri == webrtc::RtpExtension::kAbsoluteCaptureTimeUri)
+      abs_capture_time_in_local = true;
+    const webrtc::RtpExtension* theirs =
+        FindHeaderExtensionByUriDiscardUnsupported(offered_extensions, ours.uri,
+                                                   filter);
+    if (theirs) {
+      if (transport_sequence_number_v2_offer &&
+          ours.uri == webrtc::RtpExtension::kTransportSequenceNumberUri) {
+        // Don't respond to
+        // http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01
+        // if we get an offer including
+        // http://www.webrtc.org/experiments/rtp-hdrext/transport-wide-cc-02
+        continue;
+      } else {
+        // We respond with their RTP header extension id.
+        negotiated_extensions->push_back(*theirs);
+      }
+    }
+  }
+
+  if (transport_sequence_number_v2_offer) {
+    // Respond that we support kTransportSequenceNumberV2Uri.
+    negotiated_extensions->push_back(*transport_sequence_number_v2_offer);
+  }
+
+  // Frame descriptors support. If the extension is not present locally, but is
+  // in the offer, we add it to the list.
+  if (!dependency_descriptor_in_local) {
+    const webrtc::RtpExtension* theirs =
+        FindHeaderExtensionByUriDiscardUnsupported(
+            offered_extensions, webrtc::RtpExtension::kDependencyDescriptorUri,
+            filter);
+    if (theirs) {
+      negotiated_extensions->push_back(*theirs);
+    }
+  }
+  if (!frame_descriptor_in_local) {
+    const webrtc::RtpExtension* theirs =
+        FindHeaderExtensionByUriDiscardUnsupported(
+            offered_extensions,
+            webrtc::RtpExtension::kGenericFrameDescriptorUri00, filter);
+    if (theirs) {
+      negotiated_extensions->push_back(*theirs);
+    }
+  }
+
+  // Absolute capture time support. If the extension is not present locally, but
+  // is in the offer, we add it to the list.
+  if (!abs_capture_time_in_local) {
+    const webrtc::RtpExtension* theirs =
+        FindHeaderExtensionByUriDiscardUnsupported(
+            offered_extensions, webrtc::RtpExtension::kAbsoluteCaptureTimeUri,
+            filter);
+    if (theirs) {
+      negotiated_extensions->push_back(*theirs);
+    }
+  }
+}
+
+// Removes all comfort-noise (CN) codecs from `audio_codecs`, in place.
+void StripCNCodecs(AudioCodecs* audio_codecs) {
+  audio_codecs->erase(std::remove_if(audio_codecs->begin(), audio_codecs->end(),
+                                     [](const AudioCodec& codec) {
+                                       return IsComfortNoiseCodec(codec);
+                                     }),
+                      audio_codecs->end());
+}
+
+// Negotiates `local_codecs` against the codecs in `offer` and installs the
+// result (plus the offer's protocol and the answerer's stream params) on
+// `answer`. Only valid for audio/video content. Returns false if stream
+// params could not be added.
+bool SetCodecsInAnswer(const MediaContentDescription* offer,
+                       const std::vector<Codec>& local_codecs,
+                       const MediaDescriptionOptions& media_description_options,
+                       const MediaSessionOptions& session_options,
+                       UniqueRandomIdGenerator* ssrc_generator,
+                       StreamParamsVec* current_streams,
+                       MediaContentDescription* answer,
+                       const webrtc::FieldTrialsView& field_trials) {
+  RTC_DCHECK(offer->type() == MEDIA_TYPE_AUDIO ||
+             offer->type() == MEDIA_TYPE_VIDEO);
+  std::vector<Codec> negotiated_codecs;
+  // Keep the answerer's codec order only when no explicit codec preferences
+  // were set; otherwise follow the preference order.
+  NegotiateCodecs(local_codecs, offer->codecs(), &negotiated_codecs,
+                  media_description_options.codec_preferences.empty(),
+                  &field_trials);
+  answer->AddCodecs(negotiated_codecs);
+  answer->set_protocol(offer->protocol());
+  if (!AddStreamParams(media_description_options.sender_options,
+                       session_options.rtcp_cname, ssrc_generator,
+                       current_streams, answer, field_trials)) {
+    return false;  // Something went seriously wrong.
+  }
+  return true;
+}
+
+// Create a media content to be answered for the given `sender_options`
+// according to the given session_options.rtcp_mux, session_options.streams,
+// codecs, crypto, and current_streams. If we don't currently have crypto (in
+// current_cryptos) and it is enabled (in secure_policy), crypto is created
+// (according to crypto_suites). The codecs, rtcp_mux, and crypto are all
+// negotiated with the offer. If the negotiation fails, this method returns
+// false; the negotiated fields are written into `answer`.
+bool CreateMediaContentAnswer(
+    const MediaContentDescription* offer,
+    const MediaDescriptionOptions& media_description_options,
+    const MediaSessionOptions& session_options,
+    const SecurePolicy& sdes_policy,
+    const CryptoParamsVec* current_cryptos,
+    const RtpHeaderExtensions& local_rtp_extensions,
+    UniqueRandomIdGenerator* ssrc_generator,
+    bool enable_encrypted_rtp_header_extensions,
+    StreamParamsVec* current_streams,
+    bool bundle_enabled,
+    MediaContentDescription* answer) {
+  answer->set_extmap_allow_mixed_enum(offer->extmap_allow_mixed_enum());
+  const webrtc::RtpExtension::Filter extensions_filter =
+      enable_encrypted_rtp_header_extensions
+          ? webrtc::RtpExtension::Filter::kPreferEncryptedExtension
+          : webrtc::RtpExtension::Filter::kDiscardEncryptedExtension;
+
+  // Filter local extensions by capabilities and direction.
+  RtpHeaderExtensions local_rtp_extensions_to_reply_with;
+  for (auto extension_with_id : local_rtp_extensions) {
+    for (const auto& extension : media_description_options.header_extensions) {
+      if (extension_with_id.uri == extension.uri) {
+        // TODO(crbug.com/1051821): Configure the extension direction from
+        // the information in the media_description_options extension
+        // capability. For now, do not include stopped extensions.
+        // See also crbug.com/webrtc/7477 about the general lack of direction.
+        if (extension.direction != RtpTransceiverDirection::kStopped) {
+          local_rtp_extensions_to_reply_with.push_back(extension_with_id);
+        }
+      }
+    }
+  }
+  RtpHeaderExtensions negotiated_rtp_extensions;
+  NegotiateRtpHeaderExtensions(local_rtp_extensions_to_reply_with,
+                               offer->rtp_header_extensions(),
+                               extensions_filter, &negotiated_rtp_extensions);
+  answer->set_rtp_header_extensions(negotiated_rtp_extensions);
+
+  // RTCP mux is answered only if both sides want it.
+  answer->set_rtcp_mux(session_options.rtcp_mux_enabled && offer->rtcp_mux());
+  if (answer->type() == cricket::MEDIA_TYPE_VIDEO) {
+    answer->set_rtcp_reduced_size(offer->rtcp_reduced_size());
+  }
+
+  answer->set_remote_estimate(offer->remote_estimate());
+
+  if (sdes_policy != SEC_DISABLED) {
+    CryptoParams crypto;
+    if (SelectCrypto(offer, bundle_enabled, session_options.crypto_options,
+                     &crypto)) {
+      // Prefer a previously negotiated crypto with the same suite, if any.
+      if (current_cryptos) {
+        FindMatchingCrypto(*current_cryptos, crypto, &crypto);
+      }
+      answer->AddCrypto(crypto);
+    }
+  }
+
+  // SDES required but none negotiated: fail the answer.
+  if (answer->cryptos().empty() && sdes_policy == SEC_REQUIRED) {
+    return false;
+  }
+
+  AddSimulcastToMediaDescription(media_description_options, answer);
+
+  answer->set_direction(NegotiateRtpTransceiverDirection(
+      offer->direction(), media_description_options.direction));
+
+  return true;
+}
+
+// Returns true if `protocol` is acceptable for a media section of the given
+// `type`, given whether the transport is secured by DTLS. An empty protocol
+// string is always accepted.
+bool IsMediaProtocolSupported(MediaType type,
+                              const std::string& protocol,
+                              bool secure_transport) {
+  // Since not all applications serialize and deserialize the media protocol,
+  // we will have to accept `protocol` to be empty.
+  if (protocol.empty()) {
+    return true;
+  }
+
+  if (type == MEDIA_TYPE_DATA) {
+    // Check for SCTP
+    if (secure_transport) {
+      // Most likely scenarios first.
+      return IsDtlsSctp(protocol);
+    } else {
+      return IsPlainSctp(protocol);
+    }
+  }
+
+  // Allow for non-DTLS RTP protocol even when using DTLS because that's what
+  // JSEP specifies.
+  if (secure_transport) {
+    // Most likely scenarios first.
+    return IsDtlsRtp(protocol) || IsPlainRtp(protocol);
+  } else {
+    return IsPlainRtp(protocol);
+  }
+}
+
+// Chooses the RTP profile for `desc`: SAVPF when SDES cryptos are present,
+// DTLS-SAVPF when the transport is DTLS-secured, plain AVPF otherwise.
+void SetMediaProtocol(bool secure_transport, MediaContentDescription* desc) {
+  if (!desc->cryptos().empty())
+    desc->set_protocol(kMediaProtocolSavpf);
+  else if (secure_transport)
+    desc->set_protocol(kMediaProtocolDtlsSavpf);
+  else
+    desc->set_protocol(kMediaProtocolAvpf);
+}
+
+// Gets the TransportDescription of the given `content_name` from the
+// `current_description`. Returns nullptr if `current_description` is null or
+// has no transport info for that content.
+const TransportDescription* GetTransportDescription(
+    const std::string& content_name,
+    const SessionDescription* current_description) {
+  const TransportDescription* desc = NULL;
+  if (current_description) {
+    const TransportInfo* info =
+        current_description->GetTransportInfoByName(content_name);
+    if (info) {
+      desc = &info->description;
+    }
+  }
+  return desc;
+}
+
+// Gets the current DTLS state from the transport description.
+// Returns true if the transport of the m= section `content` belongs to is
+// secure. NOTE(review): assumes `content` points into
+// `current_description->contents()` — the index is derived by pointer
+// arithmetic against the first element.
+bool IsDtlsActive(const ContentInfo* content,
+                  const SessionDescription* current_description) {
+  if (!content) {
+    return false;
+  }
+
+  size_t msection_index = content - &current_description->contents()[0];
+
+  // Guard against descriptions with fewer transport infos than contents.
+  if (current_description->transport_infos().size() <= msection_index) {
+    return false;
+  }
+
+  return current_description->transport_infos()[msection_index]
+      .description.secure();
+}
+
+} // namespace
+
+// Adds an audio sender (no RIDs, no simulcast, a single layer) to this
+// media description's sender options. Only valid on audio sections.
+void MediaDescriptionOptions::AddAudioSender(
+    const std::string& track_id,
+    const std::vector<std::string>& stream_ids) {
+  RTC_DCHECK(type == MEDIA_TYPE_AUDIO);
+  AddSenderInternal(track_id, stream_ids, {}, SimulcastLayerList(), 1);
+}
+
+// Adds a video sender with optional RID-based simulcast description.
+// Only valid on video sections; `rids` and legacy `num_sim_layers` are
+// mutually exclusive ways to express simulcast.
+void MediaDescriptionOptions::AddVideoSender(
+    const std::string& track_id,
+    const std::vector<std::string>& stream_ids,
+    const std::vector<RidDescription>& rids,
+    const SimulcastLayerList& simulcast_layers,
+    int num_sim_layers) {
+  RTC_DCHECK(type == MEDIA_TYPE_VIDEO);
+  RTC_DCHECK(rids.empty() || num_sim_layers == 0)
+      << "RIDs are the compliant way to indicate simulcast.";
+  RTC_DCHECK(ValidateSimulcastLayers(rids, simulcast_layers));
+  AddSenderInternal(track_id, stream_ids, rids, simulcast_layers,
+                    num_sim_layers);
+}
+
+// Shared implementation for AddAudioSender/AddVideoSender: appends a fully
+// populated SenderOptions entry. Currently restricted to exactly one stream
+// id per sender.
+void MediaDescriptionOptions::AddSenderInternal(
+    const std::string& track_id,
+    const std::vector<std::string>& stream_ids,
+    const std::vector<RidDescription>& rids,
+    const SimulcastLayerList& simulcast_layers,
+    int num_sim_layers) {
+  // TODO(steveanton): Support any number of stream ids.
+  RTC_CHECK(stream_ids.size() == 1U);
+  SenderOptions options;
+  options.track_id = track_id;
+  options.stream_ids = stream_ids;
+  options.simulcast_layers = simulcast_layers;
+  options.rids = rids;
+  options.num_sim_layers = num_sim_layers;
+  sender_options.push_back(options);
+}
+
+// Returns true if any media description option has the given media `type`.
+bool MediaSessionOptions::HasMediaDescription(MediaType type) const {
+  return absl::c_any_of(
+      media_description_options,
+      [type](const MediaDescriptionOptions& t) { return t.type == type; });
+}
+
+// Constructs the factory, seeding the send/recv codec lists from the media
+// engine (if provided; `media_engine` may be null in tests) and precomputing
+// the sendrecv intersections and unions. `transport_desc_factory` must be
+// non-null and must outlive this object.
+MediaSessionDescriptionFactory::MediaSessionDescriptionFactory(
+    cricket::MediaEngineInterface* media_engine,
+    bool rtx_enabled,
+    rtc::UniqueRandomIdGenerator* ssrc_generator,
+    const TransportDescriptionFactory* transport_desc_factory)
+    : ssrc_generator_(ssrc_generator),
+      transport_desc_factory_(transport_desc_factory) {
+  RTC_CHECK(transport_desc_factory_);
+  if (media_engine) {
+    audio_send_codecs_ = media_engine->voice().send_codecs();
+    audio_recv_codecs_ = media_engine->voice().recv_codecs();
+    video_send_codecs_ = media_engine->video().send_codecs(rtx_enabled);
+    video_recv_codecs_ = media_engine->video().recv_codecs(rtx_enabled);
+  }
+  ComputeAudioCodecsIntersectionAndUnion();
+  ComputeVideoCodecsIntersectionAndUnion();
+}
+
+// Codecs usable for both sending and receiving audio (the intersection).
+const AudioCodecs& MediaSessionDescriptionFactory::audio_sendrecv_codecs()
+    const {
+  return audio_sendrecv_codecs_;
+}
+
+// Codecs this endpoint can send audio with.
+const AudioCodecs& MediaSessionDescriptionFactory::audio_send_codecs() const {
+  return audio_send_codecs_;
+}
+
+// Codecs this endpoint can receive audio with.
+const AudioCodecs& MediaSessionDescriptionFactory::audio_recv_codecs() const {
+  return audio_recv_codecs_;
+}
+
+// Replaces the audio codec lists and recomputes their intersection/union.
+void MediaSessionDescriptionFactory::set_audio_codecs(
+    const AudioCodecs& send_codecs,
+    const AudioCodecs& recv_codecs) {
+  audio_send_codecs_ = send_codecs;
+  audio_recv_codecs_ = recv_codecs;
+  ComputeAudioCodecsIntersectionAndUnion();
+}
+
+// Codecs usable for both sending and receiving video (the intersection).
+const VideoCodecs& MediaSessionDescriptionFactory::video_sendrecv_codecs()
+    const {
+  return video_sendrecv_codecs_;
+}
+
+// Codecs this endpoint can send video with.
+const VideoCodecs& MediaSessionDescriptionFactory::video_send_codecs() const {
+  return video_send_codecs_;
+}
+
+// Codecs this endpoint can receive video with.
+const VideoCodecs& MediaSessionDescriptionFactory::video_recv_codecs() const {
+  return video_recv_codecs_;
+}
+
+// Replaces the video codec lists and recomputes their intersection/union.
+void MediaSessionDescriptionFactory::set_video_codecs(
+    const VideoCodecs& send_codecs,
+    const VideoCodecs& recv_codecs) {
+  video_send_codecs_ = send_codecs;
+  video_recv_codecs_ = recv_codecs;
+  ComputeVideoCodecsIntersectionAndUnion();
+}
+
+// Returns `extensions` with unified-plan-only extensions (MID, RID,
+// repaired-RID) removed when this factory operates in Plan B mode.
+RtpHeaderExtensions
+MediaSessionDescriptionFactory::filtered_rtp_header_extensions(
+    RtpHeaderExtensions extensions) const {
+  if (!is_unified_plan_) {
+    // Remove extensions only supported with unified-plan.
+    extensions.erase(
+        std::remove_if(
+            extensions.begin(), extensions.end(),
+            [](const webrtc::RtpExtension& extension) {
+              return extension.uri == webrtc::RtpExtension::kMidUri ||
+                     extension.uri == webrtc::RtpExtension::kRidUri ||
+                     extension.uri == webrtc::RtpExtension::kRepairedRidUri;
+            }),
+        extensions.end());
+  }
+  return extensions;
+}
+
+// Builds an SDP offer from `session_options`, recycling transports, streams
+// and payload-type mappings from `current_description` when present. Each
+// MediaDescriptionOptions entry produces one m= section in order; BUNDLE
+// grouping and MSID signaling are applied afterwards. Returns the offer or
+// the first error produced while adding a content.
+webrtc::RTCErrorOr<std::unique_ptr<SessionDescription>>
+MediaSessionDescriptionFactory::CreateOfferOrError(
+    const MediaSessionOptions& session_options,
+    const SessionDescription* current_description) const {
+  // Must have options for each existing section.
+  if (current_description) {
+    RTC_DCHECK_LE(current_description->contents().size(),
+                  session_options.media_description_options.size());
+  }
+
+  IceCredentialsIterator ice_credentials(
+      session_options.pooled_ice_credentials);
+
+  std::vector<const ContentInfo*> current_active_contents;
+  if (current_description) {
+    current_active_contents =
+        GetActiveContents(*current_description, session_options);
+  }
+
+  StreamParamsVec current_streams =
+      GetCurrentStreamParams(current_active_contents);
+
+  // Codec lists and header-extension IDs are computed up front so all m=
+  // sections share one payload-type / extension-ID space.
+  AudioCodecs offer_audio_codecs;
+  VideoCodecs offer_video_codecs;
+  GetCodecsForOffer(current_active_contents, &offer_audio_codecs,
+                    &offer_video_codecs);
+  AudioVideoRtpHeaderExtensions extensions_with_ids =
+      GetOfferedRtpHeaderExtensionsWithIds(
+          current_active_contents, session_options.offer_extmap_allow_mixed,
+          session_options.media_description_options);
+
+  auto offer = std::make_unique<SessionDescription>();
+
+  // Iterate through the media description options, matching with existing media
+  // descriptions in `current_description`.
+  size_t msection_index = 0;
+  for (const MediaDescriptionOptions& media_description_options :
+       session_options.media_description_options) {
+    const ContentInfo* current_content = nullptr;
+    if (current_description &&
+        msection_index < current_description->contents().size()) {
+      current_content = &current_description->contents()[msection_index];
+      // Media type must match unless this media section is being recycled.
+    }
+    RTCError error;
+    switch (media_description_options.type) {
+      case MEDIA_TYPE_AUDIO:
+        error = AddAudioContentForOffer(
+            media_description_options, session_options, current_content,
+            current_description, extensions_with_ids.audio, offer_audio_codecs,
+            &current_streams, offer.get(), &ice_credentials);
+        break;
+      case MEDIA_TYPE_VIDEO:
+        error = AddVideoContentForOffer(
+            media_description_options, session_options, current_content,
+            current_description, extensions_with_ids.video, offer_video_codecs,
+            &current_streams, offer.get(), &ice_credentials);
+        break;
+      case MEDIA_TYPE_DATA:
+        error = AddDataContentForOffer(media_description_options,
+                                       session_options, current_content,
+                                       current_description, &current_streams,
+                                       offer.get(), &ice_credentials);
+        break;
+      case MEDIA_TYPE_UNSUPPORTED:
+        error = AddUnsupportedContentForOffer(
+            media_description_options, session_options, current_content,
+            current_description, offer.get(), &ice_credentials);
+        break;
+      default:
+        RTC_DCHECK_NOTREACHED();
+    }
+    if (!error.ok()) {
+      return error;
+    }
+    ++msection_index;
+  }
+
+  // Bundle the contents together, if we've been asked to do so, and update any
+  // parameters that need to be tweaked for BUNDLE.
+  if (session_options.bundle_enabled) {
+    ContentGroup offer_bundle(GROUP_TYPE_BUNDLE);
+    for (const ContentInfo& content : offer->contents()) {
+      if (content.rejected) {
+        continue;
+      }
+      // TODO(deadbeef): There are conditions that make bundling two media
+      // descriptions together illegal. For example, they use the same payload
+      // type to represent different codecs, or same IDs for different header
+      // extensions. We need to detect this and not try to bundle those media
+      // descriptions together.
+      offer_bundle.AddContentName(content.name);
+    }
+    if (!offer_bundle.content_names().empty()) {
+      offer->AddGroup(offer_bundle);
+      if (!UpdateTransportInfoForBundle(offer_bundle, offer.get())) {
+        LOG_AND_RETURN_ERROR(
+            RTCErrorType::INTERNAL_ERROR,
+            "CreateOffer failed to UpdateTransportInfoForBundle");
+      }
+      if (!UpdateCryptoParamsForBundle(offer_bundle, offer.get())) {
+        LOG_AND_RETURN_ERROR(
+            RTCErrorType::INTERNAL_ERROR,
+            "CreateOffer failed to UpdateCryptoParamsForBundle.");
+      }
+    }
+  }
+
+  // The following determines how to signal MSIDs to ensure compatibility with
+  // older endpoints (in particular, older Plan B endpoints).
+  if (is_unified_plan_) {
+    // Be conservative and signal using both a=msid and a=ssrc lines. Unified
+    // Plan answerers will look at a=msid and Plan B answerers will look at the
+    // a=ssrc MSID line.
+    offer->set_msid_signaling(cricket::kMsidSignalingMediaSection |
+                              cricket::kMsidSignalingSsrcAttribute);
+  } else {
+    // Plan B always signals MSID using a=ssrc lines.
+    offer->set_msid_signaling(cricket::kMsidSignalingSsrcAttribute);
+  }
+
+  offer->set_extmap_allow_mixed(session_options.offer_extmap_allow_mixed);
+
+  return offer;
+}
+
+webrtc::RTCErrorOr<std::unique_ptr<SessionDescription>>
+MediaSessionDescriptionFactory::CreateAnswerOrError(
+ const SessionDescription* offer,
+ const MediaSessionOptions& session_options,
+ const SessionDescription* current_description) const {
+ if (!offer) {
+ LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, "Called without offer.");
+ }
+
+ // Must have options for exactly as many sections as in the offer.
+ RTC_DCHECK_EQ(offer->contents().size(),
+ session_options.media_description_options.size());
+
+ IceCredentialsIterator ice_credentials(
+ session_options.pooled_ice_credentials);
+
+ std::vector<const ContentInfo*> current_active_contents;
+ if (current_description) {
+ current_active_contents =
+ GetActiveContents(*current_description, session_options);
+ }
+
+ StreamParamsVec current_streams =
+ GetCurrentStreamParams(current_active_contents);
+
+ // Get list of all possible codecs that respects existing payload type
+ // mappings and uses a single payload type space.
+ //
+ // Note that these lists may be further filtered for each m= section; this
+ // step is done just to establish the payload type mappings shared by all
+ // sections.
+ AudioCodecs answer_audio_codecs;
+ VideoCodecs answer_video_codecs;
+ GetCodecsForAnswer(current_active_contents, *offer, &answer_audio_codecs,
+ &answer_video_codecs);
+
+ auto answer = std::make_unique<SessionDescription>();
+
+ // If the offer supports BUNDLE, and we want to use it too, create a BUNDLE
+ // group in the answer with the appropriate content names.
+ std::vector<const ContentGroup*> offer_bundles =
+ offer->GetGroupsByName(GROUP_TYPE_BUNDLE);
+ // There are as many answer BUNDLE groups as offer BUNDLE groups (even if
+ // rejected, we respond with an empty group). `offer_bundles`,
+ // `answer_bundles` and `bundle_transports` share the same size and indices.
+ std::vector<ContentGroup> answer_bundles;
+ std::vector<std::unique_ptr<TransportInfo>> bundle_transports;
+ answer_bundles.reserve(offer_bundles.size());
+ bundle_transports.reserve(offer_bundles.size());
+ for (size_t i = 0; i < offer_bundles.size(); ++i) {
+ answer_bundles.emplace_back(GROUP_TYPE_BUNDLE);
+ bundle_transports.emplace_back(nullptr);
+ }
+
+ answer->set_extmap_allow_mixed(offer->extmap_allow_mixed());
+
+ // Iterate through the media description options, matching with existing
+ // media descriptions in `current_description`.
+ size_t msection_index = 0;
+ for (const MediaDescriptionOptions& media_description_options :
+ session_options.media_description_options) {
+ const ContentInfo* offer_content = &offer->contents()[msection_index];
+ // Media types and MIDs must match between the remote offer and the
+ // MediaDescriptionOptions.
+ RTC_DCHECK(
+ IsMediaContentOfType(offer_content, media_description_options.type));
+ RTC_DCHECK(media_description_options.mid == offer_content->name);
+ // Get the index of the BUNDLE group that this MID belongs to, if any.
+ absl::optional<size_t> bundle_index;
+ for (size_t i = 0; i < offer_bundles.size(); ++i) {
+ if (offer_bundles[i]->HasContentName(media_description_options.mid)) {
+ bundle_index = i;
+ break;
+ }
+ }
+ TransportInfo* bundle_transport =
+ bundle_index.has_value() ? bundle_transports[bundle_index.value()].get()
+ : nullptr;
+
+ const ContentInfo* current_content = nullptr;
+ if (current_description &&
+ msection_index < current_description->contents().size()) {
+ current_content = &current_description->contents()[msection_index];
+ }
+ RtpHeaderExtensions header_extensions = RtpHeaderExtensionsFromCapabilities(
+ UnstoppedRtpHeaderExtensionCapabilities(
+ media_description_options.header_extensions));
+ RTCError error;
+ switch (media_description_options.type) {
+ case MEDIA_TYPE_AUDIO:
+ error = AddAudioContentForAnswer(
+ media_description_options, session_options, offer_content, offer,
+ current_content, current_description, bundle_transport,
+ answer_audio_codecs, header_extensions, &current_streams,
+ answer.get(), &ice_credentials);
+ break;
+ case MEDIA_TYPE_VIDEO:
+ error = AddVideoContentForAnswer(
+ media_description_options, session_options, offer_content, offer,
+ current_content, current_description, bundle_transport,
+ answer_video_codecs, header_extensions, &current_streams,
+ answer.get(), &ice_credentials);
+ break;
+ case MEDIA_TYPE_DATA:
+ error = AddDataContentForAnswer(
+ media_description_options, session_options, offer_content, offer,
+ current_content, current_description, bundle_transport,
+ &current_streams, answer.get(), &ice_credentials);
+ break;
+ case MEDIA_TYPE_UNSUPPORTED:
+ error = AddUnsupportedContentForAnswer(
+ media_description_options, session_options, offer_content, offer,
+ current_content, current_description, bundle_transport,
+ answer.get(), &ice_credentials);
+ break;
+ default:
+ RTC_DCHECK_NOTREACHED();
+ }
+ if (!error.ok()) {
+ return error;
+ }
+ ++msection_index;
+ // See if we can add the newly generated m= section to the BUNDLE group in
+ // the answer.
+ ContentInfo& added = answer->contents().back();
+ if (!added.rejected && session_options.bundle_enabled &&
+ bundle_index.has_value()) {
+ // The `bundle_index` is for `media_description_options.mid`.
+ RTC_DCHECK_EQ(media_description_options.mid, added.name);
+ answer_bundles[bundle_index.value()].AddContentName(added.name);
+ bundle_transports[bundle_index.value()].reset(
+ new TransportInfo(*answer->GetTransportInfoByName(added.name)));
+ }
+ }
+
+ // If BUNDLE group(s) were offered, put the same number of BUNDLE groups in
+ // the answer even if they're empty. RFC5888 says:
+ //
+ // A SIP entity that receives an offer that contains an "a=group" line
+ // with semantics that are understood MUST return an answer that
+ // contains an "a=group" line with the same semantics.
+ if (!offer_bundles.empty()) {
+ for (const ContentGroup& answer_bundle : answer_bundles) {
+ answer->AddGroup(answer_bundle);
+
+ if (answer_bundle.FirstContentName()) {
+ // Share the same ICE credentials and crypto params across all contents,
+ // as BUNDLE requires.
+ if (!UpdateTransportInfoForBundle(answer_bundle, answer.get())) {
+ LOG_AND_RETURN_ERROR(
+ RTCErrorType::INTERNAL_ERROR,
+ "CreateAnswer failed to UpdateTransportInfoForBundle.");
+ }
+
+ if (!UpdateCryptoParamsForBundle(answer_bundle, answer.get())) {
+ LOG_AND_RETURN_ERROR(
+ RTCErrorType::INTERNAL_ERROR,
+ "CreateAnswer failed to UpdateCryptoParamsForBundle.");
+ }
+ }
+ }
+ }
+
+ // The following determines how to signal MSIDs to ensure compatibility with
+ // older endpoints (in particular, older Plan B endpoints).
+ if (is_unified_plan_) {
+ // Unified Plan needs to look at what the offer included to find the most
+ // compatible answer.
+ if (offer->msid_signaling() == 0) {
+ // We end up here in one of three cases:
+ // 1. An empty offer. We'll reply with an empty answer so it doesn't
+ // matter what we pick here.
+ // 2. A data channel only offer. We won't add any MSIDs to the answer so
+ // it also doesn't matter what we pick here.
+ // 3. Media that's either sendonly or inactive from the remote endpoint.
+ // We don't have any information to say whether the endpoint is Plan B
+ // or Unified Plan, so be conservative and send both.
+ answer->set_msid_signaling(cricket::kMsidSignalingMediaSection |
+ cricket::kMsidSignalingSsrcAttribute);
+ } else if (offer->msid_signaling() ==
+ (cricket::kMsidSignalingMediaSection |
+ cricket::kMsidSignalingSsrcAttribute)) {
+ // If both a=msid and a=ssrc MSID signaling methods were used, we're
+ // probably talking to a Unified Plan endpoint so respond with just
+ // a=msid.
+ answer->set_msid_signaling(cricket::kMsidSignalingMediaSection);
+ } else {
+ // Otherwise, it's clear which method the offerer is using so repeat that
+ // back to them.
+ answer->set_msid_signaling(offer->msid_signaling());
+ }
+ } else {
+ // Plan B always signals MSID using a=ssrc lines.
+ answer->set_msid_signaling(cricket::kMsidSignalingSsrcAttribute);
+ }
+
+ return answer;
+}
+
+const AudioCodecs& MediaSessionDescriptionFactory::GetAudioCodecsForOffer(
+    const RtpTransceiverDirection& direction) const {
+  switch (direction) {
+    case RtpTransceiverDirection::kSendOnly:
+      return audio_send_codecs_;
+    case RtpTransceiverDirection::kRecvOnly:
+      return audio_recv_codecs_;
+    case RtpTransceiverDirection::kSendRecv:
+    case RtpTransceiverDirection::kStopped:
+    case RtpTransceiverDirection::kInactive:
+      // Inactive/stopped streams offer the full sendrecv list, so a later
+      // direction change does not require renegotiating codecs.
+      return audio_sendrecv_codecs_;
+  }
+  RTC_CHECK_NOTREACHED();
+}
+
+const AudioCodecs& MediaSessionDescriptionFactory::GetAudioCodecsForAnswer(
+    const RtpTransceiverDirection& offer,
+    const RtpTransceiverDirection& answer) const {
+  switch (answer) {
+    case RtpTransceiverDirection::kSendOnly:
+      return audio_send_codecs_;
+    case RtpTransceiverDirection::kRecvOnly:
+      return audio_recv_codecs_;
+    case RtpTransceiverDirection::kSendRecv:
+    case RtpTransceiverDirection::kStopped:
+    case RtpTransceiverDirection::kInactive:
+      // Inactive and sendrecv answers accept whatever the offer asked for, so
+      // the list mirrors the reversed offer direction (RFC 3264 section 6.1).
+      return GetAudioCodecsForOffer(
+          webrtc::RtpTransceiverDirectionReversed(offer));
+  }
+  RTC_CHECK_NOTREACHED();
+}
+
+const VideoCodecs& MediaSessionDescriptionFactory::GetVideoCodecsForOffer(
+    const RtpTransceiverDirection& direction) const {
+  switch (direction) {
+    case RtpTransceiverDirection::kSendOnly:
+      return video_send_codecs_;
+    case RtpTransceiverDirection::kRecvOnly:
+      return video_recv_codecs_;
+    case RtpTransceiverDirection::kSendRecv:
+    case RtpTransceiverDirection::kStopped:
+    case RtpTransceiverDirection::kInactive:
+      // Inactive/stopped streams offer the full sendrecv list, so a later
+      // direction change does not require renegotiating codecs.
+      return video_sendrecv_codecs_;
+  }
+  RTC_CHECK_NOTREACHED();
+}
+
+const VideoCodecs& MediaSessionDescriptionFactory::GetVideoCodecsForAnswer(
+    const RtpTransceiverDirection& offer,
+    const RtpTransceiverDirection& answer) const {
+  switch (answer) {
+    case RtpTransceiverDirection::kSendOnly:
+      return video_send_codecs_;
+    case RtpTransceiverDirection::kRecvOnly:
+      return video_recv_codecs_;
+    case RtpTransceiverDirection::kSendRecv:
+    case RtpTransceiverDirection::kStopped:
+    case RtpTransceiverDirection::kInactive:
+      // Inactive and sendrecv answers accept whatever the offer asked for, so
+      // the list mirrors the reversed offer direction (RFC 3264 section 6.1).
+      return GetVideoCodecsForOffer(
+          webrtc::RtpTransceiverDirectionReversed(offer));
+  }
+  RTC_CHECK_NOTREACHED();
+}
+
+// Folds the negotiated codecs of every active m= section into the
+// per-media-type output lists, registering each payload type in
+// `used_pltypes` so it is not handed out again later.
+void MergeCodecsFromDescription(
+    const std::vector<const ContentInfo*>& current_active_contents,
+    AudioCodecs* audio_codecs,
+    VideoCodecs* video_codecs,
+    UsedPayloadTypes* used_pltypes,
+    const webrtc::FieldTrialsView* field_trials) {
+  for (const ContentInfo* content : current_active_contents) {
+    if (IsMediaContentOfType(content, MEDIA_TYPE_VIDEO)) {
+      MergeCodecs(content->media_description()->codecs(), video_codecs,
+                  used_pltypes, field_trials);
+      continue;
+    }
+    if (IsMediaContentOfType(content, MEDIA_TYPE_AUDIO)) {
+      MergeCodecs(content->media_description()->codecs(), audio_codecs,
+                  used_pltypes, field_trials);
+    }
+    // Other media types (data, unsupported) carry no RTP codecs to merge.
+  }
+}
+
+// Builds the audio and video codec lists for an offer:
+//   1. Seed payload type -> codec mappings from the current description so
+//      that existing payload types stay stable across re-offers.
+//   2. Append this factory's codecs that were not already present.
+// Per-m=-section filtering on the directional attribute happens elsewhere.
+void MediaSessionDescriptionFactory::GetCodecsForOffer(
+    const std::vector<const ContentInfo*>& current_active_contents,
+    AudioCodecs* audio_codecs,
+    VideoCodecs* video_codecs) const {
+  const webrtc::FieldTrialsView* trials = &transport_desc_factory_->trials();
+
+  // Payload types already claimed by the current description must not be
+  // reused when the remaining codecs are merged in below.
+  UsedPayloadTypes used_payload_types;
+  MergeCodecsFromDescription(current_active_contents, audio_codecs,
+                             video_codecs, &used_payload_types, trials);
+
+  // Locally supported codecs that the current description does not mention.
+  MergeCodecs(all_audio_codecs_, audio_codecs, &used_payload_types, trials);
+  MergeCodecs(all_video_codecs_, video_codecs, &used_payload_types, trials);
+}
+
+// Getting codecs for an answer involves these steps:
+//
+// 1. Construct payload type -> codec mappings for current description.
+// 2. Add any codecs from the offer that weren't already present.
+// 3. Add any remaining codecs that weren't already present.
+// 4. For each individual media description (m= section), filter codecs based
+//    on the directional attribute (happens in another method).
+void MediaSessionDescriptionFactory::GetCodecsForAnswer(
+    const std::vector<const ContentInfo*>& current_active_contents,
+    const SessionDescription& remote_offer,
+    AudioCodecs* audio_codecs,
+    VideoCodecs* video_codecs) const {
+  const webrtc::FieldTrialsView* field_trials =
+      &transport_desc_factory_->trials();
+  // First - get all codecs from the current description if the media type
+  // is used. Add them to `used_pltypes` so the payload type is not reused if a
+  // new media type is added.
+  UsedPayloadTypes used_pltypes;
+  MergeCodecsFromDescription(current_active_contents, audio_codecs,
+                             video_codecs, &used_pltypes, field_trials);
+
+  // Second - filter out codecs that we don't support at all and should ignore.
+  AudioCodecs filtered_offered_audio_codecs;
+  VideoCodecs filtered_offered_video_codecs;
+  for (const ContentInfo& content : remote_offer.contents()) {
+    if (IsMediaContentOfType(&content, MEDIA_TYPE_AUDIO)) {
+      std::vector<Codec> offered_codecs = content.media_description()->codecs();
+      for (const Codec& offered_audio_codec : offered_codecs) {
+        // Keep an offered codec only if it is not already in the filtered
+        // list (first check) and we support it locally (second check).
+        if (!FindMatchingCodec(offered_codecs, filtered_offered_audio_codecs,
+                               offered_audio_codec, field_trials) &&
+            FindMatchingCodec(offered_codecs, all_audio_codecs_,
+                              offered_audio_codec, field_trials)) {
+          filtered_offered_audio_codecs.push_back(offered_audio_codec);
+        }
+      }
+    } else if (IsMediaContentOfType(&content, MEDIA_TYPE_VIDEO)) {
+      std::vector<Codec> offered_codecs = content.media_description()->codecs();
+      for (const Codec& offered_video_codec : offered_codecs) {
+        // Same de-duplication + local-support filter as for audio above.
+        if (!FindMatchingCodec(offered_codecs, filtered_offered_video_codecs,
+                               offered_video_codec, field_trials) &&
+            FindMatchingCodec(offered_codecs, all_video_codecs_,
+                              offered_video_codec, field_trials)) {
+          filtered_offered_video_codecs.push_back(offered_video_codec);
+        }
+      }
+    }
+  }
+
+  // Add codecs that are not in the current description but were in
+  // `remote_offer`.
+  MergeCodecs(filtered_offered_audio_codecs, audio_codecs, &used_pltypes,
+              field_trials);
+  MergeCodecs(filtered_offered_video_codecs, video_codecs, &used_pltypes,
+              field_trials);
+}
+
+// Allocates extension ids for all audio/video RTP header extensions that will
+// be offered, reusing the ids already present in `current_active_contents` so
+// that re-offers keep stable ids.
+MediaSessionDescriptionFactory::AudioVideoRtpHeaderExtensions
+MediaSessionDescriptionFactory::GetOfferedRtpHeaderExtensionsWithIds(
+    const std::vector<const ContentInfo*>& current_active_contents,
+    bool extmap_allow_mixed,
+    const std::vector<MediaDescriptionOptions>& media_description_options)
+    const {
+  // All header extensions allocated from the same range to avoid potential
+  // issues when using BUNDLE.
+
+  // Strictly speaking the SDP attribute extmap_allow_mixed signals that the
+  // receiver supports an RTP stream where one- and two-byte RTP header
+  // extensions are mixed. For backwards compatibility reasons it's used in
+  // WebRTC to signal that two-byte RTP header extensions are supported.
+  UsedRtpHeaderExtensionIds used_ids(
+      extmap_allow_mixed ? UsedRtpHeaderExtensionIds::IdDomain::kTwoByteAllowed
+                         : UsedRtpHeaderExtensionIds::IdDomain::kOneByteOnly);
+  RtpHeaderExtensions all_regular_extensions;
+  RtpHeaderExtensions all_encrypted_extensions;
+
+  AudioVideoRtpHeaderExtensions offered_extensions;
+  // First - get all extensions from the current description if the media type
+  // is used.
+  // Add them to `used_ids` so the local ids are not reused if a new media
+  // type is added.
+  for (const ContentInfo* content : current_active_contents) {
+    if (IsMediaContentOfType(content, MEDIA_TYPE_AUDIO)) {
+      MergeRtpHdrExts(content->media_description()->rtp_header_extensions(),
+                      &offered_extensions.audio, &all_regular_extensions,
+                      &all_encrypted_extensions, &used_ids);
+    } else if (IsMediaContentOfType(content, MEDIA_TYPE_VIDEO)) {
+      MergeRtpHdrExts(content->media_description()->rtp_header_extensions(),
+                      &offered_extensions.video, &all_regular_extensions,
+                      &all_encrypted_extensions, &used_ids);
+    }
+  }
+
+  // Add all encountered header extensions in the media description options that
+  // are not in the current description.
+
+  for (const auto& entry : media_description_options) {
+    // Stopped extensions are dropped unless they were already negotiated
+    // (present in `all_regular_extensions`/`all_encrypted_extensions`).
+    RtpHeaderExtensions filtered_extensions =
+        filtered_rtp_header_extensions(UnstoppedOrPresentRtpHeaderExtensions(
+            entry.header_extensions, all_regular_extensions,
+            all_encrypted_extensions));
+    if (entry.type == MEDIA_TYPE_AUDIO)
+      MergeRtpHdrExts(filtered_extensions, &offered_extensions.audio,
+                      &all_regular_extensions, &all_encrypted_extensions,
+                      &used_ids);
+    else if (entry.type == MEDIA_TYPE_VIDEO)
+      MergeRtpHdrExts(filtered_extensions, &offered_extensions.video,
+                      &all_regular_extensions, &all_encrypted_extensions,
+                      &used_ids);
+  }
+  // TODO(jbauch): Support adding encrypted header extensions to existing
+  // sessions.
+  if (enable_encrypted_rtp_header_extensions_ &&
+      current_active_contents.empty()) {
+    AddEncryptedVersionsOfHdrExts(&offered_extensions.audio,
+                                  &all_encrypted_extensions, &used_ids);
+    AddEncryptedVersionsOfHdrExts(&offered_extensions.video,
+                                  &all_encrypted_extensions, &used_ids);
+  }
+  return offered_extensions;
+}
+
+// Creates the transport description for `content_name` (reusing ICE/DTLS
+// parameters from `current_desc` when present) and attaches it to
+// `offer_desc`. Returns an error if the transport factory fails.
+RTCError MediaSessionDescriptionFactory::AddTransportOffer(
+    const std::string& content_name,
+    const TransportOptions& transport_options,
+    const SessionDescription* current_desc,
+    SessionDescription* offer_desc,
+    IceCredentialsIterator* ice_credentials) const {
+  const TransportDescription* current_tdesc =
+      GetTransportDescription(content_name, current_desc);
+  std::unique_ptr<TransportDescription> new_tdesc(
+      transport_desc_factory_->CreateOffer(transport_options, current_tdesc,
+                                           ice_credentials));
+  if (!new_tdesc) {
+    // Bug fix: previously this only logged and then fell through to
+    // dereference the null `new_tdesc` below. Surface the failure instead.
+    LOG_AND_RETURN_ERROR(
+        RTCErrorType::INTERNAL_ERROR,
+        "Failed to AddTransportOffer, content name=" + content_name);
+  }
+  offer_desc->AddTransportInfo(TransportInfo(content_name, *new_tdesc));
+  return RTCError::OK();
+}
+
+std::unique_ptr<TransportDescription>
+MediaSessionDescriptionFactory::CreateTransportAnswer(
+    const std::string& content_name,
+    const SessionDescription* offer_desc,
+    const TransportOptions& transport_options,
+    const SessionDescription* current_desc,
+    bool require_transport_attributes,
+    IceCredentialsIterator* ice_credentials) const {
+  // Look up the transport for this m= section in both the remote offer and
+  // the current description (if any), then let the transport factory
+  // negotiate the answer.
+  const TransportDescription* offered =
+      GetTransportDescription(content_name, offer_desc);
+  const TransportDescription* current =
+      GetTransportDescription(content_name, current_desc);
+  return transport_desc_factory_->CreateAnswer(offered, transport_options,
+                                               require_transport_attributes,
+                                               current, ice_credentials);
+}
+
+RTCError MediaSessionDescriptionFactory::AddTransportAnswer(
+    const std::string& content_name,
+    const TransportDescription& transport_desc,
+    SessionDescription* answer_desc) const {
+  // Attach the negotiated transport under the given content name. This cannot
+  // fail; the RTCError return keeps the signature uniform with its siblings.
+  TransportInfo info(content_name, transport_desc);
+  answer_desc->AddTransportInfo(info);
+  return RTCError::OK();
+}
+
+// `audio_codecs` = set of all possible codecs that can be used, with correct
+// payload type mappings
+//
+// `supported_audio_codecs` = set of codecs that are supported for the direction
+// of this m= section
+//
+// mcd->codecs() = set of previously negotiated codecs for this m= section
+//
+// The payload types should come from audio_codecs, but the order should come
+// from mcd->codecs() and then supported_codecs, to ensure that re-offers don't
+// change existing codec priority, and that new codecs are added with the right
+// priority.
+RTCError MediaSessionDescriptionFactory::AddAudioContentForOffer(
+    const MediaDescriptionOptions& media_description_options,
+    const MediaSessionOptions& session_options,
+    const ContentInfo* current_content,
+    const SessionDescription* current_description,
+    const RtpHeaderExtensions& audio_rtp_extensions,
+    const AudioCodecs& audio_codecs,
+    StreamParamsVec* current_streams,
+    SessionDescription* desc,
+    IceCredentialsIterator* ice_credentials) const {
+  const webrtc::FieldTrialsView* field_trials =
+      &transport_desc_factory_->trials();
+  // Filter audio_codecs (which includes all codecs, with correctly remapped
+  // payload types) based on transceiver direction.
+  const AudioCodecs& supported_audio_codecs =
+      GetAudioCodecsForOffer(media_description_options.direction);
+
+  AudioCodecs filtered_codecs;
+
+  if (!media_description_options.codec_preferences.empty()) {
+    // Add the codecs from the current transceiver's codec preferences.
+    // They override any existing codecs from previous negotiations.
+    filtered_codecs = MatchCodecPreference(
+        media_description_options.codec_preferences, audio_codecs,
+        supported_audio_codecs, field_trials);
+  } else {
+    // Add the codecs from current content if it exists and is not rejected nor
+    // recycled.
+    if (current_content && !current_content->rejected &&
+        current_content->name == media_description_options.mid) {
+      if (!IsMediaContentOfType(current_content, MEDIA_TYPE_AUDIO)) {
+        // Can happen if the remote side re-uses a MID while recycling.
+        LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR,
+                             "Media type for content with mid='" +
+                                 current_content->name +
+                                 "' does not match previous type.");
+      }
+      const MediaContentDescription* mcd = current_content->media_description();
+      // Previously negotiated codecs go first to keep their priority stable
+      // across re-offers.
+      for (const Codec& codec : mcd->codecs()) {
+        if (FindMatchingCodec(mcd->codecs(), audio_codecs, codec,
+                              field_trials)) {
+          filtered_codecs.push_back(codec);
+        }
+      }
+    }
+    // Add other supported audio codecs.
+    for (const Codec& codec : supported_audio_codecs) {
+      absl::optional<Codec> found_codec = FindMatchingCodec(
+          supported_audio_codecs, audio_codecs, codec, field_trials);
+      if (found_codec &&
+          !FindMatchingCodec(supported_audio_codecs, filtered_codecs, codec,
+                             field_trials)) {
+        // Use the `found_codec` from `audio_codecs` because it has the
+        // correctly mapped payload type.
+        filtered_codecs.push_back(*found_codec);
+      }
+    }
+  }
+  if (!session_options.vad_enabled) {
+    // If application doesn't want CN codecs in offer.
+    StripCNCodecs(&filtered_codecs);
+  }
+
+  // SDES is disabled when DTLS is already active for this content; otherwise
+  // the factory's configured secure() policy applies.
+  cricket::SecurePolicy sdes_policy =
+      IsDtlsActive(current_content, current_description) ? cricket::SEC_DISABLED
+                                                         : secure();
+
+  auto audio = std::make_unique<AudioContentDescription>();
+  std::vector<std::string> crypto_suites;
+  GetSupportedAudioSdesCryptoSuiteNames(session_options.crypto_options,
+                                        &crypto_suites);
+  auto error = CreateMediaContentOffer(
+      media_description_options, session_options, filtered_codecs, sdes_policy,
+      GetCryptos(current_content), crypto_suites, audio_rtp_extensions,
+      ssrc_generator(), current_streams, audio.get(),
+      transport_desc_factory_->trials());
+  if (!error.ok()) {
+    return error;
+  }
+
+  // The media protocol string depends on whether the transport is secured.
+  bool secure_transport = (transport_desc_factory_->secure() != SEC_DISABLED);
+  SetMediaProtocol(secure_transport, audio.get());
+
+  audio->set_direction(media_description_options.direction);
+
+  desc->AddContent(media_description_options.mid, MediaProtocolType::kRtp,
+                   media_description_options.stopped, std::move(audio));
+  error = AddTransportOffer(media_description_options.mid,
+                            media_description_options.transport_options,
+                            current_description, desc, ice_credentials);
+  if (!error.ok()) {
+    return error;
+  }
+
+  return RTCError::OK();
+}
+
+// TODO(kron): This function is very similar to AddAudioContentForOffer.
+// Refactor to reuse shared code.
+//
+// Builds the video m= section for an offer; see AddAudioContentForOffer for
+// the codec-ordering contract (previously negotiated codecs first, then the
+// remaining supported codecs, payload types taken from `video_codecs`).
+RTCError MediaSessionDescriptionFactory::AddVideoContentForOffer(
+    const MediaDescriptionOptions& media_description_options,
+    const MediaSessionOptions& session_options,
+    const ContentInfo* current_content,
+    const SessionDescription* current_description,
+    const RtpHeaderExtensions& video_rtp_extensions,
+    const VideoCodecs& video_codecs,
+    StreamParamsVec* current_streams,
+    SessionDescription* desc,
+    IceCredentialsIterator* ice_credentials) const {
+  const webrtc::FieldTrialsView* field_trials =
+      &transport_desc_factory_->trials();
+  // Filter video_codecs (which includes all codecs, with correctly remapped
+  // payload types) based on transceiver direction.
+  const VideoCodecs& supported_video_codecs =
+      GetVideoCodecsForOffer(media_description_options.direction);
+
+  VideoCodecs filtered_codecs;
+
+  if (!media_description_options.codec_preferences.empty()) {
+    // Add the codecs from the current transceiver's codec preferences.
+    // They override any existing codecs from previous negotiations.
+    filtered_codecs = MatchCodecPreference(
+        media_description_options.codec_preferences, video_codecs,
+        supported_video_codecs, field_trials);
+  } else {
+    // Add the codecs from current content if it exists and is not rejected nor
+    // recycled.
+    if (current_content && !current_content->rejected &&
+        current_content->name == media_description_options.mid) {
+      if (!IsMediaContentOfType(current_content, MEDIA_TYPE_VIDEO)) {
+        // Can happen if the remote side re-uses a MID while recycling.
+        LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR,
+                             "Media type for content with mid='" +
+                                 current_content->name +
+                                 "' does not match previous type.");
+      }
+      const MediaContentDescription* mcd = current_content->media_description();
+      // Previously negotiated codecs go first to keep their priority stable
+      // across re-offers.
+      for (const Codec& codec : mcd->codecs()) {
+        if (FindMatchingCodec(mcd->codecs(), video_codecs, codec,
+                              field_trials)) {
+          filtered_codecs.push_back(codec);
+        }
+      }
+    }
+    // Add other supported video codecs.
+    for (const Codec& codec : supported_video_codecs) {
+      absl::optional<Codec> found_codec = FindMatchingCodec(
+          supported_video_codecs, video_codecs, codec, field_trials);
+      if (found_codec &&
+          !FindMatchingCodec(supported_video_codecs, filtered_codecs, codec,
+                             field_trials)) {
+        // Use the `found_codec` from `video_codecs` because it has the
+        // correctly mapped payload type.
+        if (IsRtxCodec(codec)) {
+          // For RTX we might need to adjust the apt parameter if we got a
+          // remote offer without RTX for a codec for which we support RTX.
+          auto referenced_codec =
+              GetAssociatedCodecForRtx(supported_video_codecs, codec);
+          RTC_DCHECK(referenced_codec);
+
+          // Find the codec we should be referencing and point to it.
+          absl::optional<Codec> changed_referenced_codec =
+              FindMatchingCodec(supported_video_codecs, filtered_codecs,
+                                *referenced_codec, field_trials);
+          if (changed_referenced_codec) {
+            found_codec->SetParam(kCodecParamAssociatedPayloadType,
+                                  changed_referenced_codec->id);
+          }
+        }
+        filtered_codecs.push_back(*found_codec);
+      }
+    }
+  }
+
+  if (session_options.raw_packetization_for_video) {
+    // Signal packetization=raw on every media codec (RTX/RED/FEC helper
+    // codecs are excluded by the IsMediaCodec() check).
+    for (Codec& codec : filtered_codecs) {
+      if (codec.IsMediaCodec()) {
+        codec.packetization = kPacketizationParamRaw;
+      }
+    }
+  }
+
+  // SDES is disabled when DTLS is already active for this content; otherwise
+  // the factory's configured secure() policy applies.
+  cricket::SecurePolicy sdes_policy =
+      IsDtlsActive(current_content, current_description) ? cricket::SEC_DISABLED
+                                                         : secure();
+  auto video = std::make_unique<VideoContentDescription>();
+  std::vector<std::string> crypto_suites;
+  GetSupportedVideoSdesCryptoSuiteNames(session_options.crypto_options,
+                                        &crypto_suites);
+  auto error = CreateMediaContentOffer(
+      media_description_options, session_options, filtered_codecs, sdes_policy,
+      GetCryptos(current_content), crypto_suites, video_rtp_extensions,
+      ssrc_generator(), current_streams, video.get(),
+      transport_desc_factory_->trials());
+  if (!error.ok()) {
+    return error;
+  }
+
+  // No explicit b= bandwidth line; leave it to bandwidth estimation.
+  video->set_bandwidth(kAutoBandwidth);
+
+  bool secure_transport = (transport_desc_factory_->secure() != SEC_DISABLED);
+  SetMediaProtocol(secure_transport, video.get());
+
+  video->set_direction(media_description_options.direction);
+
+  desc->AddContent(media_description_options.mid, MediaProtocolType::kRtp,
+                   media_description_options.stopped, std::move(video));
+  return AddTransportOffer(media_description_options.mid,
+                           media_description_options.transport_options,
+                           current_description, desc, ice_credentials);
+}
+
+// Builds the SCTP data m= section for an offer and its transport description.
+RTCError MediaSessionDescriptionFactory::AddDataContentForOffer(
+    const MediaDescriptionOptions& media_description_options,
+    const MediaSessionOptions& session_options,
+    const ContentInfo* current_content,
+    const SessionDescription* current_description,
+    StreamParamsVec* current_streams,
+    SessionDescription* desc,
+    IceCredentialsIterator* ice_credentials) const {
+  auto data = std::make_unique<SctpDataContentDescription>();
+
+  bool secure_transport = (transport_desc_factory_->secure() != SEC_DISABLED);
+
+  cricket::SecurePolicy sdes_policy =
+      IsDtlsActive(current_content, current_description) ? cricket::SEC_DISABLED
+                                                         : secure();
+  std::vector<std::string> crypto_suites;
+  // SDES doesn't make sense for SCTP, so we disable it, and we only
+  // get SDES crypto suites for RTP-based data channels.
+  sdes_policy = cricket::SEC_DISABLED;
+  // Unlike SetMediaProtocol below, we need to set the protocol
+  // before we call CreateMediaContentOffer. Otherwise,
+  // CreateMediaContentOffer won't know this is SCTP and will
+  // generate SSRCs rather than SIDs.
+  data->set_protocol(secure_transport ? kMediaProtocolUdpDtlsSctp
+                                      : kMediaProtocolSctp);
+  // `use_obsolete_sctp_sdp` selects the legacy a=sctpmap-style signaling.
+  data->set_use_sctpmap(session_options.use_obsolete_sctp_sdp);
+  data->set_max_message_size(kSctpSendBufferSize);
+
+  auto error = CreateContentOffer(
+      media_description_options, session_options, sdes_policy,
+      GetCryptos(current_content), crypto_suites, RtpHeaderExtensions(),
+      ssrc_generator(), current_streams, data.get());
+  if (!error.ok()) {
+    return error;
+  }
+
+  desc->AddContent(media_description_options.mid, MediaProtocolType::kSctp,
+                   media_description_options.stopped, std::move(data));
+  return AddTransportOffer(media_description_options.mid,
+                           media_description_options.transport_options,
+                           current_description, desc, ice_credentials);
+}
+
+// Carries an unsupported (but previously seen) m= section forward into a new
+// offer. The section is re-offered with its original protocol but always
+// rejected. Requires `current_content` to be an unsupported-type content
+// (enforced by the RTC_CHECK below).
+RTCError MediaSessionDescriptionFactory::AddUnsupportedContentForOffer(
+    const MediaDescriptionOptions& media_description_options,
+    const MediaSessionOptions& session_options,
+    const ContentInfo* current_content,
+    const SessionDescription* current_description,
+    SessionDescription* desc,
+    IceCredentialsIterator* ice_credentials) const {
+  RTC_CHECK(IsMediaContentOfType(current_content, MEDIA_TYPE_UNSUPPORTED));
+
+  const UnsupportedContentDescription* current_unsupported_description =
+      current_content->media_description()->as_unsupported();
+  auto unsupported = std::make_unique<UnsupportedContentDescription>(
+      current_unsupported_description->media_type());
+  unsupported->set_protocol(current_content->media_description()->protocol());
+  desc->AddContent(media_description_options.mid, MediaProtocolType::kOther,
+                   /*rejected=*/true, std::move(unsupported));
+
+  // Even a rejected section still needs a transport description in the SDP.
+  return AddTransportOffer(media_description_options.mid,
+                           media_description_options.transport_options,
+                           current_description, desc, ice_credentials);
+}
+
+// `audio_codecs` = set of all possible codecs that can be used, with correct
+// payload type mappings
+//
+// `supported_audio_codecs` = set of codecs that are supported for the direction
+// of this m= section
+//
+// mcd->codecs() = set of previously negotiated codecs for this m= section
+//
+// The payload types should come from audio_codecs, but the order should come
+// from mcd->codecs() and then supported_codecs, to ensure that re-offers don't
+// change existing codec priority, and that new codecs are added with the right
+// priority.
+RTCError MediaSessionDescriptionFactory::AddAudioContentForAnswer(
+ const MediaDescriptionOptions& media_description_options,
+ const MediaSessionOptions& session_options,
+ const ContentInfo* offer_content,
+ const SessionDescription* offer_description,
+ const ContentInfo* current_content,
+ const SessionDescription* current_description,
+ const TransportInfo* bundle_transport,
+ const AudioCodecs& audio_codecs,
+ const RtpHeaderExtensions& rtp_header_extensions,
+ StreamParamsVec* current_streams,
+ SessionDescription* answer,
+ IceCredentialsIterator* ice_credentials) const {
+ const webrtc::FieldTrialsView* field_trials =
+ &transport_desc_factory_->trials();
+ RTC_CHECK(IsMediaContentOfType(offer_content, MEDIA_TYPE_AUDIO));
+ const AudioContentDescription* offer_audio_description =
+ offer_content->media_description()->as_audio();
+
+ std::unique_ptr<TransportDescription> audio_transport = CreateTransportAnswer(
+ media_description_options.mid, offer_description,
+ media_description_options.transport_options, current_description,
+ bundle_transport != nullptr, ice_credentials);
+ if (!audio_transport) {
+ LOG_AND_RETURN_ERROR(
+ RTCErrorType::INTERNAL_ERROR,
+ "Failed to create transport answer, audio transport is missing");
+ }
+
+ // Pick codecs based on the requested communications direction in the offer
+ // and the selected direction in the answer.
+ // Note these will be filtered one final time in CreateMediaContentAnswer.
+ auto wants_rtd = media_description_options.direction;
+ auto offer_rtd = offer_audio_description->direction();
+ auto answer_rtd = NegotiateRtpTransceiverDirection(offer_rtd, wants_rtd);
+ AudioCodecs supported_audio_codecs =
+ GetAudioCodecsForAnswer(offer_rtd, answer_rtd);
+
+ AudioCodecs filtered_codecs;
+
+ if (!media_description_options.codec_preferences.empty()) {
+ filtered_codecs = MatchCodecPreference(
+ media_description_options.codec_preferences, audio_codecs,
+ supported_audio_codecs, field_trials);
+ } else {
+ // Add the codecs from current content if it exists and is not rejected nor
+ // recycled.
+ if (current_content && !current_content->rejected &&
+ current_content->name == media_description_options.mid) {
+ if (!IsMediaContentOfType(current_content, MEDIA_TYPE_AUDIO)) {
+ // Can happen if the remote side re-uses a MID while recycling.
+ LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR,
+ "Media type for content with mid='" +
+ current_content->name +
+ "' does not match previous type.");
+ }
+ const MediaContentDescription* mcd = current_content->media_description();
+ for (const Codec& codec : mcd->codecs()) {
+ if (FindMatchingCodec(mcd->codecs(), audio_codecs, codec,
+ field_trials)) {
+ filtered_codecs.push_back(codec);
+ }
+ }
+ }
+ // Add other supported audio codecs.
+ for (const Codec& codec : supported_audio_codecs) {
+ if (FindMatchingCodec(supported_audio_codecs, audio_codecs, codec,
+ field_trials) &&
+ !FindMatchingCodec(supported_audio_codecs, filtered_codecs, codec,
+ field_trials)) {
+ // We should use the local codec with local parameters and the codec id
+ // would be correctly mapped in `NegotiateCodecs`.
+ filtered_codecs.push_back(codec);
+ }
+ }
+ }
+ if (!session_options.vad_enabled) {
+ // If application doesn't want CN codecs in answer.
+ StripCNCodecs(&filtered_codecs);
+ }
+
+ // Determine if we have media codecs in common.
+ bool has_common_media_codecs =
+ std::find_if(filtered_codecs.begin(), filtered_codecs.end(),
+ [](const AudioCodec& c) {
+ return !(IsRedCodec(c) || IsComfortNoiseCodec(c));
+ }) != filtered_codecs.end();
+
+ bool bundle_enabled = offer_description->HasGroup(GROUP_TYPE_BUNDLE) &&
+ session_options.bundle_enabled;
+ auto audio_answer = std::make_unique<AudioContentDescription>();
+ // Do not require or create SDES cryptos if DTLS is used.
+ cricket::SecurePolicy sdes_policy =
+ audio_transport->secure() ? cricket::SEC_DISABLED : secure();
+ if (!SetCodecsInAnswer(offer_audio_description, filtered_codecs,
+ media_description_options, session_options,
+ ssrc_generator(), current_streams, audio_answer.get(),
+ transport_desc_factory_->trials())) {
+ LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR,
+ "Failed to set codecs in answer");
+ }
+ if (!CreateMediaContentAnswer(
+ offer_audio_description, media_description_options, session_options,
+ sdes_policy, GetCryptos(current_content),
+ filtered_rtp_header_extensions(rtp_header_extensions),
+ ssrc_generator(), enable_encrypted_rtp_header_extensions_,
+ current_streams, bundle_enabled, audio_answer.get())) {
+ LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR,
+ "Failed to create answer");
+ }
+
+ bool secure = bundle_transport ? bundle_transport->description.secure()
+ : audio_transport->secure();
+ bool rejected = media_description_options.stopped ||
+ offer_content->rejected || !has_common_media_codecs ||
+ !IsMediaProtocolSupported(MEDIA_TYPE_AUDIO,
+ audio_answer->protocol(), secure);
+ auto error = AddTransportAnswer(media_description_options.mid,
+ *(audio_transport.get()), answer);
+ if (!error.ok()) {
+ return error;
+ }
+
+ if (rejected) {
+ RTC_LOG(LS_INFO) << "Audio m= section '" << media_description_options.mid
+ << "' being rejected in answer.";
+ }
+
+ answer->AddContent(media_description_options.mid, offer_content->type,
+ rejected, std::move(audio_answer));
+ return RTCError::OK();
+}
+
+// Negotiates and appends the video m= section of an answer, together with
+// its transport description. The section is marked rejected when the section
+// was stopped/rejected, when no non-FEC codec survives negotiation, or when
+// the offered media protocol is unsupported.
+// TODO(kron): This function is very similar to AddAudioContentForAnswer.
+// Refactor to reuse shared code.
+RTCError MediaSessionDescriptionFactory::AddVideoContentForAnswer(
+    const MediaDescriptionOptions& media_description_options,
+    const MediaSessionOptions& session_options,
+    const ContentInfo* offer_content,
+    const SessionDescription* offer_description,
+    const ContentInfo* current_content,
+    const SessionDescription* current_description,
+    const TransportInfo* bundle_transport,
+    const VideoCodecs& video_codecs,
+    const RtpHeaderExtensions& default_video_rtp_header_extensions,
+    StreamParamsVec* current_streams,
+    SessionDescription* answer,
+    IceCredentialsIterator* ice_credentials) const {
+  const webrtc::FieldTrialsView* field_trials =
+      &transport_desc_factory_->trials();
+  RTC_CHECK(IsMediaContentOfType(offer_content, MEDIA_TYPE_VIDEO));
+  const VideoContentDescription* offer_video_description =
+      offer_content->media_description()->as_video();
+
+  std::unique_ptr<TransportDescription> video_transport = CreateTransportAnswer(
+      media_description_options.mid, offer_description,
+      media_description_options.transport_options, current_description,
+      bundle_transport != nullptr, ice_credentials);
+  if (!video_transport) {
+    LOG_AND_RETURN_ERROR(
+        RTCErrorType::INTERNAL_ERROR,
+        "Failed to create transport answer, video transport is missing");
+  }
+
+  // Pick codecs based on the requested communications direction in the offer
+  // and the selected direction in the answer.
+  // Note these will be filtered one final time in CreateMediaContentAnswer.
+  auto wants_rtd = media_description_options.direction;
+  auto offer_rtd = offer_video_description->direction();
+  auto answer_rtd = NegotiateRtpTransceiverDirection(offer_rtd, wants_rtd);
+  VideoCodecs supported_video_codecs =
+      GetVideoCodecsForAnswer(offer_rtd, answer_rtd);
+
+  VideoCodecs filtered_codecs;
+
+  if (!media_description_options.codec_preferences.empty()) {
+    // The application restricted/ordered codecs; honor that list exclusively.
+    filtered_codecs = MatchCodecPreference(
+        media_description_options.codec_preferences, video_codecs,
+        supported_video_codecs, field_trials);
+  } else {
+    // Add the codecs from current content if it exists and is not rejected nor
+    // recycled.
+    if (current_content && !current_content->rejected &&
+        current_content->name == media_description_options.mid) {
+      if (!IsMediaContentOfType(current_content, MEDIA_TYPE_VIDEO)) {
+        // Can happen if the remote side re-uses a MID while recycling.
+        LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR,
+                             "Media type for content with mid='" +
+                                 current_content->name +
+                                 "' does not match previous type.");
+      }
+      const MediaContentDescription* mcd = current_content->media_description();
+      for (const Codec& codec : mcd->codecs()) {
+        if (FindMatchingCodec(mcd->codecs(), video_codecs, codec,
+                              field_trials)) {
+          filtered_codecs.push_back(codec);
+        }
+      }
+    }
+
+    // Add other supported video codecs.
+    VideoCodecs other_video_codecs;
+    for (const Codec& codec : supported_video_codecs) {
+      if (FindMatchingCodec(supported_video_codecs, video_codecs, codec,
+                            field_trials) &&
+          !FindMatchingCodec(supported_video_codecs, filtered_codecs, codec,
+                             field_trials)) {
+        // We should use the local codec with local parameters and the codec id
+        // would be correctly mapped in `NegotiateCodecs`.
+        other_video_codecs.push_back(codec);
+      }
+    }
+
+    // Use ComputeCodecsUnion to avoid having duplicate payload IDs
+    filtered_codecs =
+        ComputeCodecsUnion(filtered_codecs, other_video_codecs, field_trials);
+  }
+  // Determine if we have media codecs in common.
+  // RED/ULPFEC/FLEXFEC are repair mechanisms only, so a list containing
+  // nothing else does not count as having a codec in common.
+  bool has_common_media_codecs =
+      std::find_if(
+          filtered_codecs.begin(), filtered_codecs.end(), [](const Codec& c) {
+            return !(IsRedCodec(c) || IsUlpfecCodec(c) || IsFlexfecCodec(c));
+          }) != filtered_codecs.end();
+
+  if (session_options.raw_packetization_for_video) {
+    for (Codec& codec : filtered_codecs) {
+      if (codec.IsMediaCodec()) {
+        codec.packetization = kPacketizationParamRaw;
+      }
+    }
+  }
+
+  bool bundle_enabled = offer_description->HasGroup(GROUP_TYPE_BUNDLE) &&
+                        session_options.bundle_enabled;
+  auto video_answer = std::make_unique<VideoContentDescription>();
+  // Do not require or create SDES cryptos if DTLS is used.
+  cricket::SecurePolicy sdes_policy =
+      video_transport->secure() ? cricket::SEC_DISABLED : secure();
+  if (!SetCodecsInAnswer(offer_video_description, filtered_codecs,
+                         media_description_options, session_options,
+                         ssrc_generator(), current_streams, video_answer.get(),
+                         transport_desc_factory_->trials())) {
+    LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR,
+                         "Failed to set codecs in answer");
+  }
+  if (!CreateMediaContentAnswer(
+          offer_video_description, media_description_options, session_options,
+          sdes_policy, GetCryptos(current_content),
+          filtered_rtp_header_extensions(default_video_rtp_header_extensions),
+          ssrc_generator(), enable_encrypted_rtp_header_extensions_,
+          current_streams, bundle_enabled, video_answer.get())) {
+    LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR,
+                         "Failed to create answer");
+  }
+  bool secure = bundle_transport ? bundle_transport->description.secure()
+                                 : video_transport->secure();
+  bool rejected = media_description_options.stopped ||
+                  offer_content->rejected || !has_common_media_codecs ||
+                  !IsMediaProtocolSupported(MEDIA_TYPE_VIDEO,
+                                            video_answer->protocol(), secure);
+  // The transport answer is added even for rejected content so that the
+  // answer stays structurally symmetric with the offer.
+  auto error = AddTransportAnswer(media_description_options.mid,
+                                  *(video_transport.get()), answer);
+  if (!error.ok()) {
+    return error;
+  }
+
+  if (!rejected) {
+    video_answer->set_bandwidth(kAutoBandwidth);
+  } else {
+    RTC_LOG(LS_INFO) << "Video m= section '" << media_description_options.mid
+                     << "' being rejected in answer.";
+  }
+  answer->AddContent(media_description_options.mid, offer_content->type,
+                     rejected, std::move(video_answer));
+  return RTCError::OK();
+}
+
+// Negotiates and appends the data m= section of an answer. Only SCTP data
+// content is handled; any other data description trips a DCHECK. The SCTP
+// protocol, max-message-size and sctpmap usage are negotiated against the
+// offer.
+RTCError MediaSessionDescriptionFactory::AddDataContentForAnswer(
+    const MediaDescriptionOptions& media_description_options,
+    const MediaSessionOptions& session_options,
+    const ContentInfo* offer_content,
+    const SessionDescription* offer_description,
+    const ContentInfo* current_content,
+    const SessionDescription* current_description,
+    const TransportInfo* bundle_transport,
+    StreamParamsVec* current_streams,
+    SessionDescription* answer,
+    IceCredentialsIterator* ice_credentials) const {
+  std::unique_ptr<TransportDescription> data_transport = CreateTransportAnswer(
+      media_description_options.mid, offer_description,
+      media_description_options.transport_options, current_description,
+      bundle_transport != nullptr, ice_credentials);
+  if (!data_transport) {
+    LOG_AND_RETURN_ERROR(
+        RTCErrorType::INTERNAL_ERROR,
+        "Failed to create transport answer, data transport is missing");
+  }
+
+  // Do not require or create SDES cryptos if DTLS is used.
+  cricket::SecurePolicy sdes_policy =
+      data_transport->secure() ? cricket::SEC_DISABLED : secure();
+  bool bundle_enabled = offer_description->HasGroup(GROUP_TYPE_BUNDLE) &&
+                        session_options.bundle_enabled;
+  RTC_CHECK(IsMediaContentOfType(offer_content, MEDIA_TYPE_DATA));
+  std::unique_ptr<MediaContentDescription> data_answer;
+  if (offer_content->media_description()->as_sctp()) {
+    // SCTP data content
+    data_answer = std::make_unique<SctpDataContentDescription>();
+    const SctpDataContentDescription* offer_data_description =
+        offer_content->media_description()->as_sctp();
+    // Respond with the offerer's proto, whatever it is.
+    data_answer->as_sctp()->set_protocol(offer_data_description->protocol());
+    // Respond with our max message size or the remote max message size,
+    // whichever is smaller.
+    // 0 is treated specially - it means "I can accept any size". Since
+    // we do not implement infinite size messages, reply with
+    // kSctpSendBufferSize.
+    if (offer_data_description->max_message_size() == 0) {
+      data_answer->as_sctp()->set_max_message_size(kSctpSendBufferSize);
+    } else {
+      data_answer->as_sctp()->set_max_message_size(std::min(
+          offer_data_description->max_message_size(), kSctpSendBufferSize));
+    }
+    if (!CreateMediaContentAnswer(
+            offer_data_description, media_description_options, session_options,
+            sdes_policy, GetCryptos(current_content), RtpHeaderExtensions(),
+            ssrc_generator(), enable_encrypted_rtp_header_extensions_,
+            current_streams, bundle_enabled, data_answer.get())) {
+      LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR,
+                           "Failed to create answer");
+    }
+    // Respond with sctpmap if the offer uses sctpmap.
+    bool offer_uses_sctpmap = offer_data_description->use_sctpmap();
+    data_answer->as_sctp()->set_use_sctpmap(offer_uses_sctpmap);
+  } else {
+    RTC_DCHECK_NOTREACHED() << "Non-SCTP data content found";
+  }
+
+  bool secure = bundle_transport ? bundle_transport->description.secure()
+                                 : data_transport->secure();
+
+  bool rejected = media_description_options.stopped ||
+                  offer_content->rejected ||
+                  !IsMediaProtocolSupported(MEDIA_TYPE_DATA,
+                                            data_answer->protocol(), secure);
+  // Add the transport even when rejecting, keeping the answer symmetric.
+  auto error = AddTransportAnswer(media_description_options.mid,
+                                  *(data_transport.get()), answer);
+  if (!error.ok()) {
+    return error;
+  }
+  answer->AddContent(media_description_options.mid, offer_content->type,
+                     rejected, std::move(data_answer));
+  return RTCError::OK();
+}
+
+// Echoes an unsupported m= section back in the answer. The content is always
+// rejected, but it keeps the offered media type and protocol so the answer
+// remains structurally symmetric with the offer.
+RTCError MediaSessionDescriptionFactory::AddUnsupportedContentForAnswer(
+    const MediaDescriptionOptions& media_description_options,
+    const MediaSessionOptions& session_options,
+    const ContentInfo* offer_content,
+    const SessionDescription* offer_description,
+    const ContentInfo* current_content,
+    const SessionDescription* current_description,
+    const TransportInfo* bundle_transport,
+    SessionDescription* answer,
+    IceCredentialsIterator* ice_credentials) const {
+  std::unique_ptr<TransportDescription> unsupported_transport =
+      CreateTransportAnswer(media_description_options.mid, offer_description,
+                            media_description_options.transport_options,
+                            current_description, bundle_transport != nullptr,
+                            ice_credentials);
+  if (!unsupported_transport) {
+    LOG_AND_RETURN_ERROR(
+        RTCErrorType::INTERNAL_ERROR,
+        "Failed to create transport answer, unsupported transport is missing");
+  }
+  RTC_CHECK(IsMediaContentOfType(offer_content, MEDIA_TYPE_UNSUPPORTED));
+
+  const UnsupportedContentDescription* offer_unsupported_description =
+      offer_content->media_description()->as_unsupported();
+  std::unique_ptr<MediaContentDescription> unsupported_answer =
+      std::make_unique<UnsupportedContentDescription>(
+          offer_unsupported_description->media_type());
+  unsupported_answer->set_protocol(offer_unsupported_description->protocol());
+
+  auto error = AddTransportAnswer(media_description_options.mid,
+                                  *(unsupported_transport.get()), answer);
+  if (!error.ok()) {
+    return error;
+  }
+
+  answer->AddContent(media_description_options.mid, offer_content->type,
+                     /*rejected=*/true, std::move(unsupported_answer));
+  return RTCError::OK();
+}
+
+// Recomputes `all_audio_codecs_` (union of the configured send and recv
+// lists) and `audio_sendrecv_codecs_` (their negotiated intersection) from
+// `audio_send_codecs_` / `audio_recv_codecs_`.
+void MediaSessionDescriptionFactory::ComputeAudioCodecsIntersectionAndUnion() {
+  const webrtc::FieldTrialsView* field_trials =
+      &transport_desc_factory_->trials();
+  audio_sendrecv_codecs_.clear();
+  all_audio_codecs_.clear();
+  // Compute the audio codecs union.
+  for (const Codec& send : audio_send_codecs_) {
+    all_audio_codecs_.push_back(send);
+    if (!FindMatchingCodec(audio_send_codecs_, audio_recv_codecs_, send,
+                           field_trials)) {
+      // It doesn't make sense to have an RTX codec we support sending but not
+      // receiving.
+      RTC_DCHECK(!IsRtxCodec(send));
+    }
+  }
+  // Receive-only codecs (no matching entry in the send list) still belong in
+  // the union.
+  for (const Codec& recv : audio_recv_codecs_) {
+    if (!FindMatchingCodec(audio_recv_codecs_, audio_send_codecs_, recv,
+                           field_trials)) {
+      all_audio_codecs_.push_back(recv);
+    }
+  }
+  // Use NegotiateCodecs to merge our codec lists, since the operation is
+  // essentially the same. Put send_codecs as the offered_codecs, which is the
+  // order we'd like to follow. The reasoning is that encoding is usually more
+  // expensive than decoding, and prioritizing a codec in the send list probably
+  // means it's a codec we can handle efficiently.
+  NegotiateCodecs(audio_recv_codecs_, audio_send_codecs_,
+                  &audio_sendrecv_codecs_, true, field_trials);
+}
+
+// Video counterpart of ComputeAudioCodecsIntersectionAndUnion: recomputes
+// `all_video_codecs_` (union) and `video_sendrecv_codecs_` (negotiated
+// intersection) from the configured send/recv video codec lists.
+void MediaSessionDescriptionFactory::ComputeVideoCodecsIntersectionAndUnion() {
+  const webrtc::FieldTrialsView* field_trials =
+      &transport_desc_factory_->trials();
+  video_sendrecv_codecs_.clear();
+
+  // Use ComputeCodecsUnion to avoid having duplicate payload IDs
+  all_video_codecs_ =
+      ComputeCodecsUnion(video_recv_codecs_, video_send_codecs_, field_trials);
+
+  // Use NegotiateCodecs to merge our codec lists, since the operation is
+  // essentially the same. Put send_codecs as the offered_codecs, which is the
+  // order we'd like to follow. The reasoning is that encoding is usually more
+  // expensive than decoding, and prioritizing a codec in the send list probably
+  // means it's a codec we can handle efficiently.
+  NegotiateCodecs(video_recv_codecs_, video_send_codecs_,
+                  &video_sendrecv_codecs_, true, field_trials);
+}
+
+// Returns true if `content` is non-null and carries media, i.e. uses the RTP
+// or SCTP protocol type.
+bool IsMediaContent(const ContentInfo* content) {
+  return (content && (content->type == MediaProtocolType::kRtp ||
+                      content->type == MediaProtocolType::kSctp));
+}
+
+// Per-media-type predicates; all are null-safe via IsMediaContentOfType.
+bool IsAudioContent(const ContentInfo* content) {
+  return IsMediaContentOfType(content, MEDIA_TYPE_AUDIO);
+}
+
+bool IsVideoContent(const ContentInfo* content) {
+  return IsMediaContentOfType(content, MEDIA_TYPE_VIDEO);
+}
+
+bool IsDataContent(const ContentInfo* content) {
+  return IsMediaContentOfType(content, MEDIA_TYPE_DATA);
+}
+
+bool IsUnsupportedContent(const ContentInfo* content) {
+  return IsMediaContentOfType(content, MEDIA_TYPE_UNSUPPORTED);
+}
+
+// Returns the first content of the given media type, or nullptr if none.
+const ContentInfo* GetFirstMediaContent(const ContentInfos& contents,
+                                        MediaType media_type) {
+  for (const ContentInfo& content : contents) {
+    if (IsMediaContentOfType(&content, media_type)) {
+      return &content;
+    }
+  }
+  return nullptr;
+}
+
+// Convenience wrappers over GetFirstMediaContent for each media type.
+const ContentInfo* GetFirstAudioContent(const ContentInfos& contents) {
+  return GetFirstMediaContent(contents, MEDIA_TYPE_AUDIO);
+}
+
+const ContentInfo* GetFirstVideoContent(const ContentInfos& contents) {
+  return GetFirstMediaContent(contents, MEDIA_TYPE_VIDEO);
+}
+
+const ContentInfo* GetFirstDataContent(const ContentInfos& contents) {
+  return GetFirstMediaContent(contents, MEDIA_TYPE_DATA);
+}
+
+// Null-tolerant overload: returns nullptr when `sdesc` is null.
+const ContentInfo* GetFirstMediaContent(const SessionDescription* sdesc,
+                                        MediaType media_type) {
+  if (sdesc == nullptr) {
+    return nullptr;
+  }
+
+  return GetFirstMediaContent(sdesc->contents(), media_type);
+}
+
+const ContentInfo* GetFirstAudioContent(const SessionDescription* sdesc) {
+  return GetFirstMediaContent(sdesc, MEDIA_TYPE_AUDIO);
+}
+
+const ContentInfo* GetFirstVideoContent(const SessionDescription* sdesc) {
+  return GetFirstMediaContent(sdesc, MEDIA_TYPE_VIDEO);
+}
+
+const ContentInfo* GetFirstDataContent(const SessionDescription* sdesc) {
+  return GetFirstMediaContent(sdesc, MEDIA_TYPE_DATA);
+}
+
+// Returns the description of the first m= section of the given type, or
+// nullptr if there is none (or `sdesc` is null).
+const MediaContentDescription* GetFirstMediaContentDescription(
+    const SessionDescription* sdesc,
+    MediaType media_type) {
+  const ContentInfo* content = GetFirstMediaContent(sdesc, media_type);
+  return (content ? content->media_description() : nullptr);
+}
+
+const AudioContentDescription* GetFirstAudioContentDescription(
+    const SessionDescription* sdesc) {
+  auto desc = GetFirstMediaContentDescription(sdesc, MEDIA_TYPE_AUDIO);
+  return desc ? desc->as_audio() : nullptr;
+}
+
+const VideoContentDescription* GetFirstVideoContentDescription(
+    const SessionDescription* sdesc) {
+  auto desc = GetFirstMediaContentDescription(sdesc, MEDIA_TYPE_VIDEO);
+  return desc ? desc->as_video() : nullptr;
+}
+
+// Note: returns nullptr for a non-SCTP data section (as_sctp() is null).
+const SctpDataContentDescription* GetFirstSctpDataContentDescription(
+    const SessionDescription* sdesc) {
+  auto desc = GetFirstMediaContentDescription(sdesc, MEDIA_TYPE_DATA);
+  return desc ? desc->as_sctp() : nullptr;
+}
+
+//
+// Non-const versions of the above functions.
+//
+
+// Non-const counterparts of the accessors above; identical lookup logic.
+ContentInfo* GetFirstMediaContent(ContentInfos* contents,
+                                  MediaType media_type) {
+  for (ContentInfo& content : *contents) {
+    if (IsMediaContentOfType(&content, media_type)) {
+      return &content;
+    }
+  }
+  return nullptr;
+}
+
+ContentInfo* GetFirstAudioContent(ContentInfos* contents) {
+  return GetFirstMediaContent(contents, MEDIA_TYPE_AUDIO);
+}
+
+ContentInfo* GetFirstVideoContent(ContentInfos* contents) {
+  return GetFirstMediaContent(contents, MEDIA_TYPE_VIDEO);
+}
+
+ContentInfo* GetFirstDataContent(ContentInfos* contents) {
+  return GetFirstMediaContent(contents, MEDIA_TYPE_DATA);
+}
+
+// Null-tolerant: returns nullptr when `sdesc` is null.
+ContentInfo* GetFirstMediaContent(SessionDescription* sdesc,
+                                  MediaType media_type) {
+  if (sdesc == nullptr) {
+    return nullptr;
+  }
+
+  return GetFirstMediaContent(&sdesc->contents(), media_type);
+}
+
+ContentInfo* GetFirstAudioContent(SessionDescription* sdesc) {
+  return GetFirstMediaContent(sdesc, MEDIA_TYPE_AUDIO);
+}
+
+ContentInfo* GetFirstVideoContent(SessionDescription* sdesc) {
+  return GetFirstMediaContent(sdesc, MEDIA_TYPE_VIDEO);
+}
+
+ContentInfo* GetFirstDataContent(SessionDescription* sdesc) {
+  return GetFirstMediaContent(sdesc, MEDIA_TYPE_DATA);
+}
+
+MediaContentDescription* GetFirstMediaContentDescription(
+    SessionDescription* sdesc,
+    MediaType media_type) {
+  ContentInfo* content = GetFirstMediaContent(sdesc, media_type);
+  return (content ? content->media_description() : nullptr);
+}
+
+AudioContentDescription* GetFirstAudioContentDescription(
+    SessionDescription* sdesc) {
+  auto desc = GetFirstMediaContentDescription(sdesc, MEDIA_TYPE_AUDIO);
+  return desc ? desc->as_audio() : nullptr;
+}
+
+VideoContentDescription* GetFirstVideoContentDescription(
+    SessionDescription* sdesc) {
+  auto desc = GetFirstMediaContentDescription(sdesc, MEDIA_TYPE_VIDEO);
+  return desc ? desc->as_video() : nullptr;
+}
+
+SctpDataContentDescription* GetFirstSctpDataContentDescription(
+    SessionDescription* sdesc) {
+  auto desc = GetFirstMediaContentDescription(sdesc, MEDIA_TYPE_DATA);
+  return desc ? desc->as_sctp() : nullptr;
+}
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/pc/media_session.h b/third_party/libwebrtc/pc/media_session.h
new file mode 100644
index 0000000000..3100fb6fdb
--- /dev/null
+++ b/third_party/libwebrtc/pc/media_session.h
@@ -0,0 +1,400 @@
+/*
+ * Copyright 2004 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Types and classes used in media session descriptions.
+
+#ifndef PC_MEDIA_SESSION_H_
+#define PC_MEDIA_SESSION_H_
+
+#include <map>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "api/crypto/crypto_options.h"
+#include "api/field_trials_view.h"
+#include "api/media_types.h"
+#include "api/rtp_parameters.h"
+#include "api/rtp_transceiver_direction.h"
+#include "media/base/media_constants.h"
+#include "media/base/rid_description.h"
+#include "media/base/stream_params.h"
+#include "p2p/base/ice_credentials_iterator.h"
+#include "p2p/base/transport_description.h"
+#include "p2p/base/transport_description_factory.h"
+#include "p2p/base/transport_info.h"
+#include "pc/jsep_transport.h"
+#include "pc/media_protocol_names.h"
+#include "pc/session_description.h"
+#include "pc/simulcast_description.h"
+#include "rtc_base/memory/always_valid_pointer.h"
+#include "rtc_base/unique_id_generator.h"
+
+namespace webrtc {
+
+// Forward declaration due to circular dependecy.
+class ConnectionContext;
+
+} // namespace webrtc
+
+namespace cricket {
+
+class MediaEngineInterface;
+
+// Default RTCP CNAME for unit tests.
+const char kDefaultRtcpCname[] = "DefaultRtcpCname";
+
+// Options for an RtpSender contained with an media description/"m=" section.
+// Note: Spec-compliant Simulcast and legacy simulcast are mutually exclusive.
+struct SenderOptions {
+  // Id of the MediaStreamTrack this sender carries.
+  std::string track_id;
+  // Ids of the MediaStreams the track belongs to.
+  std::vector<std::string> stream_ids;
+  // Use RIDs and Simulcast Layers to indicate spec-compliant Simulcast.
+  std::vector<RidDescription> rids;
+  SimulcastLayerList simulcast_layers;
+  // Use `num_sim_layers` to indicate legacy simulcast.
+  // NOTE(review): not default-initialized; callers are expected to set it.
+  int num_sim_layers;
+};
+
+// Options for an individual media description/"m=" section.
+struct MediaDescriptionOptions {
+  MediaDescriptionOptions(MediaType type,
+                          const std::string& mid,
+                          webrtc::RtpTransceiverDirection direction,
+                          bool stopped)
+      : type(type), mid(mid), direction(direction), stopped(stopped) {}
+
+  // Appends a SenderOptions entry for an audio/video sender.
+  // TODO(deadbeef): When we don't support Plan B, there will only be one
+  // sender per media description and this can be simplified.
+  void AddAudioSender(const std::string& track_id,
+                      const std::vector<std::string>& stream_ids);
+  void AddVideoSender(const std::string& track_id,
+                      const std::vector<std::string>& stream_ids,
+                      const std::vector<RidDescription>& rids,
+                      const SimulcastLayerList& simulcast_layers,
+                      int num_sim_layers);
+
+  MediaType type;
+  std::string mid;
+  webrtc::RtpTransceiverDirection direction;
+  bool stopped;
+  // ICE/DTLS options forwarded to the transport description factory.
+  TransportOptions transport_options;
+  // Note: There's no equivalent "RtpReceiverOptions" because only send
+  // stream information goes in the local descriptions.
+  std::vector<SenderOptions> sender_options;
+  // When non-empty, only these codecs are negotiated and their order is
+  // preserved (see MatchCodecPreference in media_session.cc).
+  std::vector<webrtc::RtpCodecCapability> codec_preferences;
+  std::vector<webrtc::RtpHeaderExtensionCapability> header_extensions;
+
+ private:
+  // Doesn't DCHECK on `type`.
+  void AddSenderInternal(const std::string& track_id,
+                         const std::vector<std::string>& stream_ids,
+                         const std::vector<RidDescription>& rids,
+                         const SimulcastLayerList& simulcast_layers,
+                         int num_sim_layers);
+};
+
+// Provides a mechanism for describing how m= sections should be generated.
+// The m= section with index X will use media_description_options[X]. There
+// must be an option for each existing section if creating an answer, or a
+// subsequent offer.
+struct MediaSessionOptions {
+  MediaSessionOptions() {}
+
+  // True if any m= section of the corresponding media type is present.
+  bool has_audio() const { return HasMediaDescription(MEDIA_TYPE_AUDIO); }
+  bool has_video() const { return HasMediaDescription(MEDIA_TYPE_VIDEO); }
+  bool has_data() const { return HasMediaDescription(MEDIA_TYPE_DATA); }
+
+  bool HasMediaDescription(MediaType type) const;
+
+  bool vad_enabled = true;  // When disabled, removes all CN codecs from SDP.
+  bool rtcp_mux_enabled = true;
+  bool bundle_enabled = false;
+  bool offer_extmap_allow_mixed = false;
+  // When true, media codecs advertise the "raw" packetization parameter.
+  bool raw_packetization_for_video = false;
+  std::string rtcp_cname = kDefaultRtcpCname;
+  webrtc::CryptoOptions crypto_options;
+  // List of media description options in the same order that the media
+  // descriptions will be generated.
+  std::vector<MediaDescriptionOptions> media_description_options;
+  std::vector<IceParameters> pooled_ice_credentials;
+
+  // Use the draft-ietf-mmusic-sctp-sdp-03 obsolete syntax for SCTP
+  // datachannels.
+  // Default is true for backwards compatibility with clients that use
+  // this internal interface.
+  bool use_obsolete_sctp_sdp = true;
+};
+
+// Creates media session descriptions according to the supplied codecs and
+// other fields, as well as the supplied per-call options.
+// When creating answers, performs the appropriate negotiation
+// of the various fields to determine the proper result.
+class MediaSessionDescriptionFactory {
+ public:
+ // This constructor automatically sets up the factory to get its configuration
+ // from the specified MediaEngine (when provided).
+ // The TransportDescriptionFactory and the UniqueRandomIdGenerator are not
+ // owned by MediaSessionDescriptionFactory, so they must be kept alive by the
+ // user of this class.
+ MediaSessionDescriptionFactory(cricket::MediaEngineInterface* media_engine,
+ bool rtx_enabled,
+ rtc::UniqueRandomIdGenerator* ssrc_generator,
+ const TransportDescriptionFactory* factory);
+
+ const AudioCodecs& audio_sendrecv_codecs() const;
+ const AudioCodecs& audio_send_codecs() const;
+ const AudioCodecs& audio_recv_codecs() const;
+ void set_audio_codecs(const AudioCodecs& send_codecs,
+ const AudioCodecs& recv_codecs);
+ const VideoCodecs& video_sendrecv_codecs() const;
+ const VideoCodecs& video_send_codecs() const;
+ const VideoCodecs& video_recv_codecs() const;
+ void set_video_codecs(const VideoCodecs& send_codecs,
+ const VideoCodecs& recv_codecs);
+ RtpHeaderExtensions filtered_rtp_header_extensions(
+ RtpHeaderExtensions extensions) const;
+ SecurePolicy secure() const { return secure_; }
+ void set_secure(SecurePolicy s) { secure_ = s; }
+
+ void set_enable_encrypted_rtp_header_extensions(bool enable) {
+ enable_encrypted_rtp_header_extensions_ = enable;
+ }
+
+ void set_is_unified_plan(bool is_unified_plan) {
+ is_unified_plan_ = is_unified_plan;
+ }
+
+ webrtc::RTCErrorOr<std::unique_ptr<SessionDescription>> CreateOfferOrError(
+ const MediaSessionOptions& options,
+ const SessionDescription* current_description) const;
+ webrtc::RTCErrorOr<std::unique_ptr<SessionDescription>> CreateAnswerOrError(
+ const SessionDescription* offer,
+ const MediaSessionOptions& options,
+ const SessionDescription* current_description) const;
+
+ private:
+ struct AudioVideoRtpHeaderExtensions {
+ RtpHeaderExtensions audio;
+ RtpHeaderExtensions video;
+ };
+
+ const AudioCodecs& GetAudioCodecsForOffer(
+ const webrtc::RtpTransceiverDirection& direction) const;
+ const AudioCodecs& GetAudioCodecsForAnswer(
+ const webrtc::RtpTransceiverDirection& offer,
+ const webrtc::RtpTransceiverDirection& answer) const;
+ const VideoCodecs& GetVideoCodecsForOffer(
+ const webrtc::RtpTransceiverDirection& direction) const;
+ const VideoCodecs& GetVideoCodecsForAnswer(
+ const webrtc::RtpTransceiverDirection& offer,
+ const webrtc::RtpTransceiverDirection& answer) const;
+ void GetCodecsForOffer(
+ const std::vector<const ContentInfo*>& current_active_contents,
+ AudioCodecs* audio_codecs,
+ VideoCodecs* video_codecs) const;
+ void GetCodecsForAnswer(
+ const std::vector<const ContentInfo*>& current_active_contents,
+ const SessionDescription& remote_offer,
+ AudioCodecs* audio_codecs,
+ VideoCodecs* video_codecs) const;
+ AudioVideoRtpHeaderExtensions GetOfferedRtpHeaderExtensionsWithIds(
+ const std::vector<const ContentInfo*>& current_active_contents,
+ bool extmap_allow_mixed,
+ const std::vector<MediaDescriptionOptions>& media_description_options)
+ const;
+ webrtc::RTCError AddTransportOffer(
+ const std::string& content_name,
+ const TransportOptions& transport_options,
+ const SessionDescription* current_desc,
+ SessionDescription* offer,
+ IceCredentialsIterator* ice_credentials) const;
+
+ std::unique_ptr<TransportDescription> CreateTransportAnswer(
+ const std::string& content_name,
+ const SessionDescription* offer_desc,
+ const TransportOptions& transport_options,
+ const SessionDescription* current_desc,
+ bool require_transport_attributes,
+ IceCredentialsIterator* ice_credentials) const;
+
+ webrtc::RTCError AddTransportAnswer(
+ const std::string& content_name,
+ const TransportDescription& transport_desc,
+ SessionDescription* answer_desc) const;
+
+ // Helpers for adding media contents to the SessionDescription. Returns true
+ // it succeeds or the media content is not needed, or false if there is any
+ // error.
+
+ webrtc::RTCError AddAudioContentForOffer(
+ const MediaDescriptionOptions& media_description_options,
+ const MediaSessionOptions& session_options,
+ const ContentInfo* current_content,
+ const SessionDescription* current_description,
+ const RtpHeaderExtensions& audio_rtp_extensions,
+ const AudioCodecs& audio_codecs,
+ StreamParamsVec* current_streams,
+ SessionDescription* desc,
+ IceCredentialsIterator* ice_credentials) const;
+
+ webrtc::RTCError AddVideoContentForOffer(
+ const MediaDescriptionOptions& media_description_options,
+ const MediaSessionOptions& session_options,
+ const ContentInfo* current_content,
+ const SessionDescription* current_description,
+ const RtpHeaderExtensions& video_rtp_extensions,
+ const VideoCodecs& video_codecs,
+ StreamParamsVec* current_streams,
+ SessionDescription* desc,
+ IceCredentialsIterator* ice_credentials) const;
+
+ webrtc::RTCError AddDataContentForOffer(
+ const MediaDescriptionOptions& media_description_options,
+ const MediaSessionOptions& session_options,
+ const ContentInfo* current_content,
+ const SessionDescription* current_description,
+ StreamParamsVec* current_streams,
+ SessionDescription* desc,
+ IceCredentialsIterator* ice_credentials) const;
+
+ webrtc::RTCError AddUnsupportedContentForOffer(
+ const MediaDescriptionOptions& media_description_options,
+ const MediaSessionOptions& session_options,
+ const ContentInfo* current_content,
+ const SessionDescription* current_description,
+ SessionDescription* desc,
+ IceCredentialsIterator* ice_credentials) const;
+
+ webrtc::RTCError AddAudioContentForAnswer(
+ const MediaDescriptionOptions& media_description_options,
+ const MediaSessionOptions& session_options,
+ const ContentInfo* offer_content,
+ const SessionDescription* offer_description,
+ const ContentInfo* current_content,
+ const SessionDescription* current_description,
+ const TransportInfo* bundle_transport,
+ const AudioCodecs& audio_codecs,
+ const RtpHeaderExtensions& rtp_header_extensions,
+ StreamParamsVec* current_streams,
+ SessionDescription* answer,
+ IceCredentialsIterator* ice_credentials) const;
+
+ webrtc::RTCError AddVideoContentForAnswer(
+ const MediaDescriptionOptions& media_description_options,
+ const MediaSessionOptions& session_options,
+ const ContentInfo* offer_content,
+ const SessionDescription* offer_description,
+ const ContentInfo* current_content,
+ const SessionDescription* current_description,
+ const TransportInfo* bundle_transport,
+ const VideoCodecs& video_codecs,
+ const RtpHeaderExtensions& rtp_header_extensions,
+ StreamParamsVec* current_streams,
+ SessionDescription* answer,
+ IceCredentialsIterator* ice_credentials) const;
+
+ webrtc::RTCError AddDataContentForAnswer(
+ const MediaDescriptionOptions& media_description_options,
+ const MediaSessionOptions& session_options,
+ const ContentInfo* offer_content,
+ const SessionDescription* offer_description,
+ const ContentInfo* current_content,
+ const SessionDescription* current_description,
+ const TransportInfo* bundle_transport,
+ StreamParamsVec* current_streams,
+ SessionDescription* answer,
+ IceCredentialsIterator* ice_credentials) const;
+
+ webrtc::RTCError AddUnsupportedContentForAnswer(
+ const MediaDescriptionOptions& media_description_options,
+ const MediaSessionOptions& session_options,
+ const ContentInfo* offer_content,
+ const SessionDescription* offer_description,
+ const ContentInfo* current_content,
+ const SessionDescription* current_description,
+ const TransportInfo* bundle_transport,
+ SessionDescription* answer,
+ IceCredentialsIterator* ice_credentials) const;
+
+ void ComputeAudioCodecsIntersectionAndUnion();
+
+ void ComputeVideoCodecsIntersectionAndUnion();
+
+ rtc::UniqueRandomIdGenerator* ssrc_generator() const {
+ return ssrc_generator_.get();
+ }
+
+ bool is_unified_plan_ = false;
+ AudioCodecs audio_send_codecs_;
+ AudioCodecs audio_recv_codecs_;
+ // Intersection of send and recv.
+ AudioCodecs audio_sendrecv_codecs_;
+ // Union of send and recv.
+ AudioCodecs all_audio_codecs_;
+ VideoCodecs video_send_codecs_;
+ VideoCodecs video_recv_codecs_;
+ // Intersection of send and recv.
+ VideoCodecs video_sendrecv_codecs_;
+ // Union of send and recv.
+ VideoCodecs all_video_codecs_;
+ // This object may or may not be owned by this class.
+ webrtc::AlwaysValidPointer<rtc::UniqueRandomIdGenerator> const
+ ssrc_generator_;
+ bool enable_encrypted_rtp_header_extensions_ = false;
+ // TODO(zhihuang): Rename secure_ to sdec_policy_; rename the related getter
+ // and setter.
+ SecurePolicy secure_ = SEC_DISABLED;
+ const TransportDescriptionFactory* transport_desc_factory_;
+};
+
+// Convenience functions.
+bool IsMediaContent(const ContentInfo* content);
+bool IsAudioContent(const ContentInfo* content);
+bool IsVideoContent(const ContentInfo* content);
+bool IsDataContent(const ContentInfo* content);
+bool IsUnsupportedContent(const ContentInfo* content);
+const ContentInfo* GetFirstMediaContent(const ContentInfos& contents,
+ MediaType media_type);
+const ContentInfo* GetFirstAudioContent(const ContentInfos& contents);
+const ContentInfo* GetFirstVideoContent(const ContentInfos& contents);
+const ContentInfo* GetFirstDataContent(const ContentInfos& contents);
+const ContentInfo* GetFirstMediaContent(const SessionDescription* sdesc,
+ MediaType media_type);
+const ContentInfo* GetFirstAudioContent(const SessionDescription* sdesc);
+const ContentInfo* GetFirstVideoContent(const SessionDescription* sdesc);
+const ContentInfo* GetFirstDataContent(const SessionDescription* sdesc);
+const AudioContentDescription* GetFirstAudioContentDescription(
+ const SessionDescription* sdesc);
+const VideoContentDescription* GetFirstVideoContentDescription(
+ const SessionDescription* sdesc);
+const SctpDataContentDescription* GetFirstSctpDataContentDescription(
+ const SessionDescription* sdesc);
+// Non-const versions of the above functions.
+// Useful when modifying an existing description.
+ContentInfo* GetFirstMediaContent(ContentInfos* contents, MediaType media_type);
+ContentInfo* GetFirstAudioContent(ContentInfos* contents);
+ContentInfo* GetFirstVideoContent(ContentInfos* contents);
+ContentInfo* GetFirstDataContent(ContentInfos* contents);
+ContentInfo* GetFirstMediaContent(SessionDescription* sdesc,
+ MediaType media_type);
+ContentInfo* GetFirstAudioContent(SessionDescription* sdesc);
+ContentInfo* GetFirstVideoContent(SessionDescription* sdesc);
+ContentInfo* GetFirstDataContent(SessionDescription* sdesc);
+AudioContentDescription* GetFirstAudioContentDescription(
+ SessionDescription* sdesc);
+VideoContentDescription* GetFirstVideoContentDescription(
+ SessionDescription* sdesc);
+SctpDataContentDescription* GetFirstSctpDataContentDescription(
+ SessionDescription* sdesc);
+
+} // namespace cricket
+
+#endif // PC_MEDIA_SESSION_H_
diff --git a/third_party/libwebrtc/pc/media_session_unittest.cc b/third_party/libwebrtc/pc/media_session_unittest.cc
new file mode 100644
index 0000000000..a1770c18c5
--- /dev/null
+++ b/third_party/libwebrtc/pc/media_session_unittest.cc
@@ -0,0 +1,5039 @@
+/*
+ * Copyright 2004 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/media_session.h"
+
+#include <stddef.h>
+
+#include <algorithm>
+#include <cstdint>
+#include <map>
+#include <memory>
+#include <string>
+#include <tuple>
+#include <vector>
+
+#include "absl/algorithm/container.h"
+#include "absl/strings/match.h"
+#include "absl/strings/string_view.h"
+#include "absl/types/optional.h"
+#include "api/candidate.h"
+#include "api/crypto_params.h"
+#include "api/rtp_parameters.h"
+#include "media/base/codec.h"
+#include "media/base/media_constants.h"
+#include "media/base/test_utils.h"
+#include "media/sctp/sctp_transport_internal.h"
+#include "p2p/base/p2p_constants.h"
+#include "p2p/base/transport_description.h"
+#include "p2p/base/transport_info.h"
+#include "pc/media_protocol_names.h"
+#include "pc/rtp_media_utils.h"
+#include "pc/rtp_parameters_conversion.h"
+#include "rtc_base/arraysize.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/fake_ssl_identity.h"
+#include "rtc_base/rtc_certificate.h"
+#include "rtc_base/ssl_identity.h"
+#include "rtc_base/ssl_stream_adapter.h"
+#include "rtc_base/string_encode.h"
+#include "rtc_base/strings/string_builder.h"
+#include "rtc_base/unique_id_generator.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+#include "test/scoped_key_value_config.h"
+
+#define ASSERT_CRYPTO(cd, s, cs) \
+ ASSERT_EQ(s, cd->cryptos().size()); \
+ ASSERT_EQ(cs, cd->cryptos()[0].crypto_suite)
+
+typedef std::vector<cricket::Candidate> Candidates;
+
+using cricket::AudioCodec;
+using cricket::AudioContentDescription;
+using cricket::ContentInfo;
+using cricket::CryptoParamsVec;
+using cricket::GetFirstAudioContent;
+using cricket::GetFirstAudioContentDescription;
+using cricket::GetFirstDataContent;
+using cricket::GetFirstVideoContent;
+using cricket::GetFirstVideoContentDescription;
+using cricket::kAutoBandwidth;
+using cricket::MEDIA_TYPE_AUDIO;
+using cricket::MEDIA_TYPE_DATA;
+using cricket::MEDIA_TYPE_VIDEO;
+using cricket::MediaContentDescription;
+using cricket::MediaDescriptionOptions;
+using cricket::MediaProtocolType;
+using cricket::MediaSessionDescriptionFactory;
+using cricket::MediaSessionOptions;
+using cricket::MediaType;
+using cricket::RidDescription;
+using cricket::RidDirection;
+using cricket::SctpDataContentDescription;
+using cricket::SEC_DISABLED;
+using cricket::SEC_ENABLED;
+using cricket::SEC_REQUIRED;
+using cricket::SessionDescription;
+using cricket::SimulcastDescription;
+using cricket::SimulcastLayer;
+using cricket::SimulcastLayerList;
+using cricket::SsrcGroup;
+using cricket::StreamParams;
+using cricket::StreamParamsVec;
+using cricket::TransportDescription;
+using cricket::TransportDescriptionFactory;
+using cricket::TransportInfo;
+using cricket::VideoCodec;
+using cricket::VideoContentDescription;
+using rtc::kCsAeadAes128Gcm;
+using rtc::kCsAeadAes256Gcm;
+using rtc::kCsAesCm128HmacSha1_32;
+using rtc::kCsAesCm128HmacSha1_80;
+using rtc::UniqueRandomIdGenerator;
+using ::testing::Contains;
+using ::testing::Each;
+using ::testing::ElementsAre;
+using ::testing::ElementsAreArray;
+using ::testing::Eq;
+using ::testing::Field;
+using ::testing::IsEmpty;
+using ::testing::IsFalse;
+using ::testing::Ne;
+using ::testing::Not;
+using ::testing::Pointwise;
+using ::testing::SizeIs;
+using webrtc::RtpExtension;
+using webrtc::RtpTransceiverDirection;
+
+static AudioCodec createRedAudioCodec(absl::string_view encoding_id) {
+ AudioCodec red = cricket::CreateAudioCodec(63, "red", 48000, 2);
+ red.SetParam(cricket::kCodecParamNotInNameValueFormat,
+ std::string(encoding_id) + '/' + std::string(encoding_id));
+ return red;
+}
+
+static const AudioCodec kAudioCodecs1[] = {
+ cricket::CreateAudioCodec(111, "opus", 48000, 2),
+ createRedAudioCodec("111"),
+ cricket::CreateAudioCodec(102, "iLBC", 8000, 1),
+ cricket::CreateAudioCodec(0, "PCMU", 8000, 1),
+ cricket::CreateAudioCodec(8, "PCMA", 8000, 1),
+ cricket::CreateAudioCodec(117, "red", 8000, 1),
+ cricket::CreateAudioCodec(107, "CN", 48000, 1)};
+
+static const AudioCodec kAudioCodecs2[] = {
+ cricket::CreateAudioCodec(126, "foo", 16000, 1),
+ cricket::CreateAudioCodec(0, "PCMU", 8000, 1),
+ cricket::CreateAudioCodec(127, "iLBC", 8000, 1),
+};
+
+static const AudioCodec kAudioCodecsAnswer[] = {
+ cricket::CreateAudioCodec(102, "iLBC", 8000, 1),
+ cricket::CreateAudioCodec(0, "PCMU", 8000, 1),
+};
+
+static const VideoCodec kVideoCodecs1[] = {
+ cricket::CreateVideoCodec(96, "H264-SVC"),
+ cricket::CreateVideoCodec(97, "H264")};
+
+static const VideoCodec kVideoCodecs1Reverse[] = {
+ cricket::CreateVideoCodec(97, "H264"),
+ cricket::CreateVideoCodec(96, "H264-SVC")};
+
+static const VideoCodec kVideoCodecs2[] = {
+ cricket::CreateVideoCodec(126, "H264"),
+ cricket::CreateVideoCodec(127, "H263")};
+
+static const VideoCodec kVideoCodecsAnswer[] = {
+ cricket::CreateVideoCodec(97, "H264")};
+
+static const RtpExtension kAudioRtpExtension1[] = {
+ RtpExtension("urn:ietf:params:rtp-hdrext:ssrc-audio-level", 8),
+ RtpExtension("http://google.com/testing/audio_something", 10),
+};
+
+static const RtpExtension kAudioRtpExtensionEncrypted1[] = {
+ RtpExtension("urn:ietf:params:rtp-hdrext:ssrc-audio-level", 8),
+ RtpExtension("http://google.com/testing/audio_something", 10),
+ RtpExtension("urn:ietf:params:rtp-hdrext:ssrc-audio-level", 12, true),
+ RtpExtension("http://google.com/testing/audio_something", 11, true),
+};
+
+static const RtpExtension kAudioRtpExtension2[] = {
+ RtpExtension("urn:ietf:params:rtp-hdrext:ssrc-audio-level", 2),
+ RtpExtension("http://google.com/testing/audio_something_else", 8),
+ RtpExtension("http://google.com/testing/both_audio_and_video", 7),
+};
+
+static const RtpExtension kAudioRtpExtension3[] = {
+ RtpExtension("http://google.com/testing/audio_something", 2),
+ RtpExtension("http://google.com/testing/both_audio_and_video", 3),
+};
+
+static const RtpExtension kAudioRtpExtension3ForEncryption[] = {
+ RtpExtension("http://google.com/testing/audio_something", 2),
+ // Use RTP extension that supports encryption.
+ RtpExtension("urn:ietf:params:rtp-hdrext:toffset", 3),
+};
+
+static const RtpExtension kAudioRtpExtension3ForEncryptionOffer[] = {
+ RtpExtension("http://google.com/testing/audio_something", 2),
+ RtpExtension("urn:ietf:params:rtp-hdrext:toffset", 3),
+ RtpExtension("http://google.com/testing/audio_something", 14, true),
+ RtpExtension("urn:ietf:params:rtp-hdrext:toffset", 13, true),
+};
+
+static const RtpExtension kVideoRtpExtension3ForEncryptionOffer[] = {
+ RtpExtension("http://google.com/testing/video_something", 4),
+ RtpExtension("urn:ietf:params:rtp-hdrext:toffset", 3),
+ RtpExtension("http://google.com/testing/video_something", 12, true),
+ RtpExtension("urn:ietf:params:rtp-hdrext:toffset", 13, true),
+};
+
+static const RtpExtension kAudioRtpExtensionAnswer[] = {
+ RtpExtension("urn:ietf:params:rtp-hdrext:ssrc-audio-level", 8),
+};
+
+static const RtpExtension kAudioRtpExtensionEncryptedAnswer[] = {
+ RtpExtension("urn:ietf:params:rtp-hdrext:ssrc-audio-level", 12, true),
+};
+
+static const RtpExtension kVideoRtpExtension1[] = {
+ RtpExtension("urn:ietf:params:rtp-hdrext:toffset", 14),
+ RtpExtension("http://google.com/testing/video_something", 13),
+};
+
+static const RtpExtension kVideoRtpExtensionEncrypted1[] = {
+ RtpExtension("urn:ietf:params:rtp-hdrext:toffset", 14),
+ RtpExtension("http://google.com/testing/video_something", 13),
+ RtpExtension("urn:ietf:params:rtp-hdrext:toffset", 9, true),
+ RtpExtension("http://google.com/testing/video_something", 7, true),
+};
+
+static const RtpExtension kVideoRtpExtension2[] = {
+ RtpExtension("urn:ietf:params:rtp-hdrext:toffset", 2),
+ RtpExtension("http://google.com/testing/video_something_else", 14),
+ RtpExtension("http://google.com/testing/both_audio_and_video", 7),
+};
+
+static const RtpExtension kVideoRtpExtension3[] = {
+ RtpExtension("http://google.com/testing/video_something", 4),
+ RtpExtension("http://google.com/testing/both_audio_and_video", 5),
+};
+
+static const RtpExtension kVideoRtpExtension3ForEncryption[] = {
+ RtpExtension("http://google.com/testing/video_something", 4),
+ // Use RTP extension that supports encryption.
+ RtpExtension("urn:ietf:params:rtp-hdrext:toffset", 5),
+};
+
+static const RtpExtension kVideoRtpExtensionAnswer[] = {
+ RtpExtension("urn:ietf:params:rtp-hdrext:toffset", 14),
+};
+
+static const RtpExtension kVideoRtpExtensionEncryptedAnswer[] = {
+ RtpExtension("urn:ietf:params:rtp-hdrext:toffset", 9, true),
+};
+
+static const RtpExtension kRtpExtensionTransportSequenceNumber01[] = {
+ RtpExtension("http://www.ietf.org/id/"
+ "draft-holmer-rmcat-transport-wide-cc-extensions-01",
+ 1),
+};
+
+static const RtpExtension kRtpExtensionTransportSequenceNumber01And02[] = {
+ RtpExtension("http://www.ietf.org/id/"
+ "draft-holmer-rmcat-transport-wide-cc-extensions-01",
+ 1),
+ RtpExtension(
+ "http://www.webrtc.org/experiments/rtp-hdrext/transport-wide-cc-02",
+ 2),
+};
+
+static const RtpExtension kRtpExtensionTransportSequenceNumber02[] = {
+ RtpExtension(
+ "http://www.webrtc.org/experiments/rtp-hdrext/transport-wide-cc-02",
+ 2),
+};
+
+static const RtpExtension kRtpExtensionGenericFrameDescriptorUri00[] = {
+ RtpExtension("http://www.webrtc.org/experiments/rtp-hdrext/"
+ "generic-frame-descriptor-00",
+ 3),
+};
+
+static const uint32_t kSimulcastParamsSsrc[] = {10, 11, 20, 21, 30, 31};
+static const uint32_t kSimSsrc[] = {10, 20, 30};
+static const uint32_t kFec1Ssrc[] = {10, 11};
+static const uint32_t kFec2Ssrc[] = {20, 21};
+static const uint32_t kFec3Ssrc[] = {30, 31};
+
+static const char kMediaStream1[] = "stream_1";
+static const char kMediaStream2[] = "stream_2";
+static const char kVideoTrack1[] = "video_1";
+static const char kVideoTrack2[] = "video_2";
+static const char kAudioTrack1[] = "audio_1";
+static const char kAudioTrack2[] = "audio_2";
+static const char kAudioTrack3[] = "audio_3";
+
+static const char* kMediaProtocols[] = {"RTP/AVP", "RTP/SAVP", "RTP/AVPF",
+ "RTP/SAVPF"};
+static const char* kMediaProtocolsDtls[] = {
+ "TCP/TLS/RTP/SAVPF", "TCP/TLS/RTP/SAVP", "UDP/TLS/RTP/SAVPF",
+ "UDP/TLS/RTP/SAVP"};
+
+// SRTP cipher name negotiated by the tests. This must be updated if the
+// default changes.
+static const char* kDefaultSrtpCryptoSuite = kCsAesCm128HmacSha1_80;
+static const char* kDefaultSrtpCryptoSuiteGcm = kCsAeadAes256Gcm;
+static const uint8_t kDefaultCryptoSuiteSize = 3U;
+
+// These constants are used to make the code using "AddMediaDescriptionOptions"
+// more readable.
+static constexpr bool kStopped = true;
+static constexpr bool kActive = false;
+
+static bool IsMediaContentOfType(const ContentInfo* content,
+ MediaType media_type) {
+ RTC_DCHECK(content);
+ return content->media_description()->type() == media_type;
+}
+
+static RtpTransceiverDirection GetMediaDirection(const ContentInfo* content) {
+ RTC_DCHECK(content);
+ return content->media_description()->direction();
+}
+
+static void AddRtxCodec(const VideoCodec& rtx_codec,
+ std::vector<VideoCodec>* codecs) {
+ ASSERT_FALSE(cricket::FindCodecById(*codecs, rtx_codec.id));
+ codecs->push_back(rtx_codec);
+}
+
+static std::vector<std::string> GetCodecNames(
+ const std::vector<cricket::Codec>& codecs) {
+ std::vector<std::string> codec_names;
+ codec_names.reserve(codecs.size());
+ for (const auto& codec : codecs) {
+ codec_names.push_back(codec.name);
+ }
+ return codec_names;
+}
+
+// This is used for test only. MIDs are not the identification of the
+// MediaDescriptionOptions since some end points may not support MID and the SDP
+// may not contain 'mid'.
+std::vector<MediaDescriptionOptions>::iterator FindFirstMediaDescriptionByMid(
+ const std::string& mid,
+ MediaSessionOptions* opts) {
+ return absl::c_find_if(
+ opts->media_description_options,
+ [&mid](const MediaDescriptionOptions& t) { return t.mid == mid; });
+}
+
+std::vector<MediaDescriptionOptions>::const_iterator
+FindFirstMediaDescriptionByMid(const std::string& mid,
+ const MediaSessionOptions& opts) {
+ return absl::c_find_if(
+ opts.media_description_options,
+ [&mid](const MediaDescriptionOptions& t) { return t.mid == mid; });
+}
+
+// Add a media section to the `session_options`.
+static void AddMediaDescriptionOptions(MediaType type,
+ const std::string& mid,
+ RtpTransceiverDirection direction,
+ bool stopped,
+ MediaSessionOptions* opts) {
+ opts->media_description_options.push_back(
+ MediaDescriptionOptions(type, mid, direction, stopped));
+}
+
+static void AddAudioVideoSections(RtpTransceiverDirection direction,
+ MediaSessionOptions* opts) {
+ AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio", direction, kActive,
+ opts);
+ AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video", direction, kActive,
+ opts);
+}
+
+static void AddDataSection(RtpTransceiverDirection direction,
+ MediaSessionOptions* opts) {
+ AddMediaDescriptionOptions(MEDIA_TYPE_DATA, "data", direction, kActive, opts);
+}
+
+static void AttachSenderToMediaDescriptionOptions(
+ const std::string& mid,
+ MediaType type,
+ const std::string& track_id,
+ const std::vector<std::string>& stream_ids,
+ const std::vector<RidDescription>& rids,
+ const SimulcastLayerList& simulcast_layers,
+ int num_sim_layer,
+ MediaSessionOptions* session_options) {
+ auto it = FindFirstMediaDescriptionByMid(mid, session_options);
+ switch (type) {
+ case MEDIA_TYPE_AUDIO:
+ it->AddAudioSender(track_id, stream_ids);
+ break;
+ case MEDIA_TYPE_VIDEO:
+ it->AddVideoSender(track_id, stream_ids, rids, simulcast_layers,
+ num_sim_layer);
+ break;
+ default:
+ RTC_DCHECK_NOTREACHED();
+ }
+}
+
+static void AttachSenderToMediaDescriptionOptions(
+ const std::string& mid,
+ MediaType type,
+ const std::string& track_id,
+ const std::vector<std::string>& stream_ids,
+ int num_sim_layer,
+ MediaSessionOptions* session_options) {
+ AttachSenderToMediaDescriptionOptions(mid, type, track_id, stream_ids, {},
+ SimulcastLayerList(), num_sim_layer,
+ session_options);
+}
+
+static void DetachSenderFromMediaSection(const std::string& mid,
+ const std::string& track_id,
+ MediaSessionOptions* session_options) {
+ std::vector<cricket::SenderOptions>& sender_options_list =
+ FindFirstMediaDescriptionByMid(mid, session_options)->sender_options;
+ auto sender_it =
+ absl::c_find_if(sender_options_list,
+ [track_id](const cricket::SenderOptions& sender_options) {
+ return sender_options.track_id == track_id;
+ });
+ RTC_DCHECK(sender_it != sender_options_list.end());
+ sender_options_list.erase(sender_it);
+}
+
+// Helper function used to create a default MediaSessionOptions for Plan B SDP.
+// (https://tools.ietf.org/html/draft-uberti-rtcweb-plan-00).
+static MediaSessionOptions CreatePlanBMediaSessionOptions() {
+ MediaSessionOptions session_options;
+ AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio",
+ RtpTransceiverDirection::kRecvOnly, kActive,
+ &session_options);
+ return session_options;
+}
+
+// prefers GCM SDES crypto suites by removing non-GCM defaults.
+void PreferGcmCryptoParameters(CryptoParamsVec* cryptos) {
+ cryptos->erase(
+ std::remove_if(cryptos->begin(), cryptos->end(),
+ [](const cricket::CryptoParams& crypto) {
+ return crypto.crypto_suite != kCsAeadAes256Gcm &&
+ crypto.crypto_suite != kCsAeadAes128Gcm;
+ }),
+ cryptos->end());
+}
+
+// TODO(zhihuang): Most of these tests were written while MediaSessionOptions
+// was designed for Plan B SDP, where only one audio "m=" section and one video
+// "m=" section could be generated, and ordering couldn't be controlled. Many of
+// these tests may be obsolete as a result, and should be refactored or removed.
+class MediaSessionDescriptionFactoryTest : public ::testing::Test {
+ public:
+ MediaSessionDescriptionFactoryTest()
+ : tdf1_(field_trials),
+ tdf2_(field_trials),
+ f1_(nullptr, false, &ssrc_generator1, &tdf1_),
+ f2_(nullptr, false, &ssrc_generator2, &tdf2_) {
+ f1_.set_audio_codecs(MAKE_VECTOR(kAudioCodecs1),
+ MAKE_VECTOR(kAudioCodecs1));
+ f1_.set_video_codecs(MAKE_VECTOR(kVideoCodecs1),
+ MAKE_VECTOR(kVideoCodecs1));
+ f2_.set_audio_codecs(MAKE_VECTOR(kAudioCodecs2),
+ MAKE_VECTOR(kAudioCodecs2));
+ f2_.set_video_codecs(MAKE_VECTOR(kVideoCodecs2),
+ MAKE_VECTOR(kVideoCodecs2));
+ tdf1_.set_certificate(rtc::RTCCertificate::Create(
+ std::unique_ptr<rtc::SSLIdentity>(new rtc::FakeSSLIdentity("id1"))));
+ tdf2_.set_certificate(rtc::RTCCertificate::Create(
+ std::unique_ptr<rtc::SSLIdentity>(new rtc::FakeSSLIdentity("id2"))));
+ }
+
+ // Create a video StreamParamsVec object with:
+ // - one video stream with 3 simulcast streams and FEC,
+ StreamParamsVec CreateComplexVideoStreamParamsVec() {
+ SsrcGroup sim_group("SIM", MAKE_VECTOR(kSimSsrc));
+ SsrcGroup fec_group1("FEC", MAKE_VECTOR(kFec1Ssrc));
+ SsrcGroup fec_group2("FEC", MAKE_VECTOR(kFec2Ssrc));
+ SsrcGroup fec_group3("FEC", MAKE_VECTOR(kFec3Ssrc));
+
+ std::vector<SsrcGroup> ssrc_groups;
+ ssrc_groups.push_back(sim_group);
+ ssrc_groups.push_back(fec_group1);
+ ssrc_groups.push_back(fec_group2);
+ ssrc_groups.push_back(fec_group3);
+
+ StreamParams simulcast_params;
+ simulcast_params.id = kVideoTrack1;
+ simulcast_params.ssrcs = MAKE_VECTOR(kSimulcastParamsSsrc);
+ simulcast_params.ssrc_groups = ssrc_groups;
+ simulcast_params.cname = "Video_SIM_FEC";
+ simulcast_params.set_stream_ids({kMediaStream1});
+
+ StreamParamsVec video_streams;
+ video_streams.push_back(simulcast_params);
+
+ return video_streams;
+ }
+
+ bool CompareCryptoParams(const CryptoParamsVec& c1,
+ const CryptoParamsVec& c2) {
+ if (c1.size() != c2.size())
+ return false;
+ for (size_t i = 0; i < c1.size(); ++i)
+ if (c1[i].tag != c2[i].tag || c1[i].crypto_suite != c2[i].crypto_suite ||
+ c1[i].key_params != c2[i].key_params ||
+ c1[i].session_params != c2[i].session_params)
+ return false;
+ return true;
+ }
+
+ // Returns true if the transport info contains "renomination" as an
+ // ICE option.
+ bool GetIceRenomination(const TransportInfo* transport_info) {
+ return absl::c_linear_search(transport_info->description.transport_options,
+ "renomination");
+ }
+
+ void TestTransportInfo(bool offer,
+ const MediaSessionOptions& options,
+ bool has_current_desc) {
+ const std::string current_audio_ufrag = "current_audio_ufrag";
+ const std::string current_audio_pwd = "current_audio_pwd";
+ const std::string current_video_ufrag = "current_video_ufrag";
+ const std::string current_video_pwd = "current_video_pwd";
+ const std::string current_data_ufrag = "current_data_ufrag";
+ const std::string current_data_pwd = "current_data_pwd";
+ std::unique_ptr<SessionDescription> current_desc;
+ std::unique_ptr<SessionDescription> desc;
+ if (has_current_desc) {
+ current_desc = std::make_unique<SessionDescription>();
+ current_desc->AddTransportInfo(TransportInfo(
+ "audio",
+ TransportDescription(current_audio_ufrag, current_audio_pwd)));
+ current_desc->AddTransportInfo(TransportInfo(
+ "video",
+ TransportDescription(current_video_ufrag, current_video_pwd)));
+ current_desc->AddTransportInfo(TransportInfo(
+ "data", TransportDescription(current_data_ufrag, current_data_pwd)));
+ }
+ if (offer) {
+ desc = f1_.CreateOfferOrError(options, current_desc.get()).MoveValue();
+ } else {
+ std::unique_ptr<SessionDescription> offer;
+ offer = f1_.CreateOfferOrError(options, nullptr).MoveValue();
+ desc = f1_.CreateAnswerOrError(offer.get(), options, current_desc.get())
+ .MoveValue();
+ }
+ ASSERT_TRUE(desc);
+ const TransportInfo* ti_audio = desc->GetTransportInfoByName("audio");
+ if (options.has_audio()) {
+ if (has_current_desc) {
+ EXPECT_EQ(current_audio_ufrag, ti_audio->description.ice_ufrag);
+ EXPECT_EQ(current_audio_pwd, ti_audio->description.ice_pwd);
+ } else {
+ EXPECT_EQ(static_cast<size_t>(cricket::ICE_UFRAG_LENGTH),
+ ti_audio->description.ice_ufrag.size());
+ EXPECT_EQ(static_cast<size_t>(cricket::ICE_PWD_LENGTH),
+ ti_audio->description.ice_pwd.size());
+ }
+ auto media_desc_options_it =
+ FindFirstMediaDescriptionByMid("audio", options);
+ EXPECT_EQ(
+ media_desc_options_it->transport_options.enable_ice_renomination,
+ GetIceRenomination(ti_audio));
+ }
+ const TransportInfo* ti_video = desc->GetTransportInfoByName("video");
+ if (options.has_video()) {
+ auto media_desc_options_it =
+ FindFirstMediaDescriptionByMid("video", options);
+ if (options.bundle_enabled) {
+ EXPECT_EQ(ti_audio->description.ice_ufrag,
+ ti_video->description.ice_ufrag);
+ EXPECT_EQ(ti_audio->description.ice_pwd, ti_video->description.ice_pwd);
+ } else {
+ if (has_current_desc) {
+ EXPECT_EQ(current_video_ufrag, ti_video->description.ice_ufrag);
+ EXPECT_EQ(current_video_pwd, ti_video->description.ice_pwd);
+ } else {
+ EXPECT_EQ(static_cast<size_t>(cricket::ICE_UFRAG_LENGTH),
+ ti_video->description.ice_ufrag.size());
+ EXPECT_EQ(static_cast<size_t>(cricket::ICE_PWD_LENGTH),
+ ti_video->description.ice_pwd.size());
+ }
+ }
+ EXPECT_EQ(
+ media_desc_options_it->transport_options.enable_ice_renomination,
+ GetIceRenomination(ti_video));
+ }
+ const TransportInfo* ti_data = desc->GetTransportInfoByName("data");
+ if (options.has_data()) {
+ if (options.bundle_enabled) {
+ EXPECT_EQ(ti_audio->description.ice_ufrag,
+ ti_data->description.ice_ufrag);
+ EXPECT_EQ(ti_audio->description.ice_pwd, ti_data->description.ice_pwd);
+ } else {
+ if (has_current_desc) {
+ EXPECT_EQ(current_data_ufrag, ti_data->description.ice_ufrag);
+ EXPECT_EQ(current_data_pwd, ti_data->description.ice_pwd);
+ } else {
+ EXPECT_EQ(static_cast<size_t>(cricket::ICE_UFRAG_LENGTH),
+ ti_data->description.ice_ufrag.size());
+ EXPECT_EQ(static_cast<size_t>(cricket::ICE_PWD_LENGTH),
+ ti_data->description.ice_pwd.size());
+ }
+ }
+ auto media_desc_options_it =
+ FindFirstMediaDescriptionByMid("data", options);
+ EXPECT_EQ(
+ media_desc_options_it->transport_options.enable_ice_renomination,
+ GetIceRenomination(ti_data));
+ }
+ }
+
+ void TestCryptoWithBundle(bool offer) {
+ f1_.set_secure(SEC_ENABLED);
+ MediaSessionOptions options;
+ AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &options);
+ std::unique_ptr<SessionDescription> ref_desc;
+ std::unique_ptr<SessionDescription> desc;
+ if (offer) {
+ options.bundle_enabled = false;
+ ref_desc = f1_.CreateOfferOrError(options, nullptr).MoveValue();
+ options.bundle_enabled = true;
+ desc = f1_.CreateOfferOrError(options, ref_desc.get()).MoveValue();
+ } else {
+ options.bundle_enabled = true;
+ ref_desc = f1_.CreateOfferOrError(options, nullptr).MoveValue();
+ desc =
+ f1_.CreateAnswerOrError(ref_desc.get(), options, nullptr).MoveValue();
+ }
+ ASSERT_TRUE(desc);
+ const cricket::MediaContentDescription* audio_media_desc =
+ desc->GetContentDescriptionByName("audio");
+ ASSERT_TRUE(audio_media_desc);
+ const cricket::MediaContentDescription* video_media_desc =
+ desc->GetContentDescriptionByName("video");
+ ASSERT_TRUE(video_media_desc);
+ EXPECT_TRUE(CompareCryptoParams(audio_media_desc->cryptos(),
+ video_media_desc->cryptos()));
+ ASSERT_CRYPTO(audio_media_desc, offer ? kDefaultCryptoSuiteSize : 1U,
+ kDefaultSrtpCryptoSuite);
+
+ // Verify the selected crypto is one from the reference audio
+ // media content.
+ const cricket::MediaContentDescription* ref_audio_media_desc =
+ ref_desc->GetContentDescriptionByName("audio");
+ bool found = false;
+ for (size_t i = 0; i < ref_audio_media_desc->cryptos().size(); ++i) {
+ if (ref_audio_media_desc->cryptos()[i].Matches(
+ audio_media_desc->cryptos()[0])) {
+ found = true;
+ break;
+ }
+ }
+ EXPECT_TRUE(found);
+ }
+
+ // This test that the audio and video media direction is set to
+ // `expected_direction_in_answer` in an answer if the offer direction is set
+ // to `direction_in_offer` and the answer is willing to both send and receive.
+ void TestMediaDirectionInAnswer(
+ RtpTransceiverDirection direction_in_offer,
+ RtpTransceiverDirection expected_direction_in_answer) {
+ MediaSessionOptions offer_opts;
+ AddAudioVideoSections(direction_in_offer, &offer_opts);
+
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(offer_opts, nullptr).MoveValue();
+ ASSERT_TRUE(offer.get());
+ ContentInfo* ac_offer = offer->GetContentByName("audio");
+ ASSERT_TRUE(ac_offer);
+ ContentInfo* vc_offer = offer->GetContentByName("video");
+ ASSERT_TRUE(vc_offer);
+
+ MediaSessionOptions answer_opts;
+ AddAudioVideoSections(RtpTransceiverDirection::kSendRecv, &answer_opts);
+ std::unique_ptr<SessionDescription> answer =
+ f2_.CreateAnswerOrError(offer.get(), answer_opts, nullptr).MoveValue();
+ const AudioContentDescription* acd_answer =
+ GetFirstAudioContentDescription(answer.get());
+ EXPECT_EQ(expected_direction_in_answer, acd_answer->direction());
+ const VideoContentDescription* vcd_answer =
+ GetFirstVideoContentDescription(answer.get());
+ EXPECT_EQ(expected_direction_in_answer, vcd_answer->direction());
+ }
+
+ bool VerifyNoCNCodecs(const cricket::ContentInfo* content) {
+ RTC_DCHECK(content);
+ RTC_CHECK(content->media_description());
+ for (const cricket::Codec& codec : content->media_description()->codecs()) {
+ if (codec.name == "CN") {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ // Shared driver for the GCM-cipher answer tests: creates an audio+video
+ // offer/answer pair with GCM crypto suites enabled on the offerer and/or
+ // answerer side, then verifies the negotiated crypto suite on both
+ // m= sections. GCM is expected to be chosen only when both sides enable it.
+ void TestVideoGcmCipher(bool gcm_offer, bool gcm_answer) {
+ MediaSessionOptions offer_opts;
+ AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &offer_opts);
+ offer_opts.crypto_options.srtp.enable_gcm_crypto_suites = gcm_offer;
+
+ MediaSessionOptions answer_opts;
+ AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &answer_opts);
+ answer_opts.crypto_options.srtp.enable_gcm_crypto_suites = gcm_answer;
+
+ f1_.set_secure(SEC_ENABLED);
+ f2_.set_secure(SEC_ENABLED);
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(offer_opts, nullptr).MoveValue();
+ ASSERT_TRUE(offer.get());
+ // When both sides enable GCM, reorder the offered crypto parameters so
+ // the GCM suites come first; the answer should then select GCM.
+ if (gcm_offer && gcm_answer) {
+ for (cricket::ContentInfo& content : offer->contents()) {
+ auto cryptos = content.media_description()->cryptos();
+ PreferGcmCryptoParameters(&cryptos);
+ content.media_description()->set_cryptos(cryptos);
+ }
+ }
+ std::unique_ptr<SessionDescription> answer =
+ f2_.CreateAnswerOrError(offer.get(), answer_opts, nullptr).MoveValue();
+ const ContentInfo* ac = answer->GetContentByName("audio");
+ const ContentInfo* vc = answer->GetContentByName("video");
+ ASSERT_TRUE(ac);
+ ASSERT_TRUE(vc);
+ EXPECT_EQ(MediaProtocolType::kRtp, ac->type);
+ EXPECT_EQ(MediaProtocolType::kRtp, vc->type);
+ const AudioContentDescription* acd = ac->media_description()->as_audio();
+ const VideoContentDescription* vcd = vc->media_description()->as_video();
+ EXPECT_EQ(MEDIA_TYPE_AUDIO, acd->type());
+ EXPECT_THAT(acd->codecs(), ElementsAreArray(kAudioCodecsAnswer));
+ EXPECT_EQ(kAutoBandwidth, acd->bandwidth()); // negotiated auto bw
+ EXPECT_EQ(0U, acd->first_ssrc()); // no sender is attached
+ EXPECT_TRUE(acd->rtcp_mux()); // negotiated rtcp-mux
+ // GCM must be negotiated iff both endpoints enabled it.
+ if (gcm_offer && gcm_answer) {
+ ASSERT_CRYPTO(acd, 1U, kDefaultSrtpCryptoSuiteGcm);
+ } else {
+ ASSERT_CRYPTO(acd, 1U, kDefaultSrtpCryptoSuite);
+ }
+ EXPECT_EQ(MEDIA_TYPE_VIDEO, vcd->type());
+ EXPECT_THAT(vcd->codecs(), ElementsAreArray(kVideoCodecsAnswer));
+ EXPECT_EQ(0U, vcd->first_ssrc()); // no sender is attached
+ EXPECT_TRUE(vcd->rtcp_mux()); // negotiated rtcp-mux
+ if (gcm_offer && gcm_answer) {
+ ASSERT_CRYPTO(vcd, 1U, kDefaultSrtpCryptoSuiteGcm);
+ } else {
+ ASSERT_CRYPTO(vcd, 1U, kDefaultSrtpCryptoSuite);
+ }
+ EXPECT_EQ(cricket::kMediaProtocolSavpf, vcd->protocol());
+ }
+
+ // Offers `offered` header extensions from f1_, answers with `local`
+ // extensions from f2_, and verifies that both the audio and the video
+ // answer sections contain exactly `expectedAnswer`.
+ void TestTransportSequenceNumberNegotiation(
+ const cricket::RtpHeaderExtensions& local,
+ const cricket::RtpHeaderExtensions& offered,
+ const cricket::RtpHeaderExtensions& expectedAnswer) {
+ MediaSessionOptions opts;
+ AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts);
+ SetAudioVideoRtpHeaderExtensions(offered, offered, &opts);
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+ ASSERT_TRUE(offer.get());
+ // Reuse `opts` for the answer, swapping in the local extensions.
+ SetAudioVideoRtpHeaderExtensions(local, local, &opts);
+ std::unique_ptr<SessionDescription> answer =
+ f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+
+ EXPECT_EQ(
+ expectedAnswer,
+ GetFirstAudioContentDescription(answer.get())->rtp_header_extensions());
+ EXPECT_EQ(
+ expectedAnswer,
+ GetFirstVideoContentDescription(answer.get())->rtp_header_extensions());
+ }
+
+ // Converts a list of RTP header extensions into the equivalent
+ // RtpHeaderExtensionCapability entries, each marked kSendRecv.
+ std::vector<webrtc::RtpHeaderExtensionCapability>
+ HeaderExtensionCapabilitiesFromRtpExtensions(
+ cricket::RtpHeaderExtensions extensions) {
+ std::vector<webrtc::RtpHeaderExtensionCapability> result;
+ result.reserve(extensions.size());
+ for (const auto& ext : extensions) {
+ result.emplace_back(ext.uri, ext.id,
+ webrtc::RtpTransceiverDirection::kSendRecv);
+ }
+ return result;
+ }
+
+ // Installs the given audio/video header-extension capability lists on
+ // every matching media-description entry in `opts`; entries of other
+ // media types are left untouched.
+ void SetAudioVideoRtpHeaderExtensions(cricket::RtpHeaderExtensions audio_exts,
+ cricket::RtpHeaderExtensions video_exts,
+ MediaSessionOptions* opts) {
+ const auto audio_caps =
+ HeaderExtensionCapabilitiesFromRtpExtensions(audio_exts);
+ const auto video_caps =
+ HeaderExtensionCapabilitiesFromRtpExtensions(video_exts);
+ for (auto& entry : opts->media_description_options) {
+ if (entry.type == MEDIA_TYPE_AUDIO) {
+ entry.header_extensions = audio_caps;
+ } else if (entry.type == MEDIA_TYPE_VIDEO) {
+ entry.header_extensions = video_caps;
+ }
+ }
+ }
+
+ protected:
+ // Field-trial overrides scoped to this fixture.
+ webrtc::test::ScopedKeyValueConfig field_trials;
+ // Separate SSRC id generators for the two factories below.
+ UniqueRandomIdGenerator ssrc_generator1;
+ UniqueRandomIdGenerator ssrc_generator2;
+ // Transport-description factories backing f1_/f2_.
+ TransportDescriptionFactory tdf1_;
+ TransportDescriptionFactory tdf2_;
+ // f1_ typically plays the offerer role and f2_ the answerer in the tests.
+ MediaSessionDescriptionFactory f1_;
+ MediaSessionDescriptionFactory f2_;
+};
+
+// Create a typical audio offer, and ensure it matches what we expect.
+TEST_F(MediaSessionDescriptionFactoryTest, TestCreateAudioOffer) {
+ f1_.set_secure(SEC_ENABLED);
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(CreatePlanBMediaSessionOptions(), nullptr)
+ .MoveValue();
+ ASSERT_TRUE(offer.get());
+ const ContentInfo* ac = offer->GetContentByName("audio");
+ const ContentInfo* vc = offer->GetContentByName("video");
+ // The Plan B options request audio only, so no video section may appear.
+ ASSERT_TRUE(ac);
+ EXPECT_FALSE(vc);
+ EXPECT_EQ(MediaProtocolType::kRtp, ac->type);
+ const AudioContentDescription* acd = ac->media_description()->as_audio();
+ EXPECT_EQ(MEDIA_TYPE_AUDIO, acd->type());
+ EXPECT_EQ(f1_.audio_sendrecv_codecs(), acd->codecs());
+ EXPECT_EQ(0U, acd->first_ssrc()); // no sender is attached.
+ EXPECT_EQ(kAutoBandwidth, acd->bandwidth()); // default bandwidth (auto)
+ EXPECT_TRUE(acd->rtcp_mux()); // rtcp-mux defaults on
+ ASSERT_CRYPTO(acd, kDefaultCryptoSuiteSize, kDefaultSrtpCryptoSuite);
+ EXPECT_EQ(cricket::kMediaProtocolSavpf, acd->protocol());
+}
+
+// Create an offer with just Opus and RED.
+TEST_F(MediaSessionDescriptionFactoryTest,
+ TestCreateAudioOfferWithJustOpusAndRed) {
+ f1_.set_secure(SEC_ENABLED);
+ // First, prefer to only use opus and red.
+ std::vector<webrtc::RtpCodecCapability> preferences;
+ preferences.push_back(
+ webrtc::ToRtpCodecCapability(f1_.audio_sendrecv_codecs()[0]));
+ preferences.push_back(
+ webrtc::ToRtpCodecCapability(f1_.audio_sendrecv_codecs()[1]));
+ EXPECT_EQ("opus", preferences[0].name);
+ EXPECT_EQ("red", preferences[1].name);
+
+ auto opts = CreatePlanBMediaSessionOptions();
+ opts.media_description_options.at(0).codec_preferences = preferences;
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+ ASSERT_TRUE(offer.get());
+ const ContentInfo* ac = offer->GetContentByName("audio");
+ const ContentInfo* vc = offer->GetContentByName("video");
+ // Boolean assertions instead of NULL comparisons, matching the style used
+ // by the other tests in this file.
+ ASSERT_TRUE(ac);
+ ASSERT_FALSE(vc);
+ EXPECT_EQ(MediaProtocolType::kRtp, ac->type);
+ const AudioContentDescription* acd = ac->media_description()->as_audio();
+ EXPECT_EQ(MEDIA_TYPE_AUDIO, acd->type());
+ // Only the two preferred codecs survive, in preference order.
+ EXPECT_EQ(2U, acd->codecs().size());
+ EXPECT_EQ("opus", acd->codecs()[0].name);
+ EXPECT_EQ("red", acd->codecs()[1].name);
+}
+
+// Create an offer with RED before Opus, which enables RED with Opus encoding.
+TEST_F(MediaSessionDescriptionFactoryTest, TestCreateAudioOfferWithRedForOpus) {
+ f1_.set_secure(SEC_ENABLED);
+ // First, prefer to only use opus and red, with red listed first.
+ std::vector<webrtc::RtpCodecCapability> preferences;
+ preferences.push_back(
+ webrtc::ToRtpCodecCapability(f1_.audio_sendrecv_codecs()[1]));
+ preferences.push_back(
+ webrtc::ToRtpCodecCapability(f1_.audio_sendrecv_codecs()[0]));
+ EXPECT_EQ("red", preferences[0].name);
+ EXPECT_EQ("opus", preferences[1].name);
+
+ auto opts = CreatePlanBMediaSessionOptions();
+ opts.media_description_options.at(0).codec_preferences = preferences;
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+ ASSERT_TRUE(offer.get());
+ const ContentInfo* ac = offer->GetContentByName("audio");
+ const ContentInfo* vc = offer->GetContentByName("video");
+ // Boolean assertions instead of NULL comparisons, matching the style used
+ // by the other tests in this file.
+ ASSERT_TRUE(ac);
+ ASSERT_FALSE(vc);
+ EXPECT_EQ(MediaProtocolType::kRtp, ac->type);
+ const AudioContentDescription* acd = ac->media_description()->as_audio();
+ EXPECT_EQ(MEDIA_TYPE_AUDIO, acd->type());
+ // The preference order (red first) must be preserved in the offer.
+ EXPECT_EQ(2U, acd->codecs().size());
+ EXPECT_EQ("red", acd->codecs()[0].name);
+ EXPECT_EQ("opus", acd->codecs()[1].name);
+}
+
+// Create a typical video offer, and ensure it matches what we expect.
+TEST_F(MediaSessionDescriptionFactoryTest, TestCreateVideoOffer) {
+ MediaSessionOptions opts;
+ AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts);
+ f1_.set_secure(SEC_ENABLED);
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+ ASSERT_TRUE(offer.get());
+ const ContentInfo* ac = offer->GetContentByName("audio");
+ const ContentInfo* vc = offer->GetContentByName("video");
+ ASSERT_TRUE(ac);
+ ASSERT_TRUE(vc);
+ EXPECT_EQ(MediaProtocolType::kRtp, ac->type);
+ EXPECT_EQ(MediaProtocolType::kRtp, vc->type);
+ const AudioContentDescription* acd = ac->media_description()->as_audio();
+ const VideoContentDescription* vcd = vc->media_description()->as_video();
+ // Audio section defaults.
+ EXPECT_EQ(MEDIA_TYPE_AUDIO, acd->type());
+ EXPECT_EQ(f1_.audio_sendrecv_codecs(), acd->codecs());
+ EXPECT_EQ(0U, acd->first_ssrc()); // no sender is attached
+ EXPECT_EQ(kAutoBandwidth, acd->bandwidth()); // default bandwidth (auto)
+ EXPECT_TRUE(acd->rtcp_mux()); // rtcp-mux defaults on
+ ASSERT_CRYPTO(acd, kDefaultCryptoSuiteSize, kDefaultSrtpCryptoSuite);
+ EXPECT_EQ(cricket::kMediaProtocolSavpf, acd->protocol());
+ // Video section defaults.
+ EXPECT_EQ(MEDIA_TYPE_VIDEO, vcd->type());
+ EXPECT_EQ(f1_.video_sendrecv_codecs(), vcd->codecs());
+ EXPECT_EQ(0U, vcd->first_ssrc()); // no sender is attached
+ EXPECT_EQ(kAutoBandwidth, vcd->bandwidth()); // default bandwidth (auto)
+ EXPECT_TRUE(vcd->rtcp_mux()); // rtcp-mux defaults on
+ ASSERT_CRYPTO(vcd, kDefaultCryptoSuiteSize, kDefaultSrtpCryptoSuite);
+ EXPECT_EQ(cricket::kMediaProtocolSavpf, vcd->protocol());
+}
+
+// Test creating an offer with bundle where the codecs have the same dynamic
+// RTP payload type. The test verifies that the offer doesn't contain
+// duplicate RTP payload types.
+TEST_F(MediaSessionDescriptionFactoryTest, TestBundleOfferWithSameCodecPlType) {
+ const VideoCodec& offered_video_codec = f2_.video_sendrecv_codecs()[0];
+ const AudioCodec& offered_audio_codec = f2_.audio_sendrecv_codecs()[0];
+ // Precondition for the test: both codecs start with the same payload id.
+ ASSERT_EQ(offered_video_codec.id, offered_audio_codec.id);
+
+ MediaSessionOptions opts;
+ AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts);
+ opts.bundle_enabled = true;
+ std::unique_ptr<SessionDescription> offer =
+ f2_.CreateOfferOrError(opts, nullptr).MoveValue();
+ const VideoContentDescription* vcd =
+ GetFirstVideoContentDescription(offer.get());
+ const AudioContentDescription* acd =
+ GetFirstAudioContentDescription(offer.get());
+ ASSERT_TRUE(vcd);
+ ASSERT_TRUE(acd);
+ // One of the payload ids must have been remapped; the codec names stay.
+ EXPECT_NE(vcd->codecs()[0].id, acd->codecs()[0].id);
+ EXPECT_EQ(vcd->codecs()[0].name, offered_video_codec.name);
+ EXPECT_EQ(acd->codecs()[0].name, offered_audio_codec.name);
+}
+
+// Test creating an updated offer with bundle, audio, video and data
+// after an audio only session has been negotiated.
+TEST_F(MediaSessionDescriptionFactoryTest,
+ TestCreateUpdatedVideoOfferWithBundle) {
+ f1_.set_secure(SEC_ENABLED);
+ f2_.set_secure(SEC_ENABLED);
+ MediaSessionOptions opts;
+ AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio",
+ RtpTransceiverDirection::kRecvOnly, kActive,
+ &opts);
+ AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video",
+ RtpTransceiverDirection::kInactive, kStopped,
+ &opts);
+ opts.bundle_enabled = true;
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+ std::unique_ptr<SessionDescription> answer =
+ f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+
+ // Re-offer with both sections active.
+ MediaSessionOptions updated_opts;
+ AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &updated_opts);
+ updated_opts.bundle_enabled = true;
+ std::unique_ptr<SessionDescription> updated_offer(
+ f1_.CreateOfferOrError(updated_opts, answer.get()).MoveValue());
+
+ const AudioContentDescription* acd =
+ GetFirstAudioContentDescription(updated_offer.get());
+ const VideoContentDescription* vcd =
+ GetFirstVideoContentDescription(updated_offer.get());
+ // ASSERT (not EXPECT): the pointers are dereferenced just below, so a
+ // failure here must abort the test instead of crashing on null.
+ ASSERT_TRUE(vcd);
+ ASSERT_TRUE(acd);
+
+ ASSERT_CRYPTO(acd, 1U, kDefaultSrtpCryptoSuite);
+ EXPECT_EQ(cricket::kMediaProtocolSavpf, acd->protocol());
+ ASSERT_CRYPTO(vcd, 1U, kDefaultSrtpCryptoSuite);
+ EXPECT_EQ(cricket::kMediaProtocolSavpf, vcd->protocol());
+}
+
+// Create an SCTP data offer with bundle without error.
+TEST_F(MediaSessionDescriptionFactoryTest, TestCreateSctpDataOffer) {
+ MediaSessionOptions opts;
+ opts.bundle_enabled = true;
+ AddDataSection(RtpTransceiverDirection::kSendRecv, &opts);
+ f1_.set_secure(SEC_ENABLED);
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+ EXPECT_TRUE(offer.get());
+ EXPECT_TRUE(offer->GetContentByName("data"));
+ auto dcd = GetFirstSctpDataContentDescription(offer.get());
+ ASSERT_TRUE(dcd);
+ // Since this transport is insecure, the protocol should be "SCTP".
+ EXPECT_EQ(cricket::kMediaProtocolSctp, dcd->protocol());
+}
+
+// Create a secure SCTP data offer with bundle without error.
+TEST_F(MediaSessionDescriptionFactoryTest, TestCreateSecureSctpDataOffer) {
+ MediaSessionOptions opts;
+ opts.bundle_enabled = true;
+ AddDataSection(RtpTransceiverDirection::kSendRecv, &opts);
+ f1_.set_secure(SEC_ENABLED);
+ // Unlike TestCreateSctpDataOffer, the transport factory is secured too.
+ tdf1_.set_secure(SEC_ENABLED);
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+ EXPECT_TRUE(offer.get());
+ EXPECT_TRUE(offer->GetContentByName("data"));
+ auto dcd = GetFirstSctpDataContentDescription(offer.get());
+ ASSERT_TRUE(dcd);
+ // The protocol should now be "UDP/DTLS/SCTP"
+ EXPECT_EQ(cricket::kMediaProtocolUdpDtlsSctp, dcd->protocol());
+}
+
+// Test creating an sctp data channel from an already generated offer.
+TEST_F(MediaSessionDescriptionFactoryTest, TestCreateImplicitSctpDataOffer) {
+ MediaSessionOptions opts;
+ opts.bundle_enabled = true;
+ AddDataSection(RtpTransceiverDirection::kSendRecv, &opts);
+ f1_.set_secure(SEC_ENABLED);
+ std::unique_ptr<SessionDescription> offer1(
+ f1_.CreateOfferOrError(opts, nullptr).MoveValue());
+ ASSERT_TRUE(offer1.get());
+ const ContentInfo* data = offer1->GetContentByName("data");
+ ASSERT_TRUE(data);
+ ASSERT_EQ(cricket::kMediaProtocolSctp, data->media_description()->protocol());
+
+ // Re-offer based on offer1; the data section must carry over.
+ std::unique_ptr<SessionDescription> offer2(
+ f1_.CreateOfferOrError(opts, offer1.get()).MoveValue());
+ // Guard the dereference below, mirroring the check done for offer1.
+ ASSERT_TRUE(offer2.get());
+ data = offer2->GetContentByName("data");
+ ASSERT_TRUE(data);
+ EXPECT_EQ(cricket::kMediaProtocolSctp, data->media_description()->protocol());
+}
+
+// Test that if BUNDLE is enabled and all media sections are rejected then the
+// BUNDLE group is not present in the re-offer.
+TEST_F(MediaSessionDescriptionFactoryTest, ReOfferNoBundleGroupIfAllRejected) {
+ MediaSessionOptions opts;
+ opts.bundle_enabled = true;
+ AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio",
+ RtpTransceiverDirection::kSendRecv, kActive,
+ &opts);
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+
+ // Reject the only m= section, then re-offer.
+ opts.media_description_options[0].stopped = true;
+ std::unique_ptr<SessionDescription> reoffer =
+ f1_.CreateOfferOrError(opts, offer.get()).MoveValue();
+
+ EXPECT_FALSE(reoffer->GetGroupByName(cricket::GROUP_TYPE_BUNDLE));
+}
+
+// Test that if BUNDLE is enabled and the remote re-offer does not include a
+// BUNDLE group since all media sections are rejected, then the re-answer also
+// does not include a BUNDLE group.
+TEST_F(MediaSessionDescriptionFactoryTest, ReAnswerNoBundleGroupIfAllRejected) {
+ MediaSessionOptions opts;
+ opts.bundle_enabled = true;
+ AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio",
+ RtpTransceiverDirection::kSendRecv, kActive,
+ &opts);
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+ std::unique_ptr<SessionDescription> answer =
+ f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+
+ // Reject the only m= section; re-offer then re-answer.
+ opts.media_description_options[0].stopped = true;
+ std::unique_ptr<SessionDescription> reoffer =
+ f1_.CreateOfferOrError(opts, offer.get()).MoveValue();
+ std::unique_ptr<SessionDescription> reanswer =
+ f2_.CreateAnswerOrError(reoffer.get(), opts, answer.get()).MoveValue();
+
+ EXPECT_FALSE(reanswer->GetGroupByName(cricket::GROUP_TYPE_BUNDLE));
+}
+
+// Test that if BUNDLE is enabled and the previous offerer-tagged media section
+// was rejected then the new offerer-tagged media section is the non-rejected
+// media section.
+TEST_F(MediaSessionDescriptionFactoryTest, ReOfferChangeBundleOffererTagged) {
+ MediaSessionOptions opts;
+ opts.bundle_enabled = true;
+ AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio",
+ RtpTransceiverDirection::kSendRecv, kActive,
+ &opts);
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+
+ // Reject the audio m= section and add a video m= section.
+ opts.media_description_options[0].stopped = true;
+ AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video",
+ RtpTransceiverDirection::kSendRecv, kActive,
+ &opts);
+ std::unique_ptr<SessionDescription> reoffer =
+ f1_.CreateOfferOrError(opts, offer.get()).MoveValue();
+
+ // Only the surviving (video) section may remain in the BUNDLE group.
+ const cricket::ContentGroup* bundle_group =
+ reoffer->GetGroupByName(cricket::GROUP_TYPE_BUNDLE);
+ ASSERT_TRUE(bundle_group);
+ EXPECT_FALSE(bundle_group->HasContentName("audio"));
+ EXPECT_TRUE(bundle_group->HasContentName("video"));
+}
+
+// Test that if BUNDLE is enabled and the previous offerer-tagged media section
+// was rejected and a new media section is added, then the re-answer BUNDLE
+// group will contain only the non-rejected media section.
+TEST_F(MediaSessionDescriptionFactoryTest, ReAnswerChangedBundleOffererTagged) {
+ MediaSessionOptions opts;
+ opts.bundle_enabled = true;
+ AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio",
+ RtpTransceiverDirection::kSendRecv, kActive,
+ &opts);
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+ std::unique_ptr<SessionDescription> answer =
+ f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+
+ // Reject the audio m= section and add a video m= section.
+ opts.media_description_options[0].stopped = true;
+ AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video",
+ RtpTransceiverDirection::kSendRecv, kActive,
+ &opts);
+ std::unique_ptr<SessionDescription> reoffer =
+ f1_.CreateOfferOrError(opts, offer.get()).MoveValue();
+ std::unique_ptr<SessionDescription> reanswer =
+ f2_.CreateAnswerOrError(reoffer.get(), opts, answer.get()).MoveValue();
+
+ // The re-answer's BUNDLE group must only contain the surviving section.
+ const cricket::ContentGroup* bundle_group =
+ reanswer->GetGroupByName(cricket::GROUP_TYPE_BUNDLE);
+ ASSERT_TRUE(bundle_group);
+ EXPECT_FALSE(bundle_group->HasContentName("audio"));
+ EXPECT_TRUE(bundle_group->HasContentName("video"));
+}
+
+// An answerer must be able to handle a remote offer that carries more than
+// one BUNDLE group (not producible locally, but valid on the wire).
+TEST_F(MediaSessionDescriptionFactoryTest,
+ CreateAnswerForOfferWithMultipleBundleGroups) {
+ // Create an offer with 4 m= sections, initially without BUNDLE groups.
+ MediaSessionOptions opts;
+ opts.bundle_enabled = false;
+ AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "1",
+ RtpTransceiverDirection::kSendRecv, kActive,
+ &opts);
+ AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "2",
+ RtpTransceiverDirection::kSendRecv, kActive,
+ &opts);
+ AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "3",
+ RtpTransceiverDirection::kSendRecv, kActive,
+ &opts);
+ AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "4",
+ RtpTransceiverDirection::kSendRecv, kActive,
+ &opts);
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+ ASSERT_TRUE(offer->groups().empty());
+
+ // Munge the offer to have two groups. Offers like these cannot be generated
+ // without munging, but it is valid to receive such offers from remote
+ // endpoints.
+ cricket::ContentGroup bundle_group1(cricket::GROUP_TYPE_BUNDLE);
+ bundle_group1.AddContentName("1");
+ bundle_group1.AddContentName("2");
+ cricket::ContentGroup bundle_group2(cricket::GROUP_TYPE_BUNDLE);
+ bundle_group2.AddContentName("3");
+ bundle_group2.AddContentName("4");
+ offer->AddGroup(bundle_group1);
+ offer->AddGroup(bundle_group2);
+
+ // If BUNDLE is enabled, the answer to this offer should accept both BUNDLE
+ // groups.
+ opts.bundle_enabled = true;
+ std::unique_ptr<SessionDescription> answer =
+ f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+
+ std::vector<const cricket::ContentGroup*> answer_groups =
+ answer->GetGroupsByName(cricket::GROUP_TYPE_BUNDLE);
+ ASSERT_EQ(answer_groups.size(), 2u);
+ EXPECT_EQ(answer_groups[0]->content_names().size(), 2u);
+ EXPECT_TRUE(answer_groups[0]->HasContentName("1"));
+ EXPECT_TRUE(answer_groups[0]->HasContentName("2"));
+ EXPECT_EQ(answer_groups[1]->content_names().size(), 2u);
+ EXPECT_TRUE(answer_groups[1]->HasContentName("3"));
+ EXPECT_TRUE(answer_groups[1]->HasContentName("4"));
+
+ // If BUNDLE is disabled, the answer to this offer should reject both BUNDLE
+ // groups.
+ opts.bundle_enabled = false;
+ answer = f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+
+ answer_groups = answer->GetGroupsByName(cricket::GROUP_TYPE_BUNDLE);
+ // Rejected groups are still listed, but they are empty.
+ ASSERT_EQ(answer_groups.size(), 2u);
+ EXPECT_TRUE(answer_groups[0]->content_names().empty());
+ EXPECT_TRUE(answer_groups[1]->content_names().empty());
+}
+
+// Test that if the BUNDLE offerer-tagged media section is changed in a reoffer
+// and there is still a non-rejected media section that was in the initial
+// offer, then the ICE credentials do not change in the reoffer offerer-tagged
+// media section.
+TEST_F(MediaSessionDescriptionFactoryTest,
+ ReOfferChangeBundleOffererTaggedKeepsIceCredentials) {
+ MediaSessionOptions opts;
+ opts.bundle_enabled = true;
+ AddAudioVideoSections(RtpTransceiverDirection::kSendRecv, &opts);
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+ std::unique_ptr<SessionDescription> answer =
+ f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+
+ // Reject the audio m= section.
+ opts.media_description_options[0].stopped = true;
+ std::unique_ptr<SessionDescription> reoffer =
+ f1_.CreateOfferOrError(opts, offer.get()).MoveValue();
+
+ // The offerer tag moves from "audio" to "video"; the ICE ufrag/pwd of the
+ // new tagged section must match the original tagged section's.
+ const TransportDescription* offer_tagged =
+ offer->GetTransportDescriptionByName("audio");
+ ASSERT_TRUE(offer_tagged);
+ const TransportDescription* reoffer_tagged =
+ reoffer->GetTransportDescriptionByName("video");
+ ASSERT_TRUE(reoffer_tagged);
+ EXPECT_EQ(offer_tagged->ice_ufrag, reoffer_tagged->ice_ufrag);
+ EXPECT_EQ(offer_tagged->ice_pwd, reoffer_tagged->ice_pwd);
+}
+
+// Test that if the BUNDLE offerer-tagged media section is changed in a reoffer
+// and there is still a non-rejected media section that was in the initial
+// offer, then the ICE credentials do not change in the reanswer answerer-tagged
+// media section.
+TEST_F(MediaSessionDescriptionFactoryTest,
+ ReAnswerChangeBundleOffererTaggedKeepsIceCredentials) {
+ MediaSessionOptions opts;
+ opts.bundle_enabled = true;
+ AddAudioVideoSections(RtpTransceiverDirection::kSendRecv, &opts);
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+ std::unique_ptr<SessionDescription> answer =
+ f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+
+ // Reject the audio m= section.
+ opts.media_description_options[0].stopped = true;
+ std::unique_ptr<SessionDescription> reoffer =
+ f1_.CreateOfferOrError(opts, offer.get()).MoveValue();
+ std::unique_ptr<SessionDescription> reanswer =
+ f2_.CreateAnswerOrError(reoffer.get(), opts, answer.get()).MoveValue();
+
+ // The answerer tag moves from "audio" to "video"; its ICE ufrag/pwd must
+ // be unchanged from the original answer's tagged section.
+ const TransportDescription* answer_tagged =
+ answer->GetTransportDescriptionByName("audio");
+ ASSERT_TRUE(answer_tagged);
+ const TransportDescription* reanswer_tagged =
+ reanswer->GetTransportDescriptionByName("video");
+ ASSERT_TRUE(reanswer_tagged);
+ EXPECT_EQ(answer_tagged->ice_ufrag, reanswer_tagged->ice_ufrag);
+ EXPECT_EQ(answer_tagged->ice_pwd, reanswer_tagged->ice_pwd);
+}
+
+// Create an audio, video offer without legacy StreamParams.
+TEST_F(MediaSessionDescriptionFactoryTest,
+ TestCreateOfferWithoutLegacyStreams) {
+ MediaSessionOptions opts;
+ AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts);
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+ ASSERT_TRUE(offer.get());
+ const ContentInfo* ac = offer->GetContentByName("audio");
+ const ContentInfo* vc = offer->GetContentByName("video");
+ ASSERT_TRUE(ac);
+ ASSERT_TRUE(vc);
+ const AudioContentDescription* acd = ac->media_description()->as_audio();
+ const VideoContentDescription* vcd = vc->media_description()->as_video();
+
+ // No senders were attached, so neither section may carry SSRCs.
+ EXPECT_FALSE(vcd->has_ssrcs()); // No StreamParams.
+ EXPECT_FALSE(acd->has_ssrcs()); // No StreamParams.
+}
+
+// Creates an audio+video sendonly offer.
+TEST_F(MediaSessionDescriptionFactoryTest, TestCreateSendOnlyOffer) {
+ MediaSessionOptions opts;
+ AddAudioVideoSections(RtpTransceiverDirection::kSendOnly, &opts);
+ // Attach one sender per section so the sendonly direction is meaningful.
+ AttachSenderToMediaDescriptionOptions("video", MEDIA_TYPE_VIDEO, kVideoTrack1,
+ {kMediaStream1}, 1, &opts);
+ AttachSenderToMediaDescriptionOptions("audio", MEDIA_TYPE_AUDIO, kAudioTrack1,
+ {kMediaStream1}, 1, &opts);
+
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+ ASSERT_TRUE(offer.get());
+ EXPECT_EQ(2u, offer->contents().size());
+ EXPECT_TRUE(IsMediaContentOfType(&offer->contents()[0], MEDIA_TYPE_AUDIO));
+ EXPECT_TRUE(IsMediaContentOfType(&offer->contents()[1], MEDIA_TYPE_VIDEO));
+
+ EXPECT_EQ(RtpTransceiverDirection::kSendOnly,
+ GetMediaDirection(&offer->contents()[0]));
+ EXPECT_EQ(RtpTransceiverDirection::kSendOnly,
+ GetMediaDirection(&offer->contents()[1]));
+}
+
+// Verifies that the order of the media contents in the current
+// SessionDescription is preserved in the new SessionDescription.
+TEST_F(MediaSessionDescriptionFactoryTest, TestCreateOfferContentOrder) {
+ MediaSessionOptions opts;
+ AddDataSection(RtpTransceiverDirection::kSendRecv, &opts);
+
+ // First offer: data only.
+ std::unique_ptr<SessionDescription> offer1(
+ f1_.CreateOfferOrError(opts, nullptr).MoveValue());
+ ASSERT_TRUE(offer1.get());
+ EXPECT_EQ(1u, offer1->contents().size());
+ EXPECT_TRUE(IsMediaContentOfType(&offer1->contents()[0], MEDIA_TYPE_DATA));
+
+ // Second offer adds video; data must stay first.
+ AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video",
+ RtpTransceiverDirection::kRecvOnly, kActive,
+ &opts);
+ std::unique_ptr<SessionDescription> offer2(
+ f1_.CreateOfferOrError(opts, offer1.get()).MoveValue());
+ ASSERT_TRUE(offer2.get());
+ EXPECT_EQ(2u, offer2->contents().size());
+ EXPECT_TRUE(IsMediaContentOfType(&offer2->contents()[0], MEDIA_TYPE_DATA));
+ EXPECT_TRUE(IsMediaContentOfType(&offer2->contents()[1], MEDIA_TYPE_VIDEO));
+
+ // Third offer adds audio; previous order (data, video) must be preserved.
+ AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio",
+ RtpTransceiverDirection::kRecvOnly, kActive,
+ &opts);
+ std::unique_ptr<SessionDescription> offer3(
+ f1_.CreateOfferOrError(opts, offer2.get()).MoveValue());
+ ASSERT_TRUE(offer3.get());
+ EXPECT_EQ(3u, offer3->contents().size());
+ EXPECT_TRUE(IsMediaContentOfType(&offer3->contents()[0], MEDIA_TYPE_DATA));
+ EXPECT_TRUE(IsMediaContentOfType(&offer3->contents()[1], MEDIA_TYPE_VIDEO));
+ EXPECT_TRUE(IsMediaContentOfType(&offer3->contents()[2], MEDIA_TYPE_AUDIO));
+}
+
+// Create a typical audio answer, and ensure it matches what we expect.
+TEST_F(MediaSessionDescriptionFactoryTest, TestCreateAudioAnswer) {
+ f1_.set_secure(SEC_ENABLED);
+ f2_.set_secure(SEC_ENABLED);
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(CreatePlanBMediaSessionOptions(), nullptr)
+ .MoveValue();
+ ASSERT_TRUE(offer.get());
+ std::unique_ptr<SessionDescription> answer =
+ f2_.CreateAnswerOrError(offer.get(), CreatePlanBMediaSessionOptions(),
+ nullptr)
+ .MoveValue();
+ const ContentInfo* ac = answer->GetContentByName("audio");
+ const ContentInfo* vc = answer->GetContentByName("video");
+ // Audio-only offer => audio-only answer.
+ ASSERT_TRUE(ac);
+ EXPECT_FALSE(vc);
+ EXPECT_EQ(MediaProtocolType::kRtp, ac->type);
+ const AudioContentDescription* acd = ac->media_description()->as_audio();
+ EXPECT_EQ(MEDIA_TYPE_AUDIO, acd->type());
+ EXPECT_THAT(acd->codecs(), ElementsAreArray(kAudioCodecsAnswer));
+ EXPECT_EQ(0U, acd->first_ssrc()); // no sender is attached
+ EXPECT_EQ(kAutoBandwidth, acd->bandwidth()); // negotiated auto bw
+ EXPECT_TRUE(acd->rtcp_mux()); // negotiated rtcp-mux
+ ASSERT_CRYPTO(acd, 1U, kDefaultSrtpCryptoSuite);
+ EXPECT_EQ(cricket::kMediaProtocolSavpf, acd->protocol());
+}
+
+// Create a typical audio answer with GCM ciphers enabled, and ensure it
+// matches what we expect.
+TEST_F(MediaSessionDescriptionFactoryTest, TestCreateAudioAnswerGcm) {
+ f1_.set_secure(SEC_ENABLED);
+ f2_.set_secure(SEC_ENABLED);
+ MediaSessionOptions opts = CreatePlanBMediaSessionOptions();
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+ ASSERT_TRUE(offer.get());
+ // Reorder the offered crypto parameters so the GCM suites come first;
+ // the answer is then expected to select GCM.
+ for (cricket::ContentInfo& content : offer->contents()) {
+ auto cryptos = content.media_description()->cryptos();
+ PreferGcmCryptoParameters(&cryptos);
+ content.media_description()->set_cryptos(cryptos);
+ }
+ std::unique_ptr<SessionDescription> answer =
+ f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+ const ContentInfo* ac = answer->GetContentByName("audio");
+ const ContentInfo* vc = answer->GetContentByName("video");
+ ASSERT_TRUE(ac);
+ EXPECT_FALSE(vc);
+ EXPECT_EQ(MediaProtocolType::kRtp, ac->type);
+ const AudioContentDescription* acd = ac->media_description()->as_audio();
+ EXPECT_EQ(MEDIA_TYPE_AUDIO, acd->type());
+ EXPECT_THAT(acd->codecs(), ElementsAreArray(kAudioCodecsAnswer));
+ EXPECT_EQ(0U, acd->first_ssrc()); // no sender is attached
+ EXPECT_EQ(kAutoBandwidth, acd->bandwidth()); // negotiated auto bw
+ EXPECT_TRUE(acd->rtcp_mux()); // negotiated rtcp-mux
+ ASSERT_CRYPTO(acd, 1U, kDefaultSrtpCryptoSuiteGcm);
+ EXPECT_EQ(cricket::kMediaProtocolSavpf, acd->protocol());
+}
+
+// Create an audio answer with no common codecs, and ensure it is rejected.
+TEST_F(MediaSessionDescriptionFactoryTest,
+ TestCreateAudioAnswerWithNoCommonCodecs) {
+ MediaSessionOptions opts;
+ AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio",
+ RtpTransceiverDirection::kSendRecv, kActive,
+ &opts);
+ // Give the two factories disjoint codec sets.
+ std::vector f1_codecs = {cricket::CreateAudioCodec(96, "opus", 48000, 1)};
+ f1_.set_audio_codecs(f1_codecs, f1_codecs);
+
+ std::vector f2_codecs = {cricket::CreateAudioCodec(0, "PCMU", 8000, 1)};
+ f2_.set_audio_codecs(f2_codecs, f2_codecs);
+
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+ std::unique_ptr<SessionDescription> answer =
+ f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+ const ContentInfo* ac = answer->GetContentByName("audio");
+ ASSERT_TRUE(ac);
+ EXPECT_TRUE(ac->rejected);
+}
+
+// Create a typical video answer, and ensure it matches what we expect.
+TEST_F(MediaSessionDescriptionFactoryTest, TestCreateVideoAnswer) {
+ MediaSessionOptions opts;
+ AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts);
+ f1_.set_secure(SEC_ENABLED);
+ f2_.set_secure(SEC_ENABLED);
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+ ASSERT_TRUE(offer.get());
+ std::unique_ptr<SessionDescription> answer =
+ f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+ const ContentInfo* ac = answer->GetContentByName("audio");
+ const ContentInfo* vc = answer->GetContentByName("video");
+ ASSERT_TRUE(ac);
+ ASSERT_TRUE(vc);
+ EXPECT_EQ(MediaProtocolType::kRtp, ac->type);
+ EXPECT_EQ(MediaProtocolType::kRtp, vc->type);
+ const AudioContentDescription* acd = ac->media_description()->as_audio();
+ const VideoContentDescription* vcd = vc->media_description()->as_video();
+ // Negotiated audio section.
+ EXPECT_EQ(MEDIA_TYPE_AUDIO, acd->type());
+ EXPECT_THAT(acd->codecs(), ElementsAreArray(kAudioCodecsAnswer));
+ EXPECT_EQ(kAutoBandwidth, acd->bandwidth()); // negotiated auto bw
+ EXPECT_EQ(0U, acd->first_ssrc()); // no sender is attached
+ EXPECT_TRUE(acd->rtcp_mux()); // negotiated rtcp-mux
+ ASSERT_CRYPTO(acd, 1U, kDefaultSrtpCryptoSuite);
+ // Negotiated video section.
+ EXPECT_EQ(MEDIA_TYPE_VIDEO, vcd->type());
+ EXPECT_THAT(vcd->codecs(), ElementsAreArray(kVideoCodecsAnswer));
+ EXPECT_EQ(0U, vcd->first_ssrc()); // no sender is attached
+ EXPECT_TRUE(vcd->rtcp_mux()); // negotiated rtcp-mux
+ ASSERT_CRYPTO(vcd, 1U, kDefaultSrtpCryptoSuite);
+ EXPECT_EQ(cricket::kMediaProtocolSavpf, vcd->protocol());
+}
+
+// Create a typical video answer with GCM ciphers enabled, and ensure it
+// matches what we expect. (Both sides enable GCM => GCM negotiated.)
+TEST_F(MediaSessionDescriptionFactoryTest, TestCreateVideoAnswerGcm) {
+ TestVideoGcmCipher(true, true);
+}
+
+// Create a typical video answer with GCM ciphers enabled for the offer only,
+// and ensure it matches what we expect. (Falls back to the default suite.)
+TEST_F(MediaSessionDescriptionFactoryTest, TestCreateVideoAnswerGcmOffer) {
+ TestVideoGcmCipher(true, false);
+}
+
+// Create a typical video answer with GCM ciphers enabled for the answer only,
+// and ensure it matches what we expect. (Falls back to the default suite.)
+TEST_F(MediaSessionDescriptionFactoryTest, TestCreateVideoAnswerGcmAnswer) {
+ TestVideoGcmCipher(false, true);
+}
+
+// Create a video answer with no common codecs, and ensure it is rejected.
+TEST_F(MediaSessionDescriptionFactoryTest,
+ TestCreateVideoAnswerWithNoCommonCodecs) {
+ MediaSessionOptions opts;
+ AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video",
+ RtpTransceiverDirection::kSendRecv, kActive,
+ &opts);
+ // Give the two factories disjoint video codec sets.
+ std::vector f1_codecs = {cricket::CreateVideoCodec(96, "H264")};
+ f1_.set_video_codecs(f1_codecs, f1_codecs);
+
+ std::vector f2_codecs = {cricket::CreateVideoCodec(97, "VP8")};
+ f2_.set_video_codecs(f2_codecs, f2_codecs);
+
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+ std::unique_ptr<SessionDescription> answer =
+ f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+ const ContentInfo* vc = answer->GetContentByName("video");
+ ASSERT_TRUE(vc);
+ EXPECT_TRUE(vc->rejected);
+}
+
+// Create a video answer with no common codecs (but a common FEC codec), and
+// ensure it is rejected.
+TEST_F(MediaSessionDescriptionFactoryTest,
+       TestCreateVideoAnswerWithOnlyFecCodecsCommon) {
+  MediaSessionOptions opts;
+  AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video",
+                             RtpTransceiverDirection::kSendRecv, kActive,
+                             &opts);
+  // The only codec shared by the two factories is flexfec-03; a common FEC
+  // codec alone must not be enough to accept the section.
+  std::vector f1_codecs = {cricket::CreateVideoCodec(96, "H264"),
+                           cricket::CreateVideoCodec(118, "flexfec-03")};
+  f1_.set_video_codecs(f1_codecs, f1_codecs);
+
+  std::vector f2_codecs = {cricket::CreateVideoCodec(97, "VP8"),
+                           cricket::CreateVideoCodec(118, "flexfec-03")};
+  f2_.set_video_codecs(f2_codecs, f2_codecs);
+
+  std::unique_ptr<SessionDescription> offer =
+      f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+  std::unique_ptr<SessionDescription> answer =
+      f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+  const ContentInfo* vc = answer->GetContentByName("video");
+  ASSERT_TRUE(vc);
+  EXPECT_TRUE(vc->rejected);
+}
+
+// The use_sctpmap flag should be set in an Sctp DataContentDescription by
+// default. The answer's use_sctpmap flag should match the offer's.
+TEST_F(MediaSessionDescriptionFactoryTest, TestCreateDataAnswerUsesSctpmap) {
+  MediaSessionOptions opts;
+  AddDataSection(RtpTransceiverDirection::kSendRecv, &opts);
+  std::unique_ptr<SessionDescription> offer =
+      f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+  ASSERT_TRUE(offer.get());
+  ContentInfo* dc_offer = offer->GetContentByName("data");
+  ASSERT_TRUE(dc_offer);
+  SctpDataContentDescription* dcd_offer =
+      dc_offer->media_description()->as_sctp();
+  // Guard against a null description before dereferencing; keeps this test
+  // consistent with TestCreateDataAnswerToDifferentOfferedProtos.
+  ASSERT_TRUE(dcd_offer);
+  EXPECT_TRUE(dcd_offer->use_sctpmap());
+
+  std::unique_ptr<SessionDescription> answer =
+      f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+  const ContentInfo* dc_answer = answer->GetContentByName("data");
+  ASSERT_TRUE(dc_answer);
+  const SctpDataContentDescription* dcd_answer =
+      dc_answer->media_description()->as_sctp();
+  ASSERT_TRUE(dcd_answer);
+  // The answer's use_sctpmap flag must mirror the offer's (true by default).
+  EXPECT_TRUE(dcd_answer->use_sctpmap());
+}
+
+// The answer's use_sctpmap flag should match the offer's.
+TEST_F(MediaSessionDescriptionFactoryTest, TestCreateDataAnswerWithoutSctpmap) {
+  MediaSessionOptions opts;
+  AddDataSection(RtpTransceiverDirection::kSendRecv, &opts);
+  std::unique_ptr<SessionDescription> offer =
+      f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+  ASSERT_TRUE(offer.get());
+  ContentInfo* dc_offer = offer->GetContentByName("data");
+  ASSERT_TRUE(dc_offer);
+  SctpDataContentDescription* dcd_offer =
+      dc_offer->media_description()->as_sctp();
+  // Guard against a null description before mutating it; keeps this test
+  // consistent with TestCreateDataAnswerToDifferentOfferedProtos.
+  ASSERT_TRUE(dcd_offer);
+  // Clear the flag in the offer; the answer must mirror it.
+  dcd_offer->set_use_sctpmap(false);
+
+  std::unique_ptr<SessionDescription> answer =
+      f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+  const ContentInfo* dc_answer = answer->GetContentByName("data");
+  ASSERT_TRUE(dc_answer);
+  const SctpDataContentDescription* dcd_answer =
+      dc_answer->media_description()->as_sctp();
+  ASSERT_TRUE(dcd_answer);
+  EXPECT_FALSE(dcd_answer->use_sctpmap());
+}
+
+// Test that a valid answer will be created for "DTLS/SCTP", "UDP/DTLS/SCTP"
+// and "TCP/DTLS/SCTP" offers.
+TEST_F(MediaSessionDescriptionFactoryTest,
+       TestCreateDataAnswerToDifferentOfferedProtos) {
+  // Need to enable DTLS offer/answer generation (disabled by default in this
+  // test).
+  f1_.set_secure(SEC_ENABLED);
+  f2_.set_secure(SEC_ENABLED);
+  tdf1_.set_secure(SEC_ENABLED);
+  tdf2_.set_secure(SEC_ENABLED);
+
+  MediaSessionOptions opts;
+  AddDataSection(RtpTransceiverDirection::kSendRecv, &opts);
+  std::unique_ptr<SessionDescription> offer =
+      f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+  ASSERT_TRUE(offer.get());
+  ContentInfo* dc_offer = offer->GetContentByName("data");
+  ASSERT_TRUE(dc_offer);
+  SctpDataContentDescription* dcd_offer =
+      dc_offer->media_description()->as_sctp();
+  ASSERT_TRUE(dcd_offer);
+
+  // Rewrite the offered protocol in place and re-answer; every variant must
+  // be accepted and echoed back verbatim.
+  std::vector<std::string> protos = {"DTLS/SCTP", "UDP/DTLS/SCTP",
+                                     "TCP/DTLS/SCTP"};
+  for (const std::string& proto : protos) {
+    dcd_offer->set_protocol(proto);
+    std::unique_ptr<SessionDescription> answer =
+        f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+    const ContentInfo* dc_answer = answer->GetContentByName("data");
+    ASSERT_TRUE(dc_answer);
+    const SctpDataContentDescription* dcd_answer =
+        dc_answer->media_description()->as_sctp();
+    // Fail cleanly (instead of crashing) if the answer is not SCTP.
+    ASSERT_TRUE(dcd_answer);
+    EXPECT_FALSE(dc_answer->rejected);
+    EXPECT_EQ(proto, dcd_answer->protocol());
+  }
+}
+
+TEST_F(MediaSessionDescriptionFactoryTest,
+       TestCreateDataAnswerToOfferWithDefinedMessageSize) {
+  // Need to enable DTLS offer/answer generation (disabled by default in this
+  // test).
+  f1_.set_secure(SEC_ENABLED);
+  f2_.set_secure(SEC_ENABLED);
+  tdf1_.set_secure(SEC_ENABLED);
+  tdf2_.set_secure(SEC_ENABLED);
+
+  MediaSessionOptions opts;
+  AddDataSection(RtpTransceiverDirection::kSendRecv, &opts);
+  std::unique_ptr<SessionDescription> offer =
+      f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+  ASSERT_TRUE(offer.get());
+  ContentInfo* dc_offer = offer->GetContentByName("data");
+  ASSERT_TRUE(dc_offer);
+  SctpDataContentDescription* dcd_offer =
+      dc_offer->media_description()->as_sctp();
+  ASSERT_TRUE(dcd_offer);
+  // A nonzero offered max-message-size must be echoed in the answer.
+  dcd_offer->set_max_message_size(1234);
+  std::unique_ptr<SessionDescription> answer =
+      f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+  const ContentInfo* dc_answer = answer->GetContentByName("data");
+  ASSERT_TRUE(dc_answer);
+  const SctpDataContentDescription* dcd_answer =
+      dc_answer->media_description()->as_sctp();
+  // Fail cleanly (instead of crashing) if the answer is not SCTP.
+  ASSERT_TRUE(dcd_answer);
+  EXPECT_FALSE(dc_answer->rejected);
+  EXPECT_EQ(1234, dcd_answer->max_message_size());
+}
+
+TEST_F(MediaSessionDescriptionFactoryTest,
+       TestCreateDataAnswerToOfferWithZeroMessageSize) {
+  // Need to enable DTLS offer/answer generation (disabled by default in this
+  // test).
+  f1_.set_secure(SEC_ENABLED);
+  f2_.set_secure(SEC_ENABLED);
+  tdf1_.set_secure(SEC_ENABLED);
+  tdf2_.set_secure(SEC_ENABLED);
+
+  MediaSessionOptions opts;
+  AddDataSection(RtpTransceiverDirection::kSendRecv, &opts);
+  std::unique_ptr<SessionDescription> offer =
+      f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+  ASSERT_TRUE(offer.get());
+  ContentInfo* dc_offer = offer->GetContentByName("data");
+  ASSERT_TRUE(dc_offer);
+  SctpDataContentDescription* dcd_offer =
+      dc_offer->media_description()->as_sctp();
+  ASSERT_TRUE(dcd_offer);
+  // With an offered size of zero the answer must fall back to the default
+  // kSctpSendBufferSize.
+  dcd_offer->set_max_message_size(0);
+  std::unique_ptr<SessionDescription> answer =
+      f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+  const ContentInfo* dc_answer = answer->GetContentByName("data");
+  ASSERT_TRUE(dc_answer);
+  const SctpDataContentDescription* dcd_answer =
+      dc_answer->media_description()->as_sctp();
+  // Fail cleanly (instead of crashing) if the answer is not SCTP.
+  ASSERT_TRUE(dcd_answer);
+  EXPECT_FALSE(dc_answer->rejected);
+  EXPECT_EQ(cricket::kSctpSendBufferSize, dcd_answer->max_message_size());
+}
+
+// Verifies that the order of the media contents in the offer is preserved in
+// the answer.
+TEST_F(MediaSessionDescriptionFactoryTest, TestCreateAnswerContentOrder) {
+  MediaSessionOptions opts;
+
+  // Creates a data only offer.
+  AddDataSection(RtpTransceiverDirection::kSendRecv, &opts);
+  std::unique_ptr<SessionDescription> offer1(
+      f1_.CreateOfferOrError(opts, nullptr).MoveValue());
+  ASSERT_TRUE(offer1.get());
+
+  // Appends audio to the offer.  Passing the previous offer as the current
+  // description makes the new section append rather than replace.
+  AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio",
+                             RtpTransceiverDirection::kRecvOnly, kActive,
+                             &opts);
+  std::unique_ptr<SessionDescription> offer2(
+      f1_.CreateOfferOrError(opts, offer1.get()).MoveValue());
+  ASSERT_TRUE(offer2.get());
+
+  // Appends video to the offer.
+  AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video",
+                             RtpTransceiverDirection::kRecvOnly, kActive,
+                             &opts);
+  std::unique_ptr<SessionDescription> offer3(
+      f1_.CreateOfferOrError(opts, offer2.get()).MoveValue());
+  ASSERT_TRUE(offer3.get());
+
+  std::unique_ptr<SessionDescription> answer =
+      f2_.CreateAnswerOrError(offer3.get(), opts, nullptr).MoveValue();
+  ASSERT_TRUE(answer.get());
+  // The answer must list the contents in the order they were offered:
+  // data, then audio, then video.
+  EXPECT_EQ(3u, answer->contents().size());
+  EXPECT_TRUE(IsMediaContentOfType(&answer->contents()[0], MEDIA_TYPE_DATA));
+  EXPECT_TRUE(IsMediaContentOfType(&answer->contents()[1], MEDIA_TYPE_AUDIO));
+  EXPECT_TRUE(IsMediaContentOfType(&answer->contents()[2], MEDIA_TYPE_VIDEO));
+}
+
+// TODO(deadbeef): Extend these tests to ensure the correct direction with other
+// answerer settings.
+
+// This test verifies that the media direction is set to send/receive in an
+// answer if the offer is send/receive.
+TEST_F(MediaSessionDescriptionFactoryTest, CreateAnswerToSendReceiveOffer) {
+  TestMediaDirectionInAnswer(RtpTransceiverDirection::kSendRecv,
+                             RtpTransceiverDirection::kSendRecv);
+}
+
+// This test verifies that the media direction is set to receive only in an
+// answer if the offer is send only.
+TEST_F(MediaSessionDescriptionFactoryTest, CreateAnswerToSendOnlyOffer) {
+  TestMediaDirectionInAnswer(RtpTransceiverDirection::kSendOnly,
+                             RtpTransceiverDirection::kRecvOnly);
+}
+
+// This test verifies that the media direction is set to send only in an
+// answer if the offer is recv only.
+TEST_F(MediaSessionDescriptionFactoryTest, CreateAnswerToRecvOnlyOffer) {
+  TestMediaDirectionInAnswer(RtpTransceiverDirection::kRecvOnly,
+                             RtpTransceiverDirection::kSendOnly);
+}
+
+// This test verifies that the media direction is set to inactive in an
+// answer if the offer is inactive.
+TEST_F(MediaSessionDescriptionFactoryTest, CreateAnswerToInactiveOffer) {
+  TestMediaDirectionInAnswer(RtpTransceiverDirection::kInactive,
+                             RtpTransceiverDirection::kInactive);
+}
+
+// Test that the media protocol is RTP/AVPF if DTLS and SDES are disabled.
+TEST_F(MediaSessionDescriptionFactoryTest, AudioOfferAnswerWithCryptoDisabled) {
+  MediaSessionOptions opts = CreatePlanBMediaSessionOptions();
+  // With SDES and DTLS both disabled, plain RTP/AVPF must be negotiated in
+  // both offer and answer.
+  f1_.set_secure(SEC_DISABLED);
+  f2_.set_secure(SEC_DISABLED);
+  tdf1_.set_secure(SEC_DISABLED);
+  tdf2_.set_secure(SEC_DISABLED);
+
+  std::unique_ptr<SessionDescription> offer =
+      f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+  const AudioContentDescription* offer_acd =
+      GetFirstAudioContentDescription(offer.get());
+  ASSERT_TRUE(offer_acd);
+  EXPECT_EQ(cricket::kMediaProtocolAvpf, offer_acd->protocol());
+
+  std::unique_ptr<SessionDescription> answer =
+      f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+
+  const ContentInfo* ac_answer = answer->GetContentByName("audio");
+  ASSERT_TRUE(ac_answer);
+  EXPECT_FALSE(ac_answer->rejected);
+
+  const AudioContentDescription* answer_acd =
+      GetFirstAudioContentDescription(answer.get());
+  ASSERT_TRUE(answer_acd);
+  EXPECT_EQ(cricket::kMediaProtocolAvpf, answer_acd->protocol());
+}
+
+// Create a video offer and answer and ensure the RTP header extensions
+// matches what we expect.
+TEST_F(MediaSessionDescriptionFactoryTest, TestOfferAnswerWithRtpExtensions) {
+  MediaSessionOptions opts;
+  AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts);
+  SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension1),
+                                   MAKE_VECTOR(kVideoRtpExtension1), &opts);
+
+  std::unique_ptr<SessionDescription> offer =
+      f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+  ASSERT_TRUE(offer.get());
+  // The answerer advertises a different extension set; the resulting answer
+  // is checked against the k*RtpExtensionAnswer constants.
+  SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension2),
+                                   MAKE_VECTOR(kVideoRtpExtension2), &opts);
+  std::unique_ptr<SessionDescription> answer =
+      f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+
+  EXPECT_EQ(
+      MAKE_VECTOR(kAudioRtpExtension1),
+      GetFirstAudioContentDescription(offer.get())->rtp_header_extensions());
+  EXPECT_EQ(
+      MAKE_VECTOR(kVideoRtpExtension1),
+      GetFirstVideoContentDescription(offer.get())->rtp_header_extensions());
+  EXPECT_EQ(
+      MAKE_VECTOR(kAudioRtpExtensionAnswer),
+      GetFirstAudioContentDescription(answer.get())->rtp_header_extensions());
+  EXPECT_EQ(
+      MAKE_VECTOR(kVideoRtpExtensionAnswer),
+      GetFirstVideoContentDescription(answer.get())->rtp_header_extensions());
+}
+
+// Create an audio/video offer and answer and ensure that the
+// TransportSequenceNumber RTP header extensions are handled correctly. 02 is
+// supported and should take precedence even though it is not listed among the
+// locally supported extensions.
+TEST_F(MediaSessionDescriptionFactoryTest,
+       TestOfferAnswerWithTransportSequenceNumberInOffer) {
+  // Local, offered, and answered sets all agree on version 01.
+  TestTransportSequenceNumberNegotiation(
+      MAKE_VECTOR(kRtpExtensionTransportSequenceNumber01),  // Local.
+      MAKE_VECTOR(kRtpExtensionTransportSequenceNumber01),  // Offer.
+      MAKE_VECTOR(kRtpExtensionTransportSequenceNumber01));  // Expected answer.
+}
+TEST_F(MediaSessionDescriptionFactoryTest,
+       TestOfferAnswerWithTransportSequenceNumber01And02InOffer) {
+  // When both 01 and 02 are offered, 02 wins even with only 01 local.
+  TestTransportSequenceNumberNegotiation(
+      MAKE_VECTOR(kRtpExtensionTransportSequenceNumber01),      // Local.
+      MAKE_VECTOR(kRtpExtensionTransportSequenceNumber01And02),  // Offer.
+      MAKE_VECTOR(kRtpExtensionTransportSequenceNumber02));  // Expected answer.
+}
+TEST_F(MediaSessionDescriptionFactoryTest,
+       TestOfferAnswerWithTransportSequenceNumber02InOffer) {
+  // 02 alone in the offer is accepted even though only 01 is local.
+  TestTransportSequenceNumberNegotiation(
+      MAKE_VECTOR(kRtpExtensionTransportSequenceNumber01),  // Local.
+      MAKE_VECTOR(kRtpExtensionTransportSequenceNumber02),  // Offer.
+      MAKE_VECTOR(kRtpExtensionTransportSequenceNumber02));  // Expected answer.
+}
+
+TEST_F(MediaSessionDescriptionFactoryTest,
+       TestNegotiateFrameDescriptorWhenUnexposedLocally) {
+  MediaSessionOptions opts;
+  AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts);
+
+  // Offer the generic frame descriptor extension...
+  SetAudioVideoRtpHeaderExtensions(
+      MAKE_VECTOR(kRtpExtensionGenericFrameDescriptorUri00),
+      MAKE_VECTOR(kRtpExtensionGenericFrameDescriptorUri00), &opts);
+  std::unique_ptr<SessionDescription> offer =
+      f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+  // ...while the answerer only lists transport sequence number locally.
+  SetAudioVideoRtpHeaderExtensions(
+      MAKE_VECTOR(kRtpExtensionTransportSequenceNumber01),
+      MAKE_VECTOR(kRtpExtensionTransportSequenceNumber01), &opts);
+  std::unique_ptr<SessionDescription> answer =
+      f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+  // The answer still contains the offered frame descriptor extension.
+  EXPECT_THAT(
+      GetFirstAudioContentDescription(answer.get())->rtp_header_extensions(),
+      ElementsAreArray(kRtpExtensionGenericFrameDescriptorUri00));
+  EXPECT_THAT(
+      GetFirstVideoContentDescription(answer.get())->rtp_header_extensions(),
+      ElementsAreArray(kRtpExtensionGenericFrameDescriptorUri00));
+}
+
+TEST_F(MediaSessionDescriptionFactoryTest,
+       TestNegotiateFrameDescriptorWhenExposedLocally) {
+  MediaSessionOptions opts;
+  AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts);
+
+  // Both sides use the same generic frame descriptor extension set; the
+  // answer must contain it for audio and video.
+  SetAudioVideoRtpHeaderExtensions(
+      MAKE_VECTOR(kRtpExtensionGenericFrameDescriptorUri00),
+      MAKE_VECTOR(kRtpExtensionGenericFrameDescriptorUri00), &opts);
+  std::unique_ptr<SessionDescription> offer =
+      f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+  std::unique_ptr<SessionDescription> answer =
+      f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+  EXPECT_THAT(
+      GetFirstAudioContentDescription(answer.get())->rtp_header_extensions(),
+      ElementsAreArray(kRtpExtensionGenericFrameDescriptorUri00));
+  EXPECT_THAT(
+      GetFirstVideoContentDescription(answer.get())->rtp_header_extensions(),
+      ElementsAreArray(kRtpExtensionGenericFrameDescriptorUri00));
+}
+
+TEST_F(MediaSessionDescriptionFactoryTest,
+       NegotiateDependencyDescriptorWhenUnexposedLocally) {
+  MediaSessionOptions opts;
+  AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts);
+
+  RtpExtension offer_dd(RtpExtension::kDependencyDescriptorUri, 7);
+  SetAudioVideoRtpHeaderExtensions({}, {offer_dd}, &opts);
+  std::unique_ptr<SessionDescription> offer =
+      f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+  // The answerer only lists transport sequence number locally.
+  RtpExtension local_tsn(RtpExtension::kTransportSequenceNumberUri, 5);
+  SetAudioVideoRtpHeaderExtensions({}, {local_tsn}, &opts);
+  std::unique_ptr<SessionDescription> answer =
+      f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+  // The offered dependency descriptor (id 7) is still present in the answer.
+  EXPECT_THAT(
+      GetFirstVideoContentDescription(answer.get())->rtp_header_extensions(),
+      ElementsAre(offer_dd));
+}
+
+TEST_F(MediaSessionDescriptionFactoryTest,
+       NegotiateDependencyDescriptorWhenExposedLocally) {
+  MediaSessionOptions opts;
+  AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts);
+
+  // Offerer uses id 7 for the dependency descriptor, answerer uses id 5.
+  RtpExtension offer_dd(RtpExtension::kDependencyDescriptorUri, 7);
+  RtpExtension local_dd(RtpExtension::kDependencyDescriptorUri, 5);
+  SetAudioVideoRtpHeaderExtensions({}, {offer_dd}, &opts);
+  std::unique_ptr<SessionDescription> offer =
+      f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+  SetAudioVideoRtpHeaderExtensions({}, {local_dd}, &opts);
+  std::unique_ptr<SessionDescription> answer =
+      f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+  // The answer keeps the offered id (7), not the local id (5).
+  EXPECT_THAT(
+      GetFirstVideoContentDescription(answer.get())->rtp_header_extensions(),
+      ElementsAre(offer_dd));
+}
+
+TEST_F(MediaSessionDescriptionFactoryTest,
+       NegotiateAbsoluteCaptureTimeWhenUnexposedLocally) {
+  MediaSessionOptions opts;
+  AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts);
+
+  // abs-capture-time is offered, while the answerer only lists transport
+  // sequence number locally.
+  const cricket::RtpHeaderExtensions offered_extensions = {
+      RtpExtension(RtpExtension::kAbsoluteCaptureTimeUri, 7)};
+  const cricket::RtpHeaderExtensions local_extensions = {
+      RtpExtension(RtpExtension::kTransportSequenceNumberUri, 5)};
+  SetAudioVideoRtpHeaderExtensions(offered_extensions, offered_extensions,
+                                   &opts);
+  std::unique_ptr<SessionDescription> offer =
+      f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+  SetAudioVideoRtpHeaderExtensions(local_extensions, local_extensions, &opts);
+  std::unique_ptr<SessionDescription> answer =
+      f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+  // The offered abs-capture-time extension is mirrored back in both sections.
+  EXPECT_THAT(
+      GetFirstVideoContentDescription(answer.get())->rtp_header_extensions(),
+      ElementsAreArray(offered_extensions));
+  EXPECT_THAT(
+      GetFirstAudioContentDescription(answer.get())->rtp_header_extensions(),
+      ElementsAreArray(offered_extensions));
+}
+
+TEST_F(MediaSessionDescriptionFactoryTest,
+       NegotiateAbsoluteCaptureTimeWhenExposedLocally) {
+  MediaSessionOptions opts;
+  AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts);
+
+  // Both sides list abs-capture-time, but with different ids (7 vs 5).
+  const cricket::RtpHeaderExtensions offered_extensions = {
+      RtpExtension(RtpExtension::kAbsoluteCaptureTimeUri, 7)};
+  const cricket::RtpHeaderExtensions local_extensions = {
+      RtpExtension(RtpExtension::kAbsoluteCaptureTimeUri, 5)};
+  SetAudioVideoRtpHeaderExtensions(offered_extensions, offered_extensions,
+                                   &opts);
+  std::unique_ptr<SessionDescription> offer =
+      f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+  SetAudioVideoRtpHeaderExtensions(local_extensions, local_extensions, &opts);
+  std::unique_ptr<SessionDescription> answer =
+      f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+  // The answer carries the offered extension with the offerer's id (7).
+  EXPECT_THAT(
+      GetFirstVideoContentDescription(answer.get())->rtp_header_extensions(),
+      ElementsAreArray(offered_extensions));
+  EXPECT_THAT(
+      GetFirstAudioContentDescription(answer.get())->rtp_header_extensions(),
+      ElementsAreArray(offered_extensions));
+}
+
+TEST_F(MediaSessionDescriptionFactoryTest,
+       DoNotNegotiateAbsoluteCaptureTimeWhenNotOffered) {
+  MediaSessionOptions opts;
+  AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts);
+
+  // Only transport sequence number is offered; abs-capture-time is local
+  // only and therefore must not show up in the answer.
+  const cricket::RtpHeaderExtensions offered_extensions = {
+      RtpExtension(RtpExtension::kTransportSequenceNumberUri, 7)};
+  const cricket::RtpHeaderExtensions local_extensions = {
+      RtpExtension(RtpExtension::kAbsoluteCaptureTimeUri, 5)};
+  SetAudioVideoRtpHeaderExtensions(offered_extensions, offered_extensions,
+                                   &opts);
+  std::unique_ptr<SessionDescription> offer =
+      f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+  SetAudioVideoRtpHeaderExtensions(local_extensions, local_extensions, &opts);
+  std::unique_ptr<SessionDescription> answer =
+      f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+  // No extension survives negotiation in either section.
+  EXPECT_THAT(
+      GetFirstVideoContentDescription(answer.get())->rtp_header_extensions(),
+      IsEmpty());
+  EXPECT_THAT(
+      GetFirstAudioContentDescription(answer.get())->rtp_header_extensions(),
+      IsEmpty());
+}
+
+TEST_F(MediaSessionDescriptionFactoryTest,
+       OffersUnstoppedExtensionsWithAudioVideoExtensionStopped) {
+  MediaSessionOptions opts;
+  AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio",
+                             RtpTransceiverDirection::kSendRecv, kActive,
+                             &opts);
+  opts.media_description_options.back().header_extensions = {
+      webrtc::RtpHeaderExtensionCapability("uri1", 1,
+                                           RtpTransceiverDirection::kStopped),
+      webrtc::RtpHeaderExtensionCapability("uri2", 3,
+                                           RtpTransceiverDirection::kSendOnly)};
+  AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video1",
+                             RtpTransceiverDirection::kSendRecv, kActive,
+                             &opts);
+  opts.media_description_options.back().header_extensions = {
+      webrtc::RtpHeaderExtensionCapability("uri1", 1,
+                                           RtpTransceiverDirection::kStopped),
+      webrtc::RtpHeaderExtensionCapability("uri3", 7,
+                                           RtpTransceiverDirection::kSendOnly)};
+  auto offer = f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+  // uri1 is stopped in both sections, so the offer contains only uri2 (audio)
+  // and uri3 (video).
+  EXPECT_THAT(
+      offer->contents(),
+      ElementsAre(
+          Property(&ContentInfo::media_description,
+                   Pointee(Property(
+                       &MediaContentDescription::rtp_header_extensions,
+                       ElementsAre(Field(&RtpExtension::uri, "uri2"))))),
+          Property(&ContentInfo::media_description,
+                   Pointee(Property(
+                       &MediaContentDescription::rtp_header_extensions,
+                       ElementsAre(Field(&RtpExtension::uri, "uri3")))))));
+}
+
+TEST_F(MediaSessionDescriptionFactoryTest,
+       OffersUnstoppedExtensionsWithAudioExtensionStopped) {
+  MediaSessionOptions opts;
+  AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio",
+                             RtpTransceiverDirection::kSendRecv, kActive,
+                             &opts);
+  opts.media_description_options.back().header_extensions = {
+      webrtc::RtpHeaderExtensionCapability("uri1", 1,
+                                           RtpTransceiverDirection::kSendOnly),
+      webrtc::RtpHeaderExtensionCapability("uri2", 3,
+                                           RtpTransceiverDirection::kStopped)};
+  AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video1",
+                             RtpTransceiverDirection::kSendRecv, kActive,
+                             &opts);
+  opts.media_description_options.back().header_extensions = {
+      webrtc::RtpHeaderExtensionCapability("uri42", 42,
+                                           RtpTransceiverDirection::kSendRecv),
+      webrtc::RtpHeaderExtensionCapability("uri3", 7,
+                                           RtpTransceiverDirection::kSendOnly)};
+  auto offer = f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+  // Audio offers only uri1 (uri2 is stopped); video offers uri3 and uri42.
+  EXPECT_THAT(
+      offer->contents(),
+      ElementsAre(
+          Property(&ContentInfo::media_description,
+                   Pointee(Property(
+                       &MediaContentDescription::rtp_header_extensions,
+                       ElementsAre(Field(&RtpExtension::uri, "uri1"))))),
+          Property(
+              &ContentInfo::media_description,
+              Pointee(Property(
+                  &MediaContentDescription::rtp_header_extensions,
+                  UnorderedElementsAre(Field(&RtpExtension::uri, "uri3"),
+                                       Field(&RtpExtension::uri, "uri42")))))));
+}
+
+TEST_F(MediaSessionDescriptionFactoryTest,
+       OffersUnstoppedExtensionsWithVideoExtensionStopped) {
+  MediaSessionOptions opts;
+  AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio",
+                             RtpTransceiverDirection::kSendRecv, kActive,
+                             &opts);
+  opts.media_description_options.back().header_extensions = {
+      webrtc::RtpHeaderExtensionCapability("uri1", 5,
+                                           RtpTransceiverDirection::kSendOnly),
+      webrtc::RtpHeaderExtensionCapability("uri2", 7,
+                                           RtpTransceiverDirection::kSendRecv)};
+  AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video1",
+                             RtpTransceiverDirection::kSendRecv, kActive,
+                             &opts);
+  opts.media_description_options.back().header_extensions = {
+      webrtc::RtpHeaderExtensionCapability("uri42", 42,
+                                           RtpTransceiverDirection::kSendRecv),
+      webrtc::RtpHeaderExtensionCapability("uri3", 7,
+                                           RtpTransceiverDirection::kStopped)};
+  auto offer = f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+  // Audio offers uri1 and uri2; video offers only uri42 (uri3 is stopped).
+  EXPECT_THAT(
+      offer->contents(),
+      ElementsAre(
+          Property(
+              &ContentInfo::media_description,
+              Pointee(Property(
+                  &MediaContentDescription::rtp_header_extensions,
+                  UnorderedElementsAre(Field(&RtpExtension::uri, "uri1"),
+                                       Field(&RtpExtension::uri, "uri2"))))),
+          Property(&ContentInfo::media_description,
+                   Pointee(Property(
+                       &MediaContentDescription::rtp_header_extensions,
+                       ElementsAre(Field(&RtpExtension::uri, "uri42")))))));
+}
+
+TEST_F(MediaSessionDescriptionFactoryTest, AnswersUnstoppedExtensions) {
+  MediaSessionOptions opts;
+  AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio",
+                             RtpTransceiverDirection::kSendRecv, kActive,
+                             &opts);
+  opts.media_description_options.back().header_extensions = {
+      webrtc::RtpHeaderExtensionCapability("uri1", 4,
+                                           RtpTransceiverDirection::kStopped),
+      webrtc::RtpHeaderExtensionCapability("uri2", 3,
+                                           RtpTransceiverDirection::kSendOnly),
+      webrtc::RtpHeaderExtensionCapability("uri3", 2,
+                                           RtpTransceiverDirection::kRecvOnly),
+      webrtc::RtpHeaderExtensionCapability("uri4", 1,
+                                           RtpTransceiverDirection::kSendRecv)};
+  auto offer = f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+  opts.media_description_options.back().header_extensions = {
+      webrtc::RtpHeaderExtensionCapability("uri1", 4,
+                                           RtpTransceiverDirection::kSendOnly),
+      webrtc::RtpHeaderExtensionCapability("uri2", 3,
+                                           RtpTransceiverDirection::kRecvOnly),
+      webrtc::RtpHeaderExtensionCapability("uri3", 2,
+                                           RtpTransceiverDirection::kStopped),
+      webrtc::RtpHeaderExtensionCapability("uri4", 1,
+                                           RtpTransceiverDirection::kSendRecv)};
+  auto answer = f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+  // uri1 was stopped on the offering side and uri3 is stopped locally,
+  // leaving only uri2 and uri4 in the answer.
+  EXPECT_THAT(
+      answer->contents(),
+      ElementsAre(Property(
+          &ContentInfo::media_description,
+          Pointee(Property(&MediaContentDescription::rtp_header_extensions,
+                           ElementsAre(Field(&RtpExtension::uri, "uri2"),
+                                       Field(&RtpExtension::uri, "uri4")))))));
+}
+
+TEST_F(MediaSessionDescriptionFactoryTest,
+       AppendsUnstoppedExtensionsToCurrentDescription) {
+  MediaSessionOptions opts;
+  AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio",
+                             RtpTransceiverDirection::kSendRecv, kActive,
+                             &opts);
+  opts.media_description_options.back().header_extensions = {
+      webrtc::RtpHeaderExtensionCapability("uri1", 1,
+                                           RtpTransceiverDirection::kSendRecv)};
+  auto offer = f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+  opts.media_description_options.back().header_extensions = {
+      webrtc::RtpHeaderExtensionCapability("uri1", 2,
+                                           RtpTransceiverDirection::kSendRecv),
+      webrtc::RtpHeaderExtensionCapability("uri2", 3,
+                                           RtpTransceiverDirection::kRecvOnly),
+      webrtc::RtpHeaderExtensionCapability("uri3", 5,
+                                           RtpTransceiverDirection::kStopped),
+      webrtc::RtpHeaderExtensionCapability("uri4", 6,
+                                           RtpTransceiverDirection::kSendRecv)};
+  auto offer2 = f1_.CreateOfferOrError(opts, offer.get()).MoveValue();
+  // The follow-up offer keeps uri1 from the current description and appends
+  // the newly added uri2 and uri4; the stopped uri3 is omitted.
+  EXPECT_THAT(
+      offer2->contents(),
+      ElementsAre(Property(
+          &ContentInfo::media_description,
+          Pointee(Property(&MediaContentDescription::rtp_header_extensions,
+                           ElementsAre(Field(&RtpExtension::uri, "uri1"),
+                                       Field(&RtpExtension::uri, "uri2"),
+                                       Field(&RtpExtension::uri, "uri4")))))));
+}
+
+TEST_F(MediaSessionDescriptionFactoryTest,
+       AllowsStoppedExtensionsToBeRemovedFromSubsequentOffer) {
+  MediaSessionOptions opts;
+  AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video",
+                             RtpTransceiverDirection::kSendRecv, kActive,
+                             &opts);
+  // The initial offer advertises both uri1 and uri2.
+  opts.media_description_options.back().header_extensions = {
+      webrtc::RtpHeaderExtensionCapability("uri1", 1,
+                                           RtpTransceiverDirection::kSendRecv),
+      webrtc::RtpHeaderExtensionCapability("uri2", 2,
+                                           RtpTransceiverDirection::kSendRecv)};
+  auto offer = f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+
+  // Check that a subsequent offer after setting "uri2" to stopped no longer
+  // contains the extension.
+  opts.media_description_options.back().header_extensions = {
+      webrtc::RtpHeaderExtensionCapability("uri1", 1,
+                                           RtpTransceiverDirection::kSendRecv),
+      webrtc::RtpHeaderExtensionCapability("uri2", 2,
+                                           RtpTransceiverDirection::kStopped)};
+  auto offer2 = f1_.CreateOfferOrError(opts, offer.get()).MoveValue();
+  EXPECT_THAT(
+      offer2->contents(),
+      ElementsAre(Property(
+          &ContentInfo::media_description,
+          Pointee(Property(&MediaContentDescription::rtp_header_extensions,
+                           ElementsAre(Field(&RtpExtension::uri, "uri1")))))));
+}
+
+TEST_F(MediaSessionDescriptionFactoryTest,
+       TestOfferAnswerWithEncryptedRtpExtensionsBoth) {
+  MediaSessionOptions opts;
+  AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts);
+
+  // Both endpoints support encrypted RTP header extensions.
+  f1_.set_enable_encrypted_rtp_header_extensions(true);
+  f2_.set_enable_encrypted_rtp_header_extensions(true);
+
+  SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension1),
+                                   MAKE_VECTOR(kVideoRtpExtension1), &opts);
+  std::unique_ptr<SessionDescription> offer =
+      f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+  ASSERT_TRUE(offer.get());
+  SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension2),
+                                   MAKE_VECTOR(kVideoRtpExtension2), &opts);
+  std::unique_ptr<SessionDescription> answer =
+      f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+
+  // Offer and answer are checked against the Encrypted variants of the
+  // extension constants.
+  EXPECT_EQ(
+      MAKE_VECTOR(kAudioRtpExtensionEncrypted1),
+      GetFirstAudioContentDescription(offer.get())->rtp_header_extensions());
+  EXPECT_EQ(
+      MAKE_VECTOR(kVideoRtpExtensionEncrypted1),
+      GetFirstVideoContentDescription(offer.get())->rtp_header_extensions());
+  EXPECT_EQ(
+      MAKE_VECTOR(kAudioRtpExtensionEncryptedAnswer),
+      GetFirstAudioContentDescription(answer.get())->rtp_header_extensions());
+  EXPECT_EQ(
+      MAKE_VECTOR(kVideoRtpExtensionEncryptedAnswer),
+      GetFirstVideoContentDescription(answer.get())->rtp_header_extensions());
+}
+
+TEST_F(MediaSessionDescriptionFactoryTest,
+       TestOfferAnswerWithEncryptedRtpExtensionsOffer) {
+  MediaSessionOptions opts;
+  AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts);
+
+  // Only the offerer supports encrypted RTP header extensions.
+  f1_.set_enable_encrypted_rtp_header_extensions(true);
+
+  SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension1),
+                                   MAKE_VECTOR(kVideoRtpExtension1), &opts);
+  std::unique_ptr<SessionDescription> offer =
+      f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+  ASSERT_TRUE(offer.get());
+  SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension2),
+                                   MAKE_VECTOR(kVideoRtpExtension2), &opts);
+  std::unique_ptr<SessionDescription> answer =
+      f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+
+  // The offer carries the Encrypted variants; the answer falls back to the
+  // unencrypted Answer constants.
+  EXPECT_EQ(
+      MAKE_VECTOR(kAudioRtpExtensionEncrypted1),
+      GetFirstAudioContentDescription(offer.get())->rtp_header_extensions());
+  EXPECT_EQ(
+      MAKE_VECTOR(kVideoRtpExtensionEncrypted1),
+      GetFirstVideoContentDescription(offer.get())->rtp_header_extensions());
+  EXPECT_EQ(
+      MAKE_VECTOR(kAudioRtpExtensionAnswer),
+      GetFirstAudioContentDescription(answer.get())->rtp_header_extensions());
+  EXPECT_EQ(
+      MAKE_VECTOR(kVideoRtpExtensionAnswer),
+      GetFirstVideoContentDescription(answer.get())->rtp_header_extensions());
+}
+
+TEST_F(MediaSessionDescriptionFactoryTest,
+       TestOfferAnswerWithEncryptedRtpExtensionsAnswer) {
+  MediaSessionOptions opts;
+  AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts);
+
+  // Only the answerer supports encrypted RTP header extensions; since the
+  // offer is unencrypted, the answer stays unencrypted too.
+  f2_.set_enable_encrypted_rtp_header_extensions(true);
+
+  SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension1),
+                                   MAKE_VECTOR(kVideoRtpExtension1), &opts);
+  std::unique_ptr<SessionDescription> offer =
+      f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+  ASSERT_TRUE(offer.get());
+  SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension2),
+                                   MAKE_VECTOR(kVideoRtpExtension2), &opts);
+  std::unique_ptr<SessionDescription> answer =
+      f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+
+  EXPECT_EQ(
+      MAKE_VECTOR(kAudioRtpExtension1),
+      GetFirstAudioContentDescription(offer.get())->rtp_header_extensions());
+  EXPECT_EQ(
+      MAKE_VECTOR(kVideoRtpExtension1),
+      GetFirstVideoContentDescription(offer.get())->rtp_header_extensions());
+  EXPECT_EQ(
+      MAKE_VECTOR(kAudioRtpExtensionAnswer),
+      GetFirstAudioContentDescription(answer.get())->rtp_header_extensions());
+  EXPECT_EQ(
+      MAKE_VECTOR(kVideoRtpExtensionAnswer),
+      GetFirstVideoContentDescription(answer.get())->rtp_header_extensions());
+}
+
+// Create an audio, video, data answer without legacy StreamParams.
+TEST_F(MediaSessionDescriptionFactoryTest,
+       TestCreateAnswerWithoutLegacyStreams) {
+  MediaSessionOptions opts;
+  AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts);
+  std::unique_ptr<SessionDescription> offer =
+      f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+  ASSERT_TRUE(offer.get());
+  std::unique_ptr<SessionDescription> answer =
+      f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+  const ContentInfo* ac = answer->GetContentByName("audio");
+  const ContentInfo* vc = answer->GetContentByName("video");
+  ASSERT_TRUE(ac);
+  ASSERT_TRUE(vc);
+  const AudioContentDescription* acd = ac->media_description()->as_audio();
+  const VideoContentDescription* vcd = vc->media_description()->as_video();
+
+  // Recv-only sections attach no senders, so no ssrcs are expected.
+  EXPECT_FALSE(acd->has_ssrcs());  // No StreamParams.
+  EXPECT_FALSE(vcd->has_ssrcs());  // No StreamParams.
+}
+
+// Create a typical video answer, and ensure it matches what we expect.
+// Exercises all four combinations of rtcp_mux_enabled on offerer/answerer:
+// rtcp-mux is negotiated only when BOTH sides enable it, and the offer only
+// advertises it when the offerer enables it.
+TEST_F(MediaSessionDescriptionFactoryTest, TestCreateVideoAnswerRtcpMux) {
+ MediaSessionOptions offer_opts;
+ AddAudioVideoSections(RtpTransceiverDirection::kSendRecv, &offer_opts);
+
+ MediaSessionOptions answer_opts;
+ AddAudioVideoSections(RtpTransceiverDirection::kSendRecv, &answer_opts);
+
+ std::unique_ptr<SessionDescription> offer;
+ std::unique_ptr<SessionDescription> answer;
+
+ // Case 1: both sides enable rtcp-mux -> offered and accepted.
+ offer_opts.rtcp_mux_enabled = true;
+ answer_opts.rtcp_mux_enabled = true;
+ offer = f1_.CreateOfferOrError(offer_opts, nullptr).MoveValue();
+ answer =
+ f2_.CreateAnswerOrError(offer.get(), answer_opts, nullptr).MoveValue();
+ ASSERT_TRUE(GetFirstAudioContentDescription(offer.get()));
+ ASSERT_TRUE(GetFirstVideoContentDescription(offer.get()));
+ ASSERT_TRUE(GetFirstAudioContentDescription(answer.get()));
+ ASSERT_TRUE(GetFirstVideoContentDescription(answer.get()));
+ EXPECT_TRUE(GetFirstAudioContentDescription(offer.get())->rtcp_mux());
+ EXPECT_TRUE(GetFirstVideoContentDescription(offer.get())->rtcp_mux());
+ EXPECT_TRUE(GetFirstAudioContentDescription(answer.get())->rtcp_mux());
+ EXPECT_TRUE(GetFirstVideoContentDescription(answer.get())->rtcp_mux());
+
+ // Case 2: only offerer enables it -> offered but declined in the answer.
+ offer_opts.rtcp_mux_enabled = true;
+ answer_opts.rtcp_mux_enabled = false;
+ offer = f1_.CreateOfferOrError(offer_opts, nullptr).MoveValue();
+ answer =
+ f2_.CreateAnswerOrError(offer.get(), answer_opts, nullptr).MoveValue();
+ ASSERT_TRUE(GetFirstAudioContentDescription(offer.get()));
+ ASSERT_TRUE(GetFirstVideoContentDescription(offer.get()));
+ ASSERT_TRUE(GetFirstAudioContentDescription(answer.get()));
+ ASSERT_TRUE(GetFirstVideoContentDescription(answer.get()));
+ EXPECT_TRUE(GetFirstAudioContentDescription(offer.get())->rtcp_mux());
+ EXPECT_TRUE(GetFirstVideoContentDescription(offer.get())->rtcp_mux());
+ EXPECT_FALSE(GetFirstAudioContentDescription(answer.get())->rtcp_mux());
+ EXPECT_FALSE(GetFirstVideoContentDescription(answer.get())->rtcp_mux());
+
+ // Case 3: only answerer enables it -> not offered, so not in the answer.
+ offer_opts.rtcp_mux_enabled = false;
+ answer_opts.rtcp_mux_enabled = true;
+ offer = f1_.CreateOfferOrError(offer_opts, nullptr).MoveValue();
+ answer =
+ f2_.CreateAnswerOrError(offer.get(), answer_opts, nullptr).MoveValue();
+ ASSERT_TRUE(GetFirstAudioContentDescription(offer.get()));
+ ASSERT_TRUE(GetFirstVideoContentDescription(offer.get()));
+ ASSERT_TRUE(GetFirstAudioContentDescription(answer.get()));
+ ASSERT_TRUE(GetFirstVideoContentDescription(answer.get()));
+ EXPECT_FALSE(GetFirstAudioContentDescription(offer.get())->rtcp_mux());
+ EXPECT_FALSE(GetFirstVideoContentDescription(offer.get())->rtcp_mux());
+ EXPECT_FALSE(GetFirstAudioContentDescription(answer.get())->rtcp_mux());
+ EXPECT_FALSE(GetFirstVideoContentDescription(answer.get())->rtcp_mux());
+
+ // Case 4: neither side enables it -> absent everywhere.
+ offer_opts.rtcp_mux_enabled = false;
+ answer_opts.rtcp_mux_enabled = false;
+ offer = f1_.CreateOfferOrError(offer_opts, nullptr).MoveValue();
+ answer =
+ f2_.CreateAnswerOrError(offer.get(), answer_opts, nullptr).MoveValue();
+ ASSERT_TRUE(GetFirstAudioContentDescription(offer.get()));
+ ASSERT_TRUE(GetFirstVideoContentDescription(offer.get()));
+ ASSERT_TRUE(GetFirstAudioContentDescription(answer.get()));
+ ASSERT_TRUE(GetFirstVideoContentDescription(answer.get()));
+ EXPECT_FALSE(GetFirstAudioContentDescription(offer.get())->rtcp_mux());
+ EXPECT_FALSE(GetFirstVideoContentDescription(offer.get())->rtcp_mux());
+ EXPECT_FALSE(GetFirstAudioContentDescription(answer.get())->rtcp_mux());
+ EXPECT_FALSE(GetFirstVideoContentDescription(answer.get())->rtcp_mux());
+}
+
+// Create an audio-only answer to a video offer.
+TEST_F(MediaSessionDescriptionFactoryTest, TestCreateAudioAnswerToVideo) {
+ MediaSessionOptions opts;
+ AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio",
+ RtpTransceiverDirection::kRecvOnly, kActive,
+ &opts);
+ AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video",
+ RtpTransceiverDirection::kRecvOnly, kActive,
+ &opts);
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+ ASSERT_TRUE(offer.get());
+
+ // Stop the video section (index 1) before answering; the answer must still
+ // include a (rejected) video content rather than dropping it.
+ opts.media_description_options[1].stopped = true;
+ std::unique_ptr<SessionDescription> answer =
+ f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+ const ContentInfo* ac = answer->GetContentByName("audio");
+ const ContentInfo* vc = answer->GetContentByName("video");
+ ASSERT_TRUE(ac);
+ ASSERT_TRUE(vc);
+ ASSERT_TRUE(vc->media_description());
+ EXPECT_TRUE(vc->rejected);
+}
+
+// Create an answer that rejects the contents which are rejected in the offer.
+TEST_F(MediaSessionDescriptionFactoryTest,
+ CreateAnswerToOfferWithRejectedMedia) {
+ MediaSessionOptions opts;
+ AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts);
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+ ASSERT_TRUE(offer.get());
+ ContentInfo* ac = offer->GetContentByName("audio");
+ ContentInfo* vc = offer->GetContentByName("video");
+ ASSERT_TRUE(ac);
+ ASSERT_TRUE(vc);
+ // Mark both offered contents rejected in place before answering.
+ ac->rejected = true;
+ vc->rejected = true;
+ std::unique_ptr<SessionDescription> answer =
+ f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+ ac = answer->GetContentByName("audio");
+ vc = answer->GetContentByName("video");
+ ASSERT_TRUE(ac);
+ ASSERT_TRUE(vc);
+ // The answer must mirror the rejection of each offered content.
+ EXPECT_TRUE(ac->rejected);
+ EXPECT_TRUE(vc->rejected);
+}
+
+// Session-level extmap-allow-mixed: if the offer clears it, the answer must
+// not enable it.
+TEST_F(MediaSessionDescriptionFactoryTest,
+ OfferAndAnswerDoesNotHaveMixedByteSessionAttribute) {
+ MediaSessionOptions opts;
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(opts, /*current_description=*/nullptr).MoveValue();
+ offer->set_extmap_allow_mixed(false);
+
+ std::unique_ptr<SessionDescription> answer(
+ f2_.CreateAnswerOrError(offer.get(), opts,
+ /*current_description=*/nullptr)
+ .MoveValue());
+
+ EXPECT_FALSE(answer->extmap_allow_mixed());
+}
+
+// Session-level extmap-allow-mixed: if the offer sets it, the answer keeps it.
+TEST_F(MediaSessionDescriptionFactoryTest,
+ OfferAndAnswerHaveMixedByteSessionAttribute) {
+ MediaSessionOptions opts;
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(opts, /*current_description=*/nullptr).MoveValue();
+ offer->set_extmap_allow_mixed(true);
+
+ std::unique_ptr<SessionDescription> answer_support(
+ f2_.CreateAnswerOrError(offer.get(), opts,
+ /*current_description=*/nullptr)
+ .MoveValue());
+
+ EXPECT_TRUE(answer_support->extmap_allow_mixed());
+}
+
+// Media-level extmap-allow-mixed: clearing the session-level flag propagates
+// kNo to each m= section, and the answer reflects that per section.
+TEST_F(MediaSessionDescriptionFactoryTest,
+ OfferAndAnswerDoesNotHaveMixedByteMediaAttributes) {
+ MediaSessionOptions opts;
+ AddAudioVideoSections(RtpTransceiverDirection::kSendRecv, &opts);
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(opts, /*current_description=*/nullptr).MoveValue();
+ offer->set_extmap_allow_mixed(false);
+ MediaContentDescription* audio_offer =
+ offer->GetContentDescriptionByName("audio");
+ MediaContentDescription* video_offer =
+ offer->GetContentDescriptionByName("video");
+ // Sanity: the session-level setter above pushed kNo down to both sections.
+ ASSERT_EQ(MediaContentDescription::kNo,
+ audio_offer->extmap_allow_mixed_enum());
+ ASSERT_EQ(MediaContentDescription::kNo,
+ video_offer->extmap_allow_mixed_enum());
+
+ std::unique_ptr<SessionDescription> answer(
+ f2_.CreateAnswerOrError(offer.get(), opts,
+ /*current_description=*/nullptr)
+ .MoveValue());
+
+ MediaContentDescription* audio_answer =
+ answer->GetContentDescriptionByName("audio");
+ MediaContentDescription* video_answer =
+ answer->GetContentDescriptionByName("video");
+ EXPECT_EQ(MediaContentDescription::kNo,
+ audio_answer->extmap_allow_mixed_enum());
+ EXPECT_EQ(MediaContentDescription::kNo,
+ video_answer->extmap_allow_mixed_enum());
+}
+
+// Media-level extmap-allow-mixed: when both offered m= sections set kMedia,
+// the corresponding answer sections echo kMedia.
+TEST_F(MediaSessionDescriptionFactoryTest,
+ OfferAndAnswerHaveSameMixedByteMediaAttributes) {
+ MediaSessionOptions opts;
+ AddAudioVideoSections(RtpTransceiverDirection::kSendRecv, &opts);
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(opts, /*current_description=*/nullptr).MoveValue();
+ offer->set_extmap_allow_mixed(false);
+ MediaContentDescription* audio_offer =
+ offer->GetContentDescriptionByName("audio");
+ audio_offer->set_extmap_allow_mixed_enum(MediaContentDescription::kMedia);
+ MediaContentDescription* video_offer =
+ offer->GetContentDescriptionByName("video");
+ video_offer->set_extmap_allow_mixed_enum(MediaContentDescription::kMedia);
+
+ std::unique_ptr<SessionDescription> answer(
+ f2_.CreateAnswerOrError(offer.get(), opts,
+ /*current_description=*/nullptr)
+ .MoveValue());
+
+ MediaContentDescription* audio_answer =
+ answer->GetContentDescriptionByName("audio");
+ MediaContentDescription* video_answer =
+ answer->GetContentDescriptionByName("video");
+ EXPECT_EQ(MediaContentDescription::kMedia,
+ audio_answer->extmap_allow_mixed_enum());
+ EXPECT_EQ(MediaContentDescription::kMedia,
+ video_answer->extmap_allow_mixed_enum());
+}
+
+// Media-level extmap-allow-mixed: each m= section is negotiated independently
+// (audio kNo, video kMedia), and the answer mirrors them per section.
+TEST_F(MediaSessionDescriptionFactoryTest,
+ OfferAndAnswerHaveDifferentMixedByteMediaAttributes) {
+ MediaSessionOptions opts;
+ AddAudioVideoSections(RtpTransceiverDirection::kSendRecv, &opts);
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(opts, /*current_description=*/nullptr).MoveValue();
+ offer->set_extmap_allow_mixed(false);
+ MediaContentDescription* audio_offer =
+ offer->GetContentDescriptionByName("audio");
+ audio_offer->set_extmap_allow_mixed_enum(MediaContentDescription::kNo);
+ MediaContentDescription* video_offer =
+ offer->GetContentDescriptionByName("video");
+ video_offer->set_extmap_allow_mixed_enum(MediaContentDescription::kMedia);
+
+ std::unique_ptr<SessionDescription> answer(
+ f2_.CreateAnswerOrError(offer.get(), opts,
+ /*current_description=*/nullptr)
+ .MoveValue());
+
+ MediaContentDescription* audio_answer =
+ answer->GetContentDescriptionByName("audio");
+ MediaContentDescription* video_answer =
+ answer->GetContentDescriptionByName("video");
+ EXPECT_EQ(MediaContentDescription::kNo,
+ audio_answer->extmap_allow_mixed_enum());
+ EXPECT_EQ(MediaContentDescription::kMedia,
+ video_answer->extmap_allow_mixed_enum());
+}
+
+// Create an audio and video offer with:
+// - one video track
+// - two audio tracks
+// and ensure it matches what we expect. Also updates the initial offer by
+// adding a new video track and replaces one of the audio tracks.
+TEST_F(MediaSessionDescriptionFactoryTest, TestCreateMultiStreamVideoOffer) {
+ MediaSessionOptions opts;
+ AddAudioVideoSections(RtpTransceiverDirection::kSendRecv, &opts);
+ AttachSenderToMediaDescriptionOptions("video", MEDIA_TYPE_VIDEO, kVideoTrack1,
+ {kMediaStream1}, 1, &opts);
+ AttachSenderToMediaDescriptionOptions("audio", MEDIA_TYPE_AUDIO, kAudioTrack1,
+ {kMediaStream1}, 1, &opts);
+ AttachSenderToMediaDescriptionOptions("audio", MEDIA_TYPE_AUDIO, kAudioTrack2,
+ {kMediaStream1}, 1, &opts);
+
+ f1_.set_secure(SEC_ENABLED);
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+
+ ASSERT_TRUE(offer.get());
+ const ContentInfo* ac = offer->GetContentByName("audio");
+ const ContentInfo* vc = offer->GetContentByName("video");
+ ASSERT_TRUE(ac);
+ ASSERT_TRUE(vc);
+ const AudioContentDescription* acd = ac->media_description()->as_audio();
+ const VideoContentDescription* vcd = vc->media_description()->as_video();
+ EXPECT_EQ(MEDIA_TYPE_AUDIO, acd->type());
+ EXPECT_EQ(f1_.audio_sendrecv_codecs(), acd->codecs());
+
+ // Both audio tracks belong to one stream: same CNAME, distinct non-zero
+ // SSRCs, original attach order preserved.
+ const StreamParamsVec& audio_streams = acd->streams();
+ ASSERT_EQ(2U, audio_streams.size());
+ EXPECT_EQ(audio_streams[0].cname, audio_streams[1].cname);
+ EXPECT_EQ(kAudioTrack1, audio_streams[0].id);
+ ASSERT_EQ(1U, audio_streams[0].ssrcs.size());
+ EXPECT_NE(0U, audio_streams[0].ssrcs[0]);
+ EXPECT_EQ(kAudioTrack2, audio_streams[1].id);
+ ASSERT_EQ(1U, audio_streams[1].ssrcs.size());
+ EXPECT_NE(0U, audio_streams[1].ssrcs[0]);
+
+ EXPECT_EQ(kAutoBandwidth, acd->bandwidth()); // default bandwidth (auto)
+ EXPECT_TRUE(acd->rtcp_mux()); // rtcp-mux defaults on
+ ASSERT_CRYPTO(acd, kDefaultCryptoSuiteSize, kDefaultSrtpCryptoSuite);
+
+ EXPECT_EQ(MEDIA_TYPE_VIDEO, vcd->type());
+ EXPECT_EQ(f1_.video_sendrecv_codecs(), vcd->codecs());
+ ASSERT_CRYPTO(vcd, kDefaultCryptoSuiteSize, kDefaultSrtpCryptoSuite);
+
+ const StreamParamsVec& video_streams = vcd->streams();
+ ASSERT_EQ(1U, video_streams.size());
+ EXPECT_EQ(video_streams[0].cname, audio_streams[0].cname);
+ EXPECT_EQ(kVideoTrack1, video_streams[0].id);
+ EXPECT_EQ(kAutoBandwidth, vcd->bandwidth()); // default bandwidth (auto)
+ EXPECT_TRUE(vcd->rtcp_mux()); // rtcp-mux defaults on
+
+ // Update the offer. Add a new video track that is not synched to the
+ // other tracks and replace audio track 2 with audio track 3.
+ AttachSenderToMediaDescriptionOptions("video", MEDIA_TYPE_VIDEO, kVideoTrack2,
+ {kMediaStream2}, 1, &opts);
+ DetachSenderFromMediaSection("audio", kAudioTrack2, &opts);
+ AttachSenderToMediaDescriptionOptions("audio", MEDIA_TYPE_AUDIO, kAudioTrack3,
+ {kMediaStream1}, 1, &opts);
+ // Passing the previous offer as current_description makes this a re-offer.
+ std::unique_ptr<SessionDescription> updated_offer(
+ f1_.CreateOfferOrError(opts, offer.get()).MoveValue());
+
+ ASSERT_TRUE(updated_offer.get());
+ ac = updated_offer->GetContentByName("audio");
+ vc = updated_offer->GetContentByName("video");
+ ASSERT_TRUE(ac);
+ ASSERT_TRUE(vc);
+ const AudioContentDescription* updated_acd =
+ ac->media_description()->as_audio();
+ const VideoContentDescription* updated_vcd =
+ vc->media_description()->as_video();
+
+ // Codec lists and crypto params must be stable across the re-offer.
+ EXPECT_EQ(acd->type(), updated_acd->type());
+ EXPECT_EQ(acd->codecs(), updated_acd->codecs());
+ EXPECT_EQ(vcd->type(), updated_vcd->type());
+ EXPECT_EQ(vcd->codecs(), updated_vcd->codecs());
+ ASSERT_CRYPTO(updated_acd, kDefaultCryptoSuiteSize, kDefaultSrtpCryptoSuite);
+ EXPECT_TRUE(CompareCryptoParams(acd->cryptos(), updated_acd->cryptos()));
+ ASSERT_CRYPTO(updated_vcd, kDefaultCryptoSuiteSize, kDefaultSrtpCryptoSuite);
+ EXPECT_TRUE(CompareCryptoParams(vcd->cryptos(), updated_vcd->cryptos()));
+
+ const StreamParamsVec& updated_audio_streams = updated_acd->streams();
+ ASSERT_EQ(2U, updated_audio_streams.size());
+ EXPECT_EQ(audio_streams[0], updated_audio_streams[0]);
+ EXPECT_EQ(kAudioTrack3, updated_audio_streams[1].id); // New audio track.
+ ASSERT_EQ(1U, updated_audio_streams[1].ssrcs.size());
+ EXPECT_NE(0U, updated_audio_streams[1].ssrcs[0]);
+ EXPECT_EQ(updated_audio_streams[0].cname, updated_audio_streams[1].cname);
+
+ const StreamParamsVec& updated_video_streams = updated_vcd->streams();
+ ASSERT_EQ(2U, updated_video_streams.size());
+ EXPECT_EQ(video_streams[0], updated_video_streams[0]);
+ EXPECT_EQ(kVideoTrack2, updated_video_streams[1].id);
+ // All the media streams in one PeerConnection share one RTCP CNAME.
+ EXPECT_EQ(updated_video_streams[1].cname, updated_video_streams[0].cname);
+}
+
+// Create an offer with simulcast video stream.
+TEST_F(MediaSessionDescriptionFactoryTest, TestCreateSimulcastVideoOffer) {
+ MediaSessionOptions opts;
+ AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio",
+ RtpTransceiverDirection::kRecvOnly, kActive,
+ &opts);
+ AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video",
+ RtpTransceiverDirection::kSendRecv, kActive,
+ &opts);
+ // Legacy (SSRC-based) simulcast: ask for three encoding layers.
+ const int num_sim_layers = 3;
+ AttachSenderToMediaDescriptionOptions("video", MEDIA_TYPE_VIDEO, kVideoTrack1,
+ {kMediaStream1}, num_sim_layers, &opts);
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+
+ ASSERT_TRUE(offer.get());
+ const ContentInfo* vc = offer->GetContentByName("video");
+ ASSERT_TRUE(vc);
+ const VideoContentDescription* vcd = vc->media_description()->as_video();
+
+ const StreamParamsVec& video_streams = vcd->streams();
+ ASSERT_EQ(1U, video_streams.size());
+ EXPECT_EQ(kVideoTrack1, video_streams[0].id);
+ // The single stream must carry a SIM ssrc-group with one SSRC per layer.
+ const SsrcGroup* sim_ssrc_group =
+ video_streams[0].get_ssrc_group(cricket::kSimSsrcGroupSemantics);
+ ASSERT_TRUE(sim_ssrc_group);
+ EXPECT_EQ(static_cast<size_t>(num_sim_layers), sim_ssrc_group->ssrcs.size());
+}
+
+// gMock tuple matcher for use with ::testing::Pointwise; compares only the
+// rid string and direction of two RidDescriptions (other fields ignored).
+MATCHER(RidDescriptionEquals, "Verifies that two RidDescriptions are equal.") {
+ const RidDescription& rid1 = ::testing::get<0>(arg);
+ const RidDescription& rid2 = ::testing::get<1>(arg);
+ return rid1.rid == rid2.rid && rid1.direction == rid2.direction;
+}
+
+// Asserts that `content_name` in `description` describes a spec-compliant
+// simulcast sender: exactly one stream with RIDs (no SSRCs), the given send
+// rids/layers, and no receive layers. Runs inside the calling test via
+// gtest ASSERT/EXPECT macros.
+static void CheckSimulcastInSessionDescription(
+ const SessionDescription* description,
+ const std::string& content_name,
+ const std::vector<RidDescription>& send_rids,
+ const SimulcastLayerList& send_layers) {
+ ASSERT_NE(description, nullptr);
+ const ContentInfo* content = description->GetContentByName(content_name);
+ ASSERT_NE(content, nullptr);
+ const MediaContentDescription* cd = content->media_description();
+ ASSERT_NE(cd, nullptr);
+ const StreamParamsVec& streams = cd->streams();
+ ASSERT_THAT(streams, SizeIs(1));
+ const StreamParams& stream = streams[0];
+ // Spec-compliant simulcast signals RIDs instead of SSRCs.
+ ASSERT_THAT(stream.ssrcs, IsEmpty());
+ EXPECT_TRUE(stream.has_rids());
+ const std::vector<RidDescription> rids = stream.rids();
+
+ EXPECT_THAT(rids, Pointwise(RidDescriptionEquals(), send_rids));
+
+ EXPECT_TRUE(cd->HasSimulcast());
+ const SimulcastDescription& simulcast = cd->simulcast_description();
+ EXPECT_THAT(simulcast.send_layers(), SizeIs(send_layers.size()));
+ EXPECT_THAT(simulcast.send_layers(), Pointwise(Eq(), send_layers));
+
+ // Only send-side simulcast is expected by callers of this helper.
+ ASSERT_THAT(simulcast.receive_layers().GetAllLayers(), SizeIs(0));
+}
+
+// Create an offer with spec-compliant simulcast video stream.
+TEST_F(MediaSessionDescriptionFactoryTest, TestCreateCompliantSimulcastOffer) {
+ MediaSessionOptions opts;
+ AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video",
+ RtpTransceiverDirection::kSendRecv, kActive,
+ &opts);
+ // Three send rids ("f", "h", "q"); the middle layer is marked paused (true).
+ std::vector<RidDescription> send_rids;
+ send_rids.push_back(RidDescription("f", RidDirection::kSend));
+ send_rids.push_back(RidDescription("h", RidDirection::kSend));
+ send_rids.push_back(RidDescription("q", RidDirection::kSend));
+ SimulcastLayerList simulcast_layers;
+ simulcast_layers.AddLayer(SimulcastLayer(send_rids[0].rid, false));
+ simulcast_layers.AddLayer(SimulcastLayer(send_rids[1].rid, true));
+ simulcast_layers.AddLayer(SimulcastLayer(send_rids[2].rid, false));
+ // num_sim_layers = 0: rid-based simulcast, no legacy SSRC layers.
+ AttachSenderToMediaDescriptionOptions("video", MEDIA_TYPE_VIDEO, kVideoTrack1,
+ {kMediaStream1}, send_rids,
+ simulcast_layers, 0, &opts);
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+
+ CheckSimulcastInSessionDescription(offer.get(), "video", send_rids,
+ simulcast_layers);
+}
+
+// Create an offer that signals RIDs (not SSRCs) without Simulcast.
+// In this scenario, RIDs do not need to be negotiated (there is only one).
+TEST_F(MediaSessionDescriptionFactoryTest, TestOfferWithRidsNoSimulcast) {
+ MediaSessionOptions opts;
+ AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video",
+ RtpTransceiverDirection::kSendRecv, kActive,
+ &opts);
+ RidDescription rid("f", RidDirection::kSend);
+ // Single rid, empty layer list, no legacy SSRC layers.
+ AttachSenderToMediaDescriptionOptions("video", MEDIA_TYPE_VIDEO, kVideoTrack1,
+ {kMediaStream1}, {rid},
+ SimulcastLayerList(), 0, &opts);
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+
+ ASSERT_NE(offer.get(), nullptr);
+ const ContentInfo* content = offer->GetContentByName("video");
+ ASSERT_NE(content, nullptr);
+ const MediaContentDescription* cd = content->media_description();
+ ASSERT_NE(cd, nullptr);
+ const StreamParamsVec& streams = cd->streams();
+ ASSERT_THAT(streams, SizeIs(1));
+ const StreamParams& stream = streams[0];
+ ASSERT_THAT(stream.ssrcs, IsEmpty());
+ // A lone rid is dropped from the offer and no simulcast is signaled.
+ EXPECT_FALSE(stream.has_rids());
+ EXPECT_FALSE(cd->HasSimulcast());
+}
+
+// Create an answer with spec-compliant simulcast video stream.
+// In this scenario, the SFU is the caller requesting that we send Simulcast.
+TEST_F(MediaSessionDescriptionFactoryTest, TestCreateCompliantSimulcastAnswer) {
+ MediaSessionOptions offer_opts;
+ AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video",
+ RtpTransceiverDirection::kSendRecv, kActive,
+ &offer_opts);
+ AttachSenderToMediaDescriptionOptions("video", MEDIA_TYPE_VIDEO, kVideoTrack1,
+ {kMediaStream1}, 1, &offer_opts);
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(offer_opts, nullptr).MoveValue();
+
+ MediaSessionOptions answer_opts;
+ AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video",
+ RtpTransceiverDirection::kSendRecv, kActive,
+ &answer_opts);
+
+ // The answerer sends three rid-based layers ("h" marked paused).
+ std::vector<RidDescription> rid_descriptions{
+ RidDescription("f", RidDirection::kSend),
+ RidDescription("h", RidDirection::kSend),
+ RidDescription("q", RidDirection::kSend),
+ };
+ SimulcastLayerList simulcast_layers;
+ simulcast_layers.AddLayer(SimulcastLayer(rid_descriptions[0].rid, false));
+ simulcast_layers.AddLayer(SimulcastLayer(rid_descriptions[1].rid, true));
+ simulcast_layers.AddLayer(SimulcastLayer(rid_descriptions[2].rid, false));
+ AttachSenderToMediaDescriptionOptions("video", MEDIA_TYPE_VIDEO, kVideoTrack1,
+ {kMediaStream1}, rid_descriptions,
+ simulcast_layers, 0, &answer_opts);
+ std::unique_ptr<SessionDescription> answer =
+ f2_.CreateAnswerOrError(offer.get(), answer_opts, nullptr).MoveValue();
+
+ CheckSimulcastInSessionDescription(answer.get(), "video", rid_descriptions,
+ simulcast_layers);
+}
+
+// Create an answer that signals RIDs (not SSRCs) without Simulcast.
+// In this scenario, RIDs do not need to be negotiated (there is only one).
+// Note that RID Direction is not the same as the transceiver direction.
+TEST_F(MediaSessionDescriptionFactoryTest, TestAnswerWithRidsNoSimulcast) {
+ MediaSessionOptions offer_opts;
+ AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video",
+ RtpTransceiverDirection::kSendRecv, kActive,
+ &offer_opts);
+ RidDescription rid_offer("f", RidDirection::kSend);
+ // Single send rid in the offer, no simulcast layers, no legacy SSRCs.
+ AttachSenderToMediaDescriptionOptions("video", MEDIA_TYPE_VIDEO, kVideoTrack1,
+ {kMediaStream1}, {rid_offer},
+ SimulcastLayerList(), 0, &offer_opts);
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(offer_opts, nullptr).MoveValue();
+
+ MediaSessionOptions answer_opts;
+ AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video",
+ RtpTransceiverDirection::kSendRecv, kActive,
+ &answer_opts);
+
+ RidDescription rid_answer("f", RidDirection::kReceive);
+ AttachSenderToMediaDescriptionOptions("video", MEDIA_TYPE_VIDEO, kVideoTrack1,
+ {kMediaStream1}, {rid_answer},
+ SimulcastLayerList(), 0, &answer_opts);
+ std::unique_ptr<SessionDescription> answer =
+ f2_.CreateAnswerOrError(offer.get(), answer_opts, nullptr).MoveValue();
+
+ ASSERT_NE(answer.get(), nullptr);
+ // Fix: inspect the *answer* (the description under test). The original
+ // code read `offer` here, re-checking what TestOfferWithRidsNoSimulcast
+ // already covers and leaving the answer unverified.
+ const ContentInfo* content = answer->GetContentByName("video");
+ ASSERT_NE(content, nullptr);
+ const MediaContentDescription* cd = content->media_description();
+ ASSERT_NE(cd, nullptr);
+ const StreamParamsVec& streams = cd->streams();
+ ASSERT_THAT(streams, SizeIs(1));
+ const StreamParams& stream = streams[0];
+ ASSERT_THAT(stream.ssrcs, IsEmpty());
+ // A lone rid does not need negotiation, so it is not signaled.
+ EXPECT_FALSE(stream.has_rids());
+ EXPECT_FALSE(cd->HasSimulcast());
+}
+
+// Create an audio and video answer to a standard video offer with:
+// - one video track
+// - two audio tracks
+// (No data tracks are configured here, despite what an older version of this
+// comment said.) Ensure the answer matches what we expect, then update the
+// answer by adding a new video track and removing one of the audio tracks.
+TEST_F(MediaSessionDescriptionFactoryTest, TestCreateMultiStreamVideoAnswer) {
+ MediaSessionOptions offer_opts;
+ AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio",
+ RtpTransceiverDirection::kRecvOnly, kActive,
+ &offer_opts);
+ AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video",
+ RtpTransceiverDirection::kRecvOnly, kActive,
+ &offer_opts);
+ f1_.set_secure(SEC_ENABLED);
+ f2_.set_secure(SEC_ENABLED);
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(offer_opts, nullptr).MoveValue();
+
+ // The answerer attaches the actual senders (1 video, 2 audio tracks).
+ MediaSessionOptions answer_opts;
+ AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio",
+ RtpTransceiverDirection::kSendRecv, kActive,
+ &answer_opts);
+ AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video",
+ RtpTransceiverDirection::kSendRecv, kActive,
+ &answer_opts);
+ AttachSenderToMediaDescriptionOptions("video", MEDIA_TYPE_VIDEO, kVideoTrack1,
+ {kMediaStream1}, 1, &answer_opts);
+ AttachSenderToMediaDescriptionOptions("audio", MEDIA_TYPE_AUDIO, kAudioTrack1,
+ {kMediaStream1}, 1, &answer_opts);
+ AttachSenderToMediaDescriptionOptions("audio", MEDIA_TYPE_AUDIO, kAudioTrack2,
+ {kMediaStream1}, 1, &answer_opts);
+
+ std::unique_ptr<SessionDescription> answer =
+ f2_.CreateAnswerOrError(offer.get(), answer_opts, nullptr).MoveValue();
+
+ ASSERT_TRUE(answer.get());
+ const ContentInfo* ac = answer->GetContentByName("audio");
+ const ContentInfo* vc = answer->GetContentByName("video");
+ ASSERT_TRUE(ac);
+ ASSERT_TRUE(vc);
+ const AudioContentDescription* acd = ac->media_description()->as_audio();
+ const VideoContentDescription* vcd = vc->media_description()->as_video();
+ ASSERT_CRYPTO(acd, 1U, kDefaultSrtpCryptoSuite);
+ ASSERT_CRYPTO(vcd, 1U, kDefaultSrtpCryptoSuite);
+
+ EXPECT_EQ(MEDIA_TYPE_AUDIO, acd->type());
+ EXPECT_THAT(acd->codecs(), ElementsAreArray(kAudioCodecsAnswer));
+
+ // Both audio tracks share one CNAME and get distinct non-zero SSRCs.
+ const StreamParamsVec& audio_streams = acd->streams();
+ ASSERT_EQ(2U, audio_streams.size());
+ EXPECT_TRUE(audio_streams[0].cname == audio_streams[1].cname);
+ EXPECT_EQ(kAudioTrack1, audio_streams[0].id);
+ ASSERT_EQ(1U, audio_streams[0].ssrcs.size());
+ EXPECT_NE(0U, audio_streams[0].ssrcs[0]);
+ EXPECT_EQ(kAudioTrack2, audio_streams[1].id);
+ ASSERT_EQ(1U, audio_streams[1].ssrcs.size());
+ EXPECT_NE(0U, audio_streams[1].ssrcs[0]);
+
+ EXPECT_EQ(kAutoBandwidth, acd->bandwidth()); // default bandwidth (auto)
+ EXPECT_TRUE(acd->rtcp_mux()); // rtcp-mux defaults on
+
+ EXPECT_EQ(MEDIA_TYPE_VIDEO, vcd->type());
+ EXPECT_THAT(vcd->codecs(), ElementsAreArray(kVideoCodecsAnswer));
+
+ const StreamParamsVec& video_streams = vcd->streams();
+ ASSERT_EQ(1U, video_streams.size());
+ EXPECT_EQ(video_streams[0].cname, audio_streams[0].cname);
+ EXPECT_EQ(kVideoTrack1, video_streams[0].id);
+ EXPECT_EQ(kAutoBandwidth, vcd->bandwidth()); // default bandwidth (auto)
+ EXPECT_TRUE(vcd->rtcp_mux()); // rtcp-mux defaults on
+
+ // Update the answer. Add a new video track that is not synched to the
+ // other tracks and remove 1 audio track.
+ AttachSenderToMediaDescriptionOptions("video", MEDIA_TYPE_VIDEO, kVideoTrack2,
+ {kMediaStream2}, 1, &answer_opts);
+ DetachSenderFromMediaSection("audio", kAudioTrack2, &answer_opts);
+ // Passing the first answer as current_description makes this a re-answer.
+ std::unique_ptr<SessionDescription> updated_answer(
+ f2_.CreateAnswerOrError(offer.get(), answer_opts, answer.get())
+ .MoveValue());
+
+ ASSERT_TRUE(updated_answer.get())
+ ac = updated_answer->GetContentByName("audio");
+ vc = updated_answer->GetContentByName("video");
+ ASSERT_TRUE(ac);
+ ASSERT_TRUE(vc);
+ const AudioContentDescription* updated_acd =
+ ac->media_description()->as_audio();
+ const VideoContentDescription* updated_vcd =
+ vc->media_description()->as_video();
+
+ // Crypto and codec selections must be stable across the re-answer.
+ ASSERT_CRYPTO(updated_acd, 1U, kDefaultSrtpCryptoSuite);
+ EXPECT_TRUE(CompareCryptoParams(acd->cryptos(), updated_acd->cryptos()));
+ ASSERT_CRYPTO(updated_vcd, 1U, kDefaultSrtpCryptoSuite);
+ EXPECT_TRUE(CompareCryptoParams(vcd->cryptos(), updated_vcd->cryptos()));
+
+ EXPECT_EQ(acd->type(), updated_acd->type());
+ EXPECT_EQ(acd->codecs(), updated_acd->codecs());
+ EXPECT_EQ(vcd->type(), updated_vcd->type());
+ EXPECT_EQ(vcd->codecs(), updated_vcd->codecs());
+
+ const StreamParamsVec& updated_audio_streams = updated_acd->streams();
+ ASSERT_EQ(1U, updated_audio_streams.size());
+ EXPECT_TRUE(audio_streams[0] == updated_audio_streams[0]);
+
+ const StreamParamsVec& updated_video_streams = updated_vcd->streams();
+ ASSERT_EQ(2U, updated_video_streams.size());
+ EXPECT_EQ(video_streams[0], updated_video_streams[0]);
+ EXPECT_EQ(kVideoTrack2, updated_video_streams[1].id);
+ // All media streams in one PeerConnection share one CNAME.
+ EXPECT_EQ(updated_video_streams[1].cname, updated_video_streams[0].cname);
+}
+
+// Create an updated offer after creating an answer to the original offer and
+// verify that the codecs that were part of the original answer are not changed
+// in the updated offer.
+TEST_F(MediaSessionDescriptionFactoryTest,
+ RespondentCreatesOfferAfterCreatingAnswer) {
+ MediaSessionOptions opts;
+ AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts);
+
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+ std::unique_ptr<SessionDescription> answer =
+ f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+
+ const AudioContentDescription* acd =
+ GetFirstAudioContentDescription(answer.get());
+ EXPECT_THAT(acd->codecs(), ElementsAreArray(kAudioCodecsAnswer));
+
+ const VideoContentDescription* vcd =
+ GetFirstVideoContentDescription(answer.get());
+ EXPECT_THAT(vcd->codecs(), ElementsAreArray(kVideoCodecsAnswer));
+
+ // Now the previous answerer (f2_) re-offers, using its own answer as the
+ // current description.
+ std::unique_ptr<SessionDescription> updated_offer(
+ f2_.CreateOfferOrError(opts, answer.get()).MoveValue());
+
+ // The expected audio codecs are the common audio codecs from the first
+ // offer/answer exchange plus the audio codecs only `f2_` offer, sorted in
+ // preference order.
+ // TODO(wu): `updated_offer` should not include the codec
+ // (i.e. `kAudioCodecs2[0]`) the other side doesn't support.
+ const AudioCodec kUpdatedAudioCodecOffer[] = {
+ kAudioCodecsAnswer[0],
+ kAudioCodecsAnswer[1],
+ kAudioCodecs2[0],
+ };
+
+ // The expected video codecs are the common video codecs from the first
+ // offer/answer exchange plus the video codecs only `f2_` offer, sorted in
+ // preference order.
+ const VideoCodec kUpdatedVideoCodecOffer[] = {
+ kVideoCodecsAnswer[0],
+ kVideoCodecs2[1],
+ };
+
+ const AudioContentDescription* updated_acd =
+ GetFirstAudioContentDescription(updated_offer.get());
+ EXPECT_THAT(updated_acd->codecs(), ElementsAreArray(kUpdatedAudioCodecOffer));
+
+ const VideoContentDescription* updated_vcd =
+ GetFirstVideoContentDescription(updated_offer.get());
+ EXPECT_THAT(updated_vcd->codecs(), ElementsAreArray(kUpdatedVideoCodecOffer));
+}
+
+// Test that a reoffer does not reuse audio codecs from a previous media section
+// that is being recycled.
+TEST_F(MediaSessionDescriptionFactoryTest,
+ ReOfferDoesNotReUseRecycledAudioCodecs) {
+ // Audio-only: strip video codecs from both factories.
+ f1_.set_video_codecs({}, {});
+ f2_.set_video_codecs({}, {});
+
+ MediaSessionOptions opts;
+ AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "a0",
+ RtpTransceiverDirection::kSendRecv, kActive,
+ &opts);
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+ std::unique_ptr<SessionDescription> answer =
+ f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+
+ // Recycle the media section by changing its mid.
+ opts.media_description_options[0].mid = "a1";
+ std::unique_ptr<SessionDescription> reoffer =
+ f2_.CreateOfferOrError(opts, answer.get()).MoveValue();
+
+ // Expect that the results of the first negotiation are ignored. If the m=
+ // section was not recycled the payload types would match the initial offerer.
+ const AudioContentDescription* acd =
+ GetFirstAudioContentDescription(reoffer.get());
+ EXPECT_THAT(acd->codecs(), ElementsAreArray(kAudioCodecs2));
+}
+
+// Test that a reoffer does not reuse video codecs from a previous media section
+// that is being recycled.
+TEST_F(MediaSessionDescriptionFactoryTest,
+ ReOfferDoesNotReUseRecycledVideoCodecs) {
+ // Video-only: strip audio codecs from both factories.
+ f1_.set_audio_codecs({}, {});
+ f2_.set_audio_codecs({}, {});
+
+ MediaSessionOptions opts;
+ AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "v0",
+ RtpTransceiverDirection::kSendRecv, kActive,
+ &opts);
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+ auto answer = f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+
+ // Recycle the media section by changing its mid.
+ opts.media_description_options[0].mid = "v1";
+ std::unique_ptr<SessionDescription> reoffer =
+ f2_.CreateOfferOrError(opts, answer.get()).MoveValue();
+
+ // Expect that the results of the first negotiation are ignored. If the m=
+ // section was not recycled the payload types would match the initial offerer.
+ const VideoContentDescription* vcd =
+ GetFirstVideoContentDescription(reoffer.get());
+ EXPECT_THAT(vcd->codecs(), ElementsAreArray(kVideoCodecs2));
+}
+
+// Test that a reanswer does not reuse audio codecs from a previous media
+// section that is being recycled.
+TEST_F(MediaSessionDescriptionFactoryTest,
+ ReAnswerDoesNotReUseRecycledAudioCodecs) {
+ // Audio-only scenario: strip video codecs from both factories.
+ f1_.set_video_codecs({}, {});
+ f2_.set_video_codecs({}, {});
+
+ // Perform initial offer/answer in reverse (`f2_` as offerer) so that the
+ // second offer/answer is forward (`f1_` as offerer).
+ MediaSessionOptions opts;
+ AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "a0",
+ RtpTransceiverDirection::kSendRecv, kActive,
+ &opts);
+ std::unique_ptr<SessionDescription> offer =
+ f2_.CreateOfferOrError(opts, nullptr).MoveValue();
+ std::unique_ptr<SessionDescription> answer =
+ f1_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+
+ // Recycle the media section by changing its mid.
+ opts.media_description_options[0].mid = "a1";
+ std::unique_ptr<SessionDescription> reoffer =
+ f1_.CreateOfferOrError(opts, answer.get()).MoveValue();
+ // The reanswer passes the original `offer` as the current description.
+ std::unique_ptr<SessionDescription> reanswer =
+ f2_.CreateAnswerOrError(reoffer.get(), opts, offer.get()).MoveValue();
+
+ // Expect that the results of the first negotiation are ignored. If the m=
+ // section was not recycled the payload types would match the initial offerer.
+ const AudioContentDescription* acd =
+ GetFirstAudioContentDescription(reanswer.get());
+ EXPECT_THAT(acd->codecs(), ElementsAreArray(kAudioCodecsAnswer));
+}
+
+// Test that a reanswer does not reuse video codecs from a previous media
+// section that is being recycled.
+TEST_F(MediaSessionDescriptionFactoryTest,
+ ReAnswerDoesNotReUseRecycledVideoCodecs) {
+ // Video-only scenario: strip audio codecs from both factories.
+ f1_.set_audio_codecs({}, {});
+ f2_.set_audio_codecs({}, {});
+
+ // Perform initial offer/answer in reverse (`f2_` as offerer) so that the
+ // second offer/answer is forward (`f1_` as offerer).
+ MediaSessionOptions opts;
+ AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "v0",
+ RtpTransceiverDirection::kSendRecv, kActive,
+ &opts);
+ std::unique_ptr<SessionDescription> offer =
+ f2_.CreateOfferOrError(opts, nullptr).MoveValue();
+ std::unique_ptr<SessionDescription> answer =
+ f1_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+
+ // Recycle the media section by changing its mid.
+ opts.media_description_options[0].mid = "v1";
+ std::unique_ptr<SessionDescription> reoffer =
+ f1_.CreateOfferOrError(opts, answer.get()).MoveValue();
+ // The reanswer passes the original `offer` as the current description.
+ std::unique_ptr<SessionDescription> reanswer =
+ f2_.CreateAnswerOrError(reoffer.get(), opts, offer.get()).MoveValue();
+
+ // Expect that the results of the first negotiation are ignored. If the m=
+ // section was not recycled the payload types would match the initial offerer.
+ const VideoContentDescription* vcd =
+ GetFirstVideoContentDescription(reanswer.get());
+ EXPECT_THAT(vcd->codecs(), ElementsAreArray(kVideoCodecsAnswer));
+}
+
+// Create an updated offer after creating an answer to the original offer and
+// verify that the codecs that were part of the original answer are not changed
+// in the updated offer. In this test Rtx is enabled.
+TEST_F(MediaSessionDescriptionFactoryTest,
+ RespondentCreatesOfferAfterCreatingAnswerWithRtx) {
+ MediaSessionOptions opts;
+ AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video",
+ RtpTransceiverDirection::kRecvOnly, kActive,
+ &opts);
+ std::vector<VideoCodec> f1_codecs = MAKE_VECTOR(kVideoCodecs1);
+ // This creates rtx for H264 with the payload type `f1_` uses.
+ AddRtxCodec(cricket::CreateVideoRtxCodec(126, kVideoCodecs1[1].id),
+ &f1_codecs);
+ f1_.set_video_codecs(f1_codecs, f1_codecs);
+
+ std::vector<VideoCodec> f2_codecs = MAKE_VECTOR(kVideoCodecs2);
+ // This creates rtx for H264 with the payload type `f2_` uses.
+ AddRtxCodec(cricket::CreateVideoRtxCodec(125, kVideoCodecs2[0].id),
+ &f2_codecs);
+ f2_.set_video_codecs(f2_codecs, f2_codecs);
+
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+ ASSERT_TRUE(offer.get());
+ std::unique_ptr<SessionDescription> answer =
+ f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+
+ const VideoContentDescription* vcd =
+ GetFirstVideoContentDescription(answer.get());
+
+ // The answer should keep the offerer's (f1_'s) RTX payload type, 126.
+ std::vector<VideoCodec> expected_codecs = MAKE_VECTOR(kVideoCodecsAnswer);
+ AddRtxCodec(cricket::CreateVideoRtxCodec(126, kVideoCodecs1[1].id),
+ &expected_codecs);
+
+ EXPECT_EQ(expected_codecs, vcd->codecs());
+
+ // Now, make sure we get same result (except for the order) if `f2_` creates
+ // an updated offer even though the default payload types between `f1_` and
+ // `f2_` are different.
+ std::unique_ptr<SessionDescription> updated_offer(
+ f2_.CreateOfferOrError(opts, answer.get()).MoveValue());
+ ASSERT_TRUE(updated_offer);
+ std::unique_ptr<SessionDescription> updated_answer(
+ f1_.CreateAnswerOrError(updated_offer.get(), opts, answer.get())
+ .MoveValue());
+
+ const VideoContentDescription* updated_vcd =
+ GetFirstVideoContentDescription(updated_answer.get());
+
+ EXPECT_EQ(expected_codecs, updated_vcd->codecs());
+}
+
+// Regression test for:
+// https://bugs.chromium.org/p/webrtc/issues/detail?id=8332
+// Existing codecs should always appear before new codecs in re-offers. But
+// under a specific set of circumstances, the existing RTX codec was ending up
+// added to the end of the list.
+TEST_F(MediaSessionDescriptionFactoryTest,
+ RespondentCreatesOfferAfterCreatingAnswerWithRemappedRtxPayloadType) {
+ MediaSessionOptions opts;
+ AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video",
+ RtpTransceiverDirection::kRecvOnly, kActive,
+ &opts);
+ // We specifically choose different preferred payload types for VP8 to
+ // trigger the issue.
+ cricket::VideoCodec vp8_offerer = cricket::CreateVideoCodec(100, "VP8");
+ cricket::VideoCodec vp8_offerer_rtx =
+ cricket::CreateVideoRtxCodec(101, vp8_offerer.id);
+ cricket::VideoCodec vp8_answerer = cricket::CreateVideoCodec(110, "VP8");
+ cricket::VideoCodec vp8_answerer_rtx =
+ cricket::CreateVideoRtxCodec(111, vp8_answerer.id);
+ cricket::VideoCodec vp9 = cricket::CreateVideoCodec(120, "VP9");
+ cricket::VideoCodec vp9_rtx = cricket::CreateVideoRtxCodec(121, vp9.id);
+
+ std::vector<VideoCodec> f1_codecs = {vp8_offerer, vp8_offerer_rtx};
+ // We also specifically cause the answerer to prefer VP9, such that if it
+ // *doesn't* honor the existing preferred codec (VP8) we'll notice.
+ std::vector<VideoCodec> f2_codecs = {vp9, vp9_rtx, vp8_answerer,
+ vp8_answerer_rtx};
+
+ f1_.set_video_codecs(f1_codecs, f1_codecs);
+ f2_.set_video_codecs(f2_codecs, f2_codecs);
+ // No audio codecs on either side; this is a video-only negotiation.
+ std::vector<AudioCodec> audio_codecs;
+ f1_.set_audio_codecs(audio_codecs, audio_codecs);
+ f2_.set_audio_codecs(audio_codecs, audio_codecs);
+
+ // Offer will be {VP8, RTX for VP8}. Answer will be the same.
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+ ASSERT_TRUE(offer.get());
+ std::unique_ptr<SessionDescription> answer =
+ f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+
+ // Updated offer *should* be {VP8, RTX for VP8, VP9, RTX for VP9}.
+ // But if the bug is triggered, RTX for VP8 ends up last.
+ std::unique_ptr<SessionDescription> updated_offer(
+ f2_.CreateOfferOrError(opts, answer.get()).MoveValue());
+
+ const VideoContentDescription* vcd =
+ GetFirstVideoContentDescription(updated_offer.get());
+ std::vector<cricket::VideoCodec> codecs = vcd->codecs();
+ ASSERT_EQ(4u, codecs.size());
+ EXPECT_EQ(vp8_offerer, codecs[0]);
+ EXPECT_EQ(vp8_offerer_rtx, codecs[1]);
+ EXPECT_EQ(vp9, codecs[2]);
+ EXPECT_EQ(vp9_rtx, codecs[3]);
+}
+
+// Create an updated offer that adds video after creating an audio only answer
+// to the original offer. This test verifies that if a video codec and the RTX
+// codec have the same default payload type as an audio codec that is already in
+// use, the added codecs payload types are changed.
+TEST_F(MediaSessionDescriptionFactoryTest,
+ RespondentCreatesOfferWithVideoAndRtxAfterCreatingAudioAnswer) {
+ std::vector<VideoCodec> f1_codecs = MAKE_VECTOR(kVideoCodecs1);
+ // This creates rtx for H264 with the payload type `f1_` uses.
+ AddRtxCodec(cricket::CreateVideoRtxCodec(126, kVideoCodecs1[1].id),
+ &f1_codecs);
+ f1_.set_video_codecs(f1_codecs, f1_codecs);
+
+ MediaSessionOptions opts;
+ AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio",
+ RtpTransceiverDirection::kRecvOnly, kActive,
+ &opts);
+
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+ std::unique_ptr<SessionDescription> answer =
+ f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+
+ const AudioContentDescription* acd =
+ GetFirstAudioContentDescription(answer.get());
+ EXPECT_THAT(acd->codecs(), ElementsAreArray(kAudioCodecsAnswer));
+
+ // Now - let `f2_` add video with RTX and let the payload type the RTX codec
+ // reference be the same as an audio codec that was negotiated in the
+ // first offer/answer exchange.
+ opts.media_description_options.clear();
+ AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts);
+
+ std::vector<VideoCodec> f2_codecs = MAKE_VECTOR(kVideoCodecs2);
+ int used_pl_type = acd->codecs()[0].id;
+ f2_codecs[0].id = used_pl_type; // Set the payload type for H264.
+ AddRtxCodec(cricket::CreateVideoRtxCodec(125, used_pl_type), &f2_codecs);
+ f2_.set_video_codecs(f2_codecs, f2_codecs);
+
+ std::unique_ptr<SessionDescription> updated_offer(
+ f2_.CreateOfferOrError(opts, answer.get()).MoveValue());
+ ASSERT_TRUE(updated_offer);
+ std::unique_ptr<SessionDescription> updated_answer(
+ f1_.CreateAnswerOrError(updated_offer.get(), opts, answer.get())
+ .MoveValue());
+
+ // Fix: read the audio codecs from `updated_answer`, not the original
+ // `answer` — the original code re-checked the old answer, which made this
+ // assertion a duplicate of the one above instead of validating the
+ // renegotiated audio section.
+ const AudioContentDescription* updated_acd =
+ GetFirstAudioContentDescription(updated_answer.get());
+ EXPECT_THAT(updated_acd->codecs(), ElementsAreArray(kAudioCodecsAnswer));
+
+ const VideoContentDescription* updated_vcd =
+ GetFirstVideoContentDescription(updated_answer.get());
+
+ // H264 must have been moved off the payload type the audio codec occupies,
+ // and the RTX apt parameter must follow it to the new payload type.
+ ASSERT_EQ("H264", updated_vcd->codecs()[0].name);
+ ASSERT_EQ(cricket::kRtxCodecName, updated_vcd->codecs()[1].name);
+ int new_h264_pl_type = updated_vcd->codecs()[0].id;
+ EXPECT_NE(used_pl_type, new_h264_pl_type);
+ VideoCodec rtx = updated_vcd->codecs()[1];
+ int pt_referenced_by_rtx = rtc::FromString<int>(
+ rtx.params[cricket::kCodecParamAssociatedPayloadType]);
+ EXPECT_EQ(new_h264_pl_type, pt_referenced_by_rtx);
+}
+
+// Create an updated offer with RTX after creating an answer to an offer
+// without RTX, and with different default payload types.
+// Verify that the added RTX codec references the correct payload type.
+TEST_F(MediaSessionDescriptionFactoryTest,
+ RespondentCreatesOfferWithRtxAfterCreatingAnswerWithoutRtx) {
+ MediaSessionOptions opts;
+ AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts);
+
+ // Only `f2_` (the answerer) has RTX; `f1_` offers without it.
+ std::vector<VideoCodec> f2_codecs = MAKE_VECTOR(kVideoCodecs2);
+ // This creates rtx for H264 with the payload type `f2_` uses.
+ AddRtxCodec(cricket::CreateVideoRtxCodec(125, kVideoCodecs2[0].id),
+ &f2_codecs);
+ f2_.set_video_codecs(f2_codecs, f2_codecs);
+
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+ ASSERT_TRUE(offer.get());
+ std::unique_ptr<SessionDescription> answer =
+ f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+
+ const VideoContentDescription* vcd =
+ GetFirstVideoContentDescription(answer.get());
+
+ // No RTX in the offer, so none may appear in the answer.
+ std::vector<VideoCodec> expected_codecs = MAKE_VECTOR(kVideoCodecsAnswer);
+ EXPECT_EQ(expected_codecs, vcd->codecs());
+
+ // Now, ensure that the RTX codec is created correctly when `f2_` creates an
+ // updated offer, even though the default payload types are different from
+ // those of `f1_`.
+ std::unique_ptr<SessionDescription> updated_offer(
+ f2_.CreateOfferOrError(opts, answer.get()).MoveValue());
+ ASSERT_TRUE(updated_offer);
+
+ const VideoContentDescription* updated_vcd =
+ GetFirstVideoContentDescription(updated_offer.get());
+
+ // New offer should attempt to add H263, and RTX for H264.
+ expected_codecs.push_back(kVideoCodecs2[1]);
+ AddRtxCodec(cricket::CreateVideoRtxCodec(125, kVideoCodecs1[1].id),
+ &expected_codecs);
+ EXPECT_EQ(expected_codecs, updated_vcd->codecs());
+}
+
+// Test that RTX is ignored when there is no associated payload type parameter.
+TEST_F(MediaSessionDescriptionFactoryTest, RtxWithoutApt) {
+ MediaSessionOptions opts;
+ AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video",
+ RtpTransceiverDirection::kRecvOnly, kActive,
+ &opts);
+ std::vector<VideoCodec> f1_codecs = MAKE_VECTOR(kVideoCodecs1);
+ // This creates RTX without associated payload type parameter.
+ AddRtxCodec(cricket::CreateVideoCodec(126, cricket::kRtxCodecName),
+ &f1_codecs);
+ f1_.set_video_codecs(f1_codecs, f1_codecs);
+
+ std::vector<VideoCodec> f2_codecs = MAKE_VECTOR(kVideoCodecs2);
+ // This creates RTX for H264 with the payload type `f2_` uses.
+ AddRtxCodec(cricket::CreateVideoRtxCodec(125, kVideoCodecs2[0].id),
+ &f2_codecs);
+ f2_.set_video_codecs(f2_codecs, f2_codecs);
+
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+ ASSERT_TRUE(offer.get());
+ // kCodecParamAssociatedPayloadType will always be added to the offer when RTX
+ // is selected. Manually remove kCodecParamAssociatedPayloadType so that it
+ // is possible to test that RTX is dropped when
+ // kCodecParamAssociatedPayloadType is missing in the offer.
+ MediaContentDescription* media_desc =
+ offer->GetContentDescriptionByName(cricket::CN_VIDEO);
+ ASSERT_TRUE(media_desc);
+ VideoContentDescription* desc = media_desc->as_video();
+ std::vector<VideoCodec> codecs = desc->codecs();
+ // Clear all params (including apt) from every RTX codec in the offer.
+ for (VideoCodec& codec : codecs) {
+ if (absl::StartsWith(codec.name, cricket::kRtxCodecName)) {
+ codec.params.clear();
+ }
+ }
+ desc->set_codecs(codecs);
+
+ std::unique_ptr<SessionDescription> answer =
+ f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+
+ EXPECT_THAT(
+ GetCodecNames(GetFirstVideoContentDescription(answer.get())->codecs()),
+ Not(Contains(cricket::kRtxCodecName)));
+}
+
+// Test that RTX will be filtered out in the answer if its associated payload
+// type doesn't match the local value.
+TEST_F(MediaSessionDescriptionFactoryTest, FilterOutRtxIfAptDoesntMatch) {
+ MediaSessionOptions opts;
+ AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video",
+ RtpTransceiverDirection::kRecvOnly, kActive,
+ &opts);
+ std::vector<VideoCodec> f1_codecs = MAKE_VECTOR(kVideoCodecs1);
+ // This creates RTX for H264 in sender.
+ AddRtxCodec(cricket::CreateVideoRtxCodec(126, kVideoCodecs1[1].id),
+ &f1_codecs);
+ f1_.set_video_codecs(f1_codecs, f1_codecs);
+
+ std::vector<VideoCodec> f2_codecs = MAKE_VECTOR(kVideoCodecs2);
+ // This creates RTX for H263 in receiver.
+ AddRtxCodec(cricket::CreateVideoRtxCodec(125, kVideoCodecs2[1].id),
+ &f2_codecs);
+ f2_.set_video_codecs(f2_codecs, f2_codecs);
+
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+ ASSERT_TRUE(offer.get());
+ // Associated payload type doesn't match (H264 vs H263), therefore, RTX codec
+ // is removed in the answer.
+ std::unique_ptr<SessionDescription> answer =
+ f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+
+ EXPECT_THAT(
+ GetCodecNames(GetFirstVideoContentDescription(answer.get())->codecs()),
+ Not(Contains(cricket::kRtxCodecName)));
+}
+
+// Test that when multiple RTX codecs are offered, only the matched RTX codec
+// is added in the answer, and the unsupported RTX codec is filtered out.
+TEST_F(MediaSessionDescriptionFactoryTest,
+ FilterOutUnsupportedRtxWhenCreatingAnswer) {
+ MediaSessionOptions opts;
+ AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video",
+ RtpTransceiverDirection::kRecvOnly, kActive,
+ &opts);
+ std::vector<VideoCodec> f1_codecs = MAKE_VECTOR(kVideoCodecs1);
+ // This creates RTX for H264-SVC in sender.
+ AddRtxCodec(cricket::CreateVideoRtxCodec(125, kVideoCodecs1[0].id),
+ &f1_codecs);
+ f1_.set_video_codecs(f1_codecs, f1_codecs);
+
+ // This creates RTX for H264 in sender.
+ AddRtxCodec(cricket::CreateVideoRtxCodec(126, kVideoCodecs1[1].id),
+ &f1_codecs);
+ f1_.set_video_codecs(f1_codecs, f1_codecs);
+
+ std::vector<VideoCodec> f2_codecs = MAKE_VECTOR(kVideoCodecs2);
+ // This creates RTX for H264 in receiver.
+ AddRtxCodec(cricket::CreateVideoRtxCodec(124, kVideoCodecs2[0].id),
+ &f2_codecs);
+ // NOTE(review): the recv codecs are set to `f1_codecs` here, not
+ // `f2_codecs` — looks unintentional; confirm against upstream before
+ // changing, as the assertions below pass either way.
+ f2_.set_video_codecs(f2_codecs, f1_codecs);
+
+ // H264-SVC codec is removed in the answer, therefore, associated RTX codec
+ // for H264-SVC should also be removed.
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+ ASSERT_TRUE(offer.get());
+ std::unique_ptr<SessionDescription> answer =
+ f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+ const VideoContentDescription* vcd =
+ GetFirstVideoContentDescription(answer.get());
+ std::vector<VideoCodec> expected_codecs = MAKE_VECTOR(kVideoCodecsAnswer);
+ AddRtxCodec(cricket::CreateVideoRtxCodec(126, kVideoCodecs1[1].id),
+ &expected_codecs);
+
+ EXPECT_EQ(expected_codecs, vcd->codecs());
+}
+
+// Test that after one RTX codec has been negotiated, a new offer can attempt
+// to add another.
+TEST_F(MediaSessionDescriptionFactoryTest, AddSecondRtxInNewOffer) {
+ MediaSessionOptions opts;
+ AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video",
+ RtpTransceiverDirection::kRecvOnly, kActive,
+ &opts);
+ std::vector<VideoCodec> f1_codecs = MAKE_VECTOR(kVideoCodecs1);
+ // This creates RTX for H264 for the offerer.
+ AddRtxCodec(cricket::CreateVideoRtxCodec(126, kVideoCodecs1[1].id),
+ &f1_codecs);
+ f1_.set_video_codecs(f1_codecs, f1_codecs);
+
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+ ASSERT_TRUE(offer);
+ const VideoContentDescription* vcd =
+ GetFirstVideoContentDescription(offer.get());
+
+ // First offer contains the base codecs plus RTX for H264.
+ std::vector<VideoCodec> expected_codecs = MAKE_VECTOR(kVideoCodecs1);
+ AddRtxCodec(cricket::CreateVideoRtxCodec(126, kVideoCodecs1[1].id),
+ &expected_codecs);
+ EXPECT_EQ(expected_codecs, vcd->codecs());
+
+ // Now, attempt to add RTX for H264-SVC.
+ AddRtxCodec(cricket::CreateVideoRtxCodec(125, kVideoCodecs1[0].id),
+ &f1_codecs);
+ f1_.set_video_codecs(f1_codecs, f1_codecs);
+
+ // Re-offer against the previous offer; the new RTX should be appended.
+ std::unique_ptr<SessionDescription> updated_offer(
+ f1_.CreateOfferOrError(opts, offer.get()).MoveValue());
+ ASSERT_TRUE(updated_offer);
+ vcd = GetFirstVideoContentDescription(updated_offer.get());
+
+ AddRtxCodec(cricket::CreateVideoRtxCodec(125, kVideoCodecs1[0].id),
+ &expected_codecs);
+ EXPECT_EQ(expected_codecs, vcd->codecs());
+}
+
+// Test that when RTX is used in conjunction with simulcast, an RTX ssrc is
+// generated for each simulcast ssrc and correctly grouped.
+TEST_F(MediaSessionDescriptionFactoryTest, SimSsrcsGenerateMultipleRtxSsrcs) {
+ MediaSessionOptions opts;
+ AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video",
+ RtpTransceiverDirection::kSendRecv, kActive,
+ &opts);
+ // Add simulcast streams (3 layers).
+ AttachSenderToMediaDescriptionOptions("video", MEDIA_TYPE_VIDEO, "stream1",
+ {"stream1label"}, 3, &opts);
+
+ // Use a single real codec, and then add RTX for it.
+ std::vector<VideoCodec> f1_codecs;
+ f1_codecs.push_back(cricket::CreateVideoCodec(97, "H264"));
+ AddRtxCodec(cricket::CreateVideoRtxCodec(125, 97), &f1_codecs);
+ f1_.set_video_codecs(f1_codecs, f1_codecs);
+
+ // Ensure that the offer has an RTX ssrc for each regular ssrc, and that there
+ // is a FID ssrc + grouping for each.
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+ ASSERT_TRUE(offer.get());
+ MediaContentDescription* media_desc =
+ offer->GetContentDescriptionByName(cricket::CN_VIDEO);
+ ASSERT_TRUE(media_desc);
+ VideoContentDescription* desc = media_desc->as_video();
+ const StreamParamsVec& streams = desc->streams();
+ // Single stream.
+ ASSERT_EQ(1u, streams.size());
+ // Stream should have 6 ssrcs: 3 for video, 3 for RTX.
+ EXPECT_EQ(6u, streams[0].ssrcs.size());
+ // And should have a SIM group for the simulcast.
+ EXPECT_TRUE(streams[0].has_ssrc_group("SIM"));
+ // And a FID group for RTX.
+ EXPECT_TRUE(streams[0].has_ssrc_group("FID"));
+ std::vector<uint32_t> primary_ssrcs;
+ streams[0].GetPrimarySsrcs(&primary_ssrcs);
+ EXPECT_EQ(3u, primary_ssrcs.size());
+ std::vector<uint32_t> fid_ssrcs;
+ streams[0].GetFidSsrcs(primary_ssrcs, &fid_ssrcs);
+ EXPECT_EQ(3u, fid_ssrcs.size());
+}
+
+// Test that, when the FlexFEC codec is added, a FlexFEC ssrc is created
+// together with a FEC-FR grouping. Guarded by WebRTC-FlexFEC-03 trial.
+TEST_F(MediaSessionDescriptionFactoryTest, GenerateFlexfecSsrc) {
+ webrtc::test::ScopedKeyValueConfig override_field_trials(
+ field_trials, "WebRTC-FlexFEC-03/Enabled/");
+ MediaSessionOptions opts;
+ AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video",
+ RtpTransceiverDirection::kSendRecv, kActive,
+ &opts);
+ // Add single stream (no simulcast, so FlexFEC is allowed).
+ AttachSenderToMediaDescriptionOptions("video", MEDIA_TYPE_VIDEO, "stream1",
+ {"stream1label"}, 1, &opts);
+
+ // Use a single real codec, and then add FlexFEC for it.
+ std::vector<VideoCodec> f1_codecs;
+ f1_codecs.push_back(cricket::CreateVideoCodec(97, "H264"));
+ f1_codecs.push_back(cricket::CreateVideoCodec(118, "flexfec-03"));
+ f1_.set_video_codecs(f1_codecs, f1_codecs);
+
+ // Ensure that the offer has a single FlexFEC ssrc and that
+ // there is no FEC-FR ssrc + grouping for each.
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+ ASSERT_TRUE(offer.get());
+ MediaContentDescription* media_desc =
+ offer->GetContentDescriptionByName(cricket::CN_VIDEO);
+ ASSERT_TRUE(media_desc);
+ VideoContentDescription* desc = media_desc->as_video();
+ const StreamParamsVec& streams = desc->streams();
+ // Single stream.
+ ASSERT_EQ(1u, streams.size());
+ // Stream should have 2 ssrcs: 1 for video, 1 for FlexFEC.
+ EXPECT_EQ(2u, streams[0].ssrcs.size());
+ // And should have a FEC-FR group for FlexFEC.
+ EXPECT_TRUE(streams[0].has_ssrc_group("FEC-FR"));
+ std::vector<uint32_t> primary_ssrcs;
+ streams[0].GetPrimarySsrcs(&primary_ssrcs);
+ ASSERT_EQ(1u, primary_ssrcs.size());
+ uint32_t flexfec_ssrc;
+ EXPECT_TRUE(streams[0].GetFecFrSsrc(primary_ssrcs[0], &flexfec_ssrc));
+ EXPECT_NE(flexfec_ssrc, 0u);
+}
+
+// Test that FlexFEC is disabled for simulcast.
+// TODO(brandtr): Remove this test when we support simulcast, either through
+// multiple FlexfecSenders, or through multistream protection.
+TEST_F(MediaSessionDescriptionFactoryTest, SimSsrcsGenerateNoFlexfecSsrcs) {
+ webrtc::test::ScopedKeyValueConfig override_field_trials(
+ field_trials, "WebRTC-FlexFEC-03/Enabled/");
+ MediaSessionOptions opts;
+ AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video",
+ RtpTransceiverDirection::kSendRecv, kActive,
+ &opts);
+ // Add simulcast streams (3 layers) — this should suppress FlexFEC.
+ AttachSenderToMediaDescriptionOptions("video", MEDIA_TYPE_VIDEO, "stream1",
+ {"stream1label"}, 3, &opts);
+
+ // Use a single real codec, and then add FlexFEC for it.
+ std::vector<VideoCodec> f1_codecs;
+ f1_codecs.push_back(cricket::CreateVideoCodec(97, "H264"));
+ f1_codecs.push_back(cricket::CreateVideoCodec(118, "flexfec-03"));
+ f1_.set_video_codecs(f1_codecs, f1_codecs);
+
+ // Ensure that the offer has no FlexFEC ssrcs for each regular ssrc, and that
+ // there is no FEC-FR ssrc + grouping for each.
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+ ASSERT_TRUE(offer.get());
+ MediaContentDescription* media_desc =
+ offer->GetContentDescriptionByName(cricket::CN_VIDEO);
+ ASSERT_TRUE(media_desc);
+ VideoContentDescription* desc = media_desc->as_video();
+ const StreamParamsVec& streams = desc->streams();
+ // Single stream.
+ ASSERT_EQ(1u, streams.size());
+ // Stream should have 3 ssrcs: 3 for video, 0 for FlexFEC.
+ EXPECT_EQ(3u, streams[0].ssrcs.size());
+ // And should have a SIM group for the simulcast.
+ EXPECT_TRUE(streams[0].has_ssrc_group("SIM"));
+ // And not a FEC-FR group for FlexFEC.
+ EXPECT_FALSE(streams[0].has_ssrc_group("FEC-FR"));
+ std::vector<uint32_t> primary_ssrcs;
+ streams[0].GetPrimarySsrcs(&primary_ssrcs);
+ EXPECT_EQ(3u, primary_ssrcs.size());
+ for (uint32_t primary_ssrc : primary_ssrcs) {
+ uint32_t flexfec_ssrc;
+ EXPECT_FALSE(streams[0].GetFecFrSsrc(primary_ssrc, &flexfec_ssrc));
+ }
+}
+
+// Create an updated offer after creating an answer to the original offer and
+// verify that the RTP header extensions that were part of the original answer
+// are not changed in the updated offer.
+TEST_F(MediaSessionDescriptionFactoryTest,
+ RespondentCreatesOfferAfterCreatingAnswerWithRtpExtensions) {
+ MediaSessionOptions opts;
+ AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts);
+
+ SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension1),
+ MAKE_VECTOR(kVideoRtpExtension1), &opts);
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+ // Answerer uses a different extension set for the answer.
+ SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension2),
+ MAKE_VECTOR(kVideoRtpExtension2), &opts);
+ std::unique_ptr<SessionDescription> answer =
+ f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+
+ EXPECT_EQ(
+ MAKE_VECTOR(kAudioRtpExtensionAnswer),
+ GetFirstAudioContentDescription(answer.get())->rtp_header_extensions());
+ EXPECT_EQ(
+ MAKE_VECTOR(kVideoRtpExtensionAnswer),
+ GetFirstVideoContentDescription(answer.get())->rtp_header_extensions());
+
+ std::unique_ptr<SessionDescription> updated_offer(
+ f2_.CreateOfferOrError(opts, answer.get()).MoveValue());
+
+ // The expected RTP header extensions in the new offer are the resulting
+ // extensions from the first offer/answer exchange plus the extensions only
+ // `f2_` offers.
+ // Since the default local extension id `f2_` uses has already been used by
+ // `f1_` for another extension, it is changed to 13.
+ const RtpExtension kUpdatedAudioRtpExtensions[] = {
+ kAudioRtpExtensionAnswer[0],
+ RtpExtension(kAudioRtpExtension2[1].uri, 13),
+ kAudioRtpExtension2[2],
+ };
+
+ // Since the default local extension id `f2_` uses has already been used by
+ // `f1_` for another extension, it is changed to 12.
+ const RtpExtension kUpdatedVideoRtpExtensions[] = {
+ kVideoRtpExtensionAnswer[0],
+ RtpExtension(kVideoRtpExtension2[1].uri, 12),
+ kVideoRtpExtension2[2],
+ };
+
+ const AudioContentDescription* updated_acd =
+ GetFirstAudioContentDescription(updated_offer.get());
+ EXPECT_EQ(MAKE_VECTOR(kUpdatedAudioRtpExtensions),
+ updated_acd->rtp_header_extensions());
+
+ const VideoContentDescription* updated_vcd =
+ GetFirstVideoContentDescription(updated_offer.get());
+ EXPECT_EQ(MAKE_VECTOR(kUpdatedVideoRtpExtensions),
+ updated_vcd->rtp_header_extensions());
+}
+
+// Verify that if the same RTP extension URI is used for audio and video, the
+// same ID is used. Also verify that the ID isn't changed when creating an
+// updated offer (this was previously a bug).
+TEST_F(MediaSessionDescriptionFactoryTest, RtpExtensionIdReused) {
+ MediaSessionOptions opts;
+ AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts);
+
+ SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension3),
+ MAKE_VECTOR(kVideoRtpExtension3), &opts);
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+
+ // Since the audio extensions used ID 3 for "both_audio_and_video", so should
+ // the video extensions.
+ const RtpExtension kExpectedVideoRtpExtension[] = {
+ kVideoRtpExtension3[0],
+ kAudioRtpExtension3[1],
+ };
+
+ EXPECT_EQ(
+ MAKE_VECTOR(kAudioRtpExtension3),
+ GetFirstAudioContentDescription(offer.get())->rtp_header_extensions());
+ EXPECT_EQ(
+ MAKE_VECTOR(kExpectedVideoRtpExtension),
+ GetFirstVideoContentDescription(offer.get())->rtp_header_extensions());
+
+ // Nothing should change when creating a new offer based on the old one.
+ std::unique_ptr<SessionDescription> updated_offer(
+ f1_.CreateOfferOrError(opts, offer.get()).MoveValue());
+
+ EXPECT_EQ(MAKE_VECTOR(kAudioRtpExtension3),
+ GetFirstAudioContentDescription(updated_offer.get())
+ ->rtp_header_extensions());
+ EXPECT_EQ(MAKE_VECTOR(kExpectedVideoRtpExtension),
+ GetFirstVideoContentDescription(updated_offer.get())
+ ->rtp_header_extensions());
+}
+
+// Same as "RtpExtensionIdReused" above for encrypted RTP extensions.
+TEST_F(MediaSessionDescriptionFactoryTest, RtpExtensionIdReusedEncrypted) {
+ MediaSessionOptions opts;
+ AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts);
+
+ // Enable RFC 6904-style encrypted header extensions on both factories.
+ f1_.set_enable_encrypted_rtp_header_extensions(true);
+ f2_.set_enable_encrypted_rtp_header_extensions(true);
+
+ SetAudioVideoRtpHeaderExtensions(
+ MAKE_VECTOR(kAudioRtpExtension3ForEncryption),
+ MAKE_VECTOR(kVideoRtpExtension3ForEncryption), &opts);
+ std::unique_ptr<SessionDescription> offer =
+ f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+
+ EXPECT_EQ(
+ MAKE_VECTOR(kAudioRtpExtension3ForEncryptionOffer),
+ GetFirstAudioContentDescription(offer.get())->rtp_header_extensions());
+ EXPECT_EQ(
+ MAKE_VECTOR(kVideoRtpExtension3ForEncryptionOffer),
+ GetFirstVideoContentDescription(offer.get())->rtp_header_extensions());
+
+ // Nothing should change when creating a new offer based on the old one.
+ std::unique_ptr<SessionDescription> updated_offer(
+ f1_.CreateOfferOrError(opts, offer.get()).MoveValue());
+
+ EXPECT_EQ(MAKE_VECTOR(kAudioRtpExtension3ForEncryptionOffer),
+ GetFirstAudioContentDescription(updated_offer.get())
+ ->rtp_header_extensions());
+ EXPECT_EQ(MAKE_VECTOR(kVideoRtpExtension3ForEncryptionOffer),
+ GetFirstVideoContentDescription(updated_offer.get())
+ ->rtp_header_extensions());
+}
+
+// Verify that SessionDescription::Clone() deep-copies groups, contents, and
+// per-content codec lists and streams.
+TEST(MediaSessionDescription, CopySessionDescription) {
+ SessionDescription source;
+ cricket::ContentGroup group(cricket::CN_AUDIO);
+ source.AddGroup(group);
+ std::unique_ptr<AudioContentDescription> acd =
+ std::make_unique<AudioContentDescription>();
+ acd->set_codecs(MAKE_VECTOR(kAudioCodecs1));
+ acd->AddLegacyStream(1);
+ source.AddContent(cricket::CN_AUDIO, MediaProtocolType::kRtp, acd->Clone());
+ std::unique_ptr<VideoContentDescription> vcd =
+ std::make_unique<VideoContentDescription>();
+ vcd->set_codecs(MAKE_VECTOR(kVideoCodecs1));
+ vcd->AddLegacyStream(2);
+ source.AddContent(cricket::CN_VIDEO, MediaProtocolType::kRtp, vcd->Clone());
+
+ std::unique_ptr<SessionDescription> copy = source.Clone();
+ ASSERT_TRUE(copy.get());
+ EXPECT_TRUE(copy->HasGroup(cricket::CN_AUDIO));
+ const ContentInfo* ac = copy->GetContentByName("audio");
+ const ContentInfo* vc = copy->GetContentByName("video");
+ ASSERT_TRUE(ac);
+ ASSERT_TRUE(vc);
+ EXPECT_EQ(MediaProtocolType::kRtp, ac->type);
+ const AudioContentDescription* acd_copy = ac->media_description()->as_audio();
+ EXPECT_EQ(acd->codecs(), acd_copy->codecs());
+ // NOTE(review): this checks `acd` (the local original), not `acd_copy` —
+ // presumably the copy was intended; confirm before changing.
+ EXPECT_EQ(1u, acd->first_ssrc());
+
+ EXPECT_EQ(MediaProtocolType::kRtp, vc->type);
+ const VideoContentDescription* vcd_copy = vc->media_description()->as_video();
+ EXPECT_EQ(vcd->codecs(), vcd_copy->codecs());
+ // NOTE(review): same as above — checks `vcd`, not `vcd_copy`.
+ EXPECT_EQ(2u, vcd->first_ssrc());
+}
+
+// The below TestTransportInfoXXX tests create different offers/answers, and
+// ensure the TransportInfo in the SessionDescription matches what we expect.
+// TestTransportInfo(offer, options, has_current_desc) is a fixture helper
+// defined earlier in this file; the first flag selects offer vs. answer.
+TEST_F(MediaSessionDescriptionFactoryTest, TestTransportInfoOfferAudio) {
+ MediaSessionOptions options;
+ AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio",
+ RtpTransceiverDirection::kRecvOnly, kActive,
+ &options);
+ TestTransportInfo(true, options, false);
+}
+
+// Offer path with ICE renomination enabled on the transport options.
+TEST_F(MediaSessionDescriptionFactoryTest,
+ TestTransportInfoOfferIceRenomination) {
+ MediaSessionOptions options;
+ AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio",
+ RtpTransceiverDirection::kRecvOnly, kActive,
+ &options);
+ options.media_description_options[0]
+ .transport_options.enable_ice_renomination = true;
+ TestTransportInfo(true, options, false);
+}
+
+// As TestTransportInfoOfferAudio, but with a current description present
+// (third argument true — the "Current" variant of this test family).
+TEST_F(MediaSessionDescriptionFactoryTest, TestTransportInfoOfferAudioCurrent) {
+  MediaSessionOptions options;
+  AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio",
+                             RtpTransceiverDirection::kRecvOnly, kActive,
+                             &options);
+  TestTransportInfo(true, options, true);
+}
+
+// Offer with both audio and video sections; no current description.
+TEST_F(MediaSessionDescriptionFactoryTest, TestTransportInfoOfferMultimedia) {
+  MediaSessionOptions options;
+  AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &options);
+  TestTransportInfo(true, options, false);
+}
+
+// Offer with audio and video sections, with a current description present.
+TEST_F(MediaSessionDescriptionFactoryTest,
+       TestTransportInfoOfferMultimediaCurrent) {
+  MediaSessionOptions options;
+  AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &options);
+  TestTransportInfo(true, options, true);
+}
+
+// Offer with audio+video and BUNDLE enabled; no current description.
+TEST_F(MediaSessionDescriptionFactoryTest, TestTransportInfoOfferBundle) {
+  MediaSessionOptions options;
+  AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &options);
+  options.bundle_enabled = true;
+  TestTransportInfo(true, options, false);
+}
+
+// Offer with audio+video, BUNDLE enabled, and a current description present.
+TEST_F(MediaSessionDescriptionFactoryTest,
+       TestTransportInfoOfferBundleCurrent) {
+  MediaSessionOptions options;
+  AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &options);
+  options.bundle_enabled = true;
+  TestTransportInfo(true, options, true);
+}
+
+// Answer (first argument false) with a single recv-only audio section.
+TEST_F(MediaSessionDescriptionFactoryTest, TestTransportInfoAnswerAudio) {
+  MediaSessionOptions options;
+  AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio",
+                             RtpTransceiverDirection::kRecvOnly, kActive,
+                             &options);
+  TestTransportInfo(false, options, false);
+}
+
+// Answer variant of the ICE-renomination transport-info test.
+TEST_F(MediaSessionDescriptionFactoryTest,
+       TestTransportInfoAnswerIceRenomination) {
+  MediaSessionOptions options;
+  AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio",
+                             RtpTransceiverDirection::kRecvOnly, kActive,
+                             &options);
+  options.media_description_options[0]
+      .transport_options.enable_ice_renomination = true;
+  TestTransportInfo(false, options, false);
+}
+
+// Answer with a single audio section and a current description present.
+TEST_F(MediaSessionDescriptionFactoryTest,
+       TestTransportInfoAnswerAudioCurrent) {
+  MediaSessionOptions options;
+  AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio",
+                             RtpTransceiverDirection::kRecvOnly, kActive,
+                             &options);
+  TestTransportInfo(false, options, true);
+}
+
+// Answer with both audio and video sections; no current description.
+TEST_F(MediaSessionDescriptionFactoryTest, TestTransportInfoAnswerMultimedia) {
+  MediaSessionOptions options;
+  AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &options);
+  TestTransportInfo(false, options, false);
+}
+
+// Answer with audio and video sections and a current description present.
+TEST_F(MediaSessionDescriptionFactoryTest,
+       TestTransportInfoAnswerMultimediaCurrent) {
+  MediaSessionOptions options;
+  AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &options);
+  TestTransportInfo(false, options, true);
+}
+
+// Answer with audio+video and BUNDLE enabled; no current description.
+TEST_F(MediaSessionDescriptionFactoryTest, TestTransportInfoAnswerBundle) {
+  MediaSessionOptions options;
+  AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &options);
+  options.bundle_enabled = true;
+  TestTransportInfo(false, options, false);
+}
+
+// Answer with audio+video, BUNDLE enabled, and a current description present.
+TEST_F(MediaSessionDescriptionFactoryTest,
+       TestTransportInfoAnswerBundleCurrent) {
+  MediaSessionOptions options;
+  AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &options);
+  options.bundle_enabled = true;
+  TestTransportInfo(false, options, true);
+}
+
+// Create an offer with bundle enabled and verify the crypto parameters are
+// the common set of the available cryptos.
+TEST_F(MediaSessionDescriptionFactoryTest, TestCryptoWithOfferBundle) {
+  // true => exercise the offer path of the shared crypto/bundle helper.
+  TestCryptoWithBundle(true);
+}
+
+// Create an answer with bundle enabled and verify the crypto parameters are
+// the common set of the available cryptos.
+TEST_F(MediaSessionDescriptionFactoryTest, TestCryptoWithAnswerBundle) {
+  // false => exercise the answer path of the shared crypto/bundle helper.
+  TestCryptoWithBundle(false);
+}
+
+// Verifies that creating answer fails if the offer has UDP/TLS/RTP/SAVPF but
+// DTLS is not enabled locally.
+TEST_F(MediaSessionDescriptionFactoryTest,
+       TestOfferDtlsSavpfWithoutDtlsFailed) {
+  // SDES enabled on both sides, DTLS disabled on both transports.
+  f1_.set_secure(SEC_ENABLED);
+  f2_.set_secure(SEC_ENABLED);
+  tdf1_.set_secure(SEC_DISABLED);
+  tdf2_.set_secure(SEC_DISABLED);
+
+  std::unique_ptr<SessionDescription> offer =
+      f1_.CreateOfferOrError(CreatePlanBMediaSessionOptions(), nullptr)
+          .MoveValue();
+  ASSERT_TRUE(offer.get());
+  ContentInfo* offer_content = offer->GetContentByName("audio");
+  ASSERT_TRUE(offer_content);
+  AudioContentDescription* offer_audio_desc =
+      offer_content->media_description()->as_audio();
+  // Force the offered protocol to UDP/TLS/RTP/SAVPF even though the local
+  // transport cannot do DTLS.
+  offer_audio_desc->set_protocol(cricket::kMediaProtocolDtlsSavpf);
+
+  std::unique_ptr<SessionDescription> answer =
+      f2_.CreateAnswerOrError(offer.get(), CreatePlanBMediaSessionOptions(),
+                              nullptr)
+          .MoveValue();
+  ASSERT_TRUE(answer);
+  ContentInfo* answer_content = answer->GetContentByName("audio");
+  ASSERT_TRUE(answer_content);
+
+  // The answerer cannot satisfy DTLS-SAVPF, so the m= section is rejected.
+  ASSERT_TRUE(answer_content->rejected);
+}
+
+// Offers UDP/TLS/RTP/SAVPF and verifies the answer can be created and contains
+// UDP/TLS/RTP/SAVPF.
+TEST_F(MediaSessionDescriptionFactoryTest, TestOfferDtlsSavpfCreateAnswer) {
+  // SDES and DTLS enabled on both sides.
+  f1_.set_secure(SEC_ENABLED);
+  f2_.set_secure(SEC_ENABLED);
+  tdf1_.set_secure(SEC_ENABLED);
+  tdf2_.set_secure(SEC_ENABLED);
+
+  std::unique_ptr<SessionDescription> offer =
+      f1_.CreateOfferOrError(CreatePlanBMediaSessionOptions(), nullptr)
+          .MoveValue();
+  ASSERT_TRUE(offer.get());
+  ContentInfo* offer_content = offer->GetContentByName("audio");
+  ASSERT_TRUE(offer_content);
+  AudioContentDescription* offer_audio_desc =
+      offer_content->media_description()->as_audio();
+  // Explicitly offer UDP/TLS/RTP/SAVPF.
+  offer_audio_desc->set_protocol(cricket::kMediaProtocolDtlsSavpf);
+
+  std::unique_ptr<SessionDescription> answer =
+      f2_.CreateAnswerOrError(offer.get(), CreatePlanBMediaSessionOptions(),
+                              nullptr)
+          .MoveValue();
+  ASSERT_TRUE(answer);
+
+  const ContentInfo* answer_content = answer->GetContentByName("audio");
+  ASSERT_TRUE(answer_content);
+  ASSERT_FALSE(answer_content->rejected);
+
+  // The answer must echo the DTLS-SAVPF protocol.
+  const AudioContentDescription* answer_audio_desc =
+      answer_content->media_description()->as_audio();
+  EXPECT_EQ(cricket::kMediaProtocolDtlsSavpf, answer_audio_desc->protocol());
+}
+
+// Test that we include both SDES and DTLS in the offer, but only include SDES
+// in the answer if DTLS isn't negotiated.
+TEST_F(MediaSessionDescriptionFactoryTest, TestCryptoDtls) {
+  // f1: SDES + DTLS. f2: SDES only (DTLS disabled) — toggled later.
+  f1_.set_secure(SEC_ENABLED);
+  f2_.set_secure(SEC_ENABLED);
+  tdf1_.set_secure(SEC_ENABLED);
+  tdf2_.set_secure(SEC_DISABLED);
+  MediaSessionOptions options;
+  AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &options);
+  std::unique_ptr<SessionDescription> offer, answer;
+  const cricket::MediaContentDescription* audio_media_desc;
+  const cricket::MediaContentDescription* video_media_desc;
+  const cricket::TransportDescription* audio_trans_desc;
+  const cricket::TransportDescription* video_trans_desc;
+
+  // Generate an offer with SDES and DTLS support.
+  offer = f1_.CreateOfferOrError(options, nullptr).MoveValue();
+  ASSERT_TRUE(offer.get());
+
+  // The offer carries the full default crypto suite set (SDES)...
+  audio_media_desc = offer->GetContentDescriptionByName("audio");
+  ASSERT_TRUE(audio_media_desc);
+  video_media_desc = offer->GetContentDescriptionByName("video");
+  ASSERT_TRUE(video_media_desc);
+  EXPECT_EQ(kDefaultCryptoSuiteSize, audio_media_desc->cryptos().size());
+  EXPECT_EQ(kDefaultCryptoSuiteSize, video_media_desc->cryptos().size());
+
+  // ...and a DTLS fingerprint on both transports.
+  audio_trans_desc = offer->GetTransportDescriptionByName("audio");
+  ASSERT_TRUE(audio_trans_desc);
+  video_trans_desc = offer->GetTransportDescriptionByName("video");
+  ASSERT_TRUE(video_trans_desc);
+  ASSERT_TRUE(audio_trans_desc->identity_fingerprint.get());
+  ASSERT_TRUE(video_trans_desc->identity_fingerprint.get());
+
+  // Generate an answer with only SDES support, since tdf2 has crypto disabled.
+  answer = f2_.CreateAnswerOrError(offer.get(), options, nullptr).MoveValue();
+  ASSERT_TRUE(answer.get());
+
+  // SDES answer: exactly one negotiated crypto per section, no fingerprint.
+  audio_media_desc = answer->GetContentDescriptionByName("audio");
+  ASSERT_TRUE(audio_media_desc);
+  video_media_desc = answer->GetContentDescriptionByName("video");
+  ASSERT_TRUE(video_media_desc);
+  EXPECT_EQ(1u, audio_media_desc->cryptos().size());
+  EXPECT_EQ(1u, video_media_desc->cryptos().size());
+
+  audio_trans_desc = answer->GetTransportDescriptionByName("audio");
+  ASSERT_TRUE(audio_trans_desc);
+  video_trans_desc = answer->GetTransportDescriptionByName("video");
+  ASSERT_TRUE(video_trans_desc);
+  ASSERT_FALSE(audio_trans_desc->identity_fingerprint.get());
+  ASSERT_FALSE(video_trans_desc->identity_fingerprint.get());
+
+  // Enable DTLS; the answer should now only have DTLS support.
+  tdf2_.set_secure(SEC_ENABLED);
+  answer = f2_.CreateAnswerOrError(offer.get(), options, nullptr).MoveValue();
+  ASSERT_TRUE(answer.get());
+
+  // DTLS answer: no SDES cryptos, SAVPF protocol, fingerprints present.
+  audio_media_desc = answer->GetContentDescriptionByName("audio");
+  ASSERT_TRUE(audio_media_desc);
+  video_media_desc = answer->GetContentDescriptionByName("video");
+  ASSERT_TRUE(video_media_desc);
+  EXPECT_TRUE(audio_media_desc->cryptos().empty());
+  EXPECT_TRUE(video_media_desc->cryptos().empty());
+  EXPECT_EQ(cricket::kMediaProtocolSavpf, audio_media_desc->protocol());
+  EXPECT_EQ(cricket::kMediaProtocolSavpf, video_media_desc->protocol());
+
+  audio_trans_desc = answer->GetTransportDescriptionByName("audio");
+  ASSERT_TRUE(audio_trans_desc);
+  video_trans_desc = answer->GetTransportDescriptionByName("video");
+  ASSERT_TRUE(video_trans_desc);
+  ASSERT_TRUE(audio_trans_desc->identity_fingerprint.get());
+  ASSERT_TRUE(video_trans_desc->identity_fingerprint.get());
+
+  // Try creating offer again. DTLS enabled now, crypto's should be empty
+  // in new offer.
+  offer = f1_.CreateOfferOrError(options, offer.get()).MoveValue();
+  ASSERT_TRUE(offer.get());
+  audio_media_desc = offer->GetContentDescriptionByName("audio");
+  ASSERT_TRUE(audio_media_desc);
+  video_media_desc = offer->GetContentDescriptionByName("video");
+  ASSERT_TRUE(video_media_desc);
+  EXPECT_TRUE(audio_media_desc->cryptos().empty());
+  EXPECT_TRUE(video_media_desc->cryptos().empty());
+
+  audio_trans_desc = offer->GetTransportDescriptionByName("audio");
+  ASSERT_TRUE(audio_trans_desc);
+  video_trans_desc = offer->GetTransportDescriptionByName("video");
+  ASSERT_TRUE(video_trans_desc);
+  ASSERT_TRUE(audio_trans_desc->identity_fingerprint.get());
+  ASSERT_TRUE(video_trans_desc->identity_fingerprint.get());
+}
+
+// Test that an answer can't be created if cryptos are required but the offer is
+// unsecure.
+// An answer can't be created when the answerer requires crypto but the offer
+// is unsecure. Fixed: the fourth setter targeted tdf1_ (already set to
+// SEC_DISABLED two lines above), which both contradicted the "unsecure offer"
+// premise and left the answerer's transport factory untouched; it should
+// configure the answerer's tdf2_.
+TEST_F(MediaSessionDescriptionFactoryTest, TestSecureAnswerToUnsecureOffer) {
+  MediaSessionOptions options = CreatePlanBMediaSessionOptions();
+  // Offerer: no SDES, no DTLS — a fully unsecure offer.
+  f1_.set_secure(SEC_DISABLED);
+  tdf1_.set_secure(SEC_DISABLED);
+  // Answerer: SDES required, DTLS-capable transport.
+  f2_.set_secure(SEC_REQUIRED);
+  tdf2_.set_secure(SEC_ENABLED);
+
+  std::unique_ptr<SessionDescription> offer =
+      f1_.CreateOfferOrError(options, nullptr).MoveValue();
+  ASSERT_TRUE(offer.get());
+
+  auto error = f2_.CreateAnswerOrError(offer.get(), options, nullptr);
+  EXPECT_FALSE(error.ok());
+}
+
+// Test that we accept a DTLS offer without SDES and create an appropriate
+// answer.
+TEST_F(MediaSessionDescriptionFactoryTest, TestCryptoOfferDtlsButNotSdes) {
+  // Offerer: DTLS only (SDES disabled). Answerer: SDES + DTLS.
+  f1_.set_secure(SEC_DISABLED);
+  f2_.set_secure(SEC_ENABLED);
+  tdf1_.set_secure(SEC_ENABLED);
+  tdf2_.set_secure(SEC_ENABLED);
+  MediaSessionOptions options;
+  AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &options);
+
+  // Generate an offer with DTLS but without SDES.
+  std::unique_ptr<SessionDescription> offer =
+      f1_.CreateOfferOrError(options, nullptr).MoveValue();
+  ASSERT_TRUE(offer.get());
+
+  // No SDES crypto params in either m= section...
+  const AudioContentDescription* audio_offer =
+      GetFirstAudioContentDescription(offer.get());
+  ASSERT_TRUE(audio_offer->cryptos().empty());
+  const VideoContentDescription* video_offer =
+      GetFirstVideoContentDescription(offer.get());
+  ASSERT_TRUE(video_offer->cryptos().empty());
+
+  // ...but DTLS fingerprints on both transports.
+  const cricket::TransportDescription* audio_offer_trans_desc =
+      offer->GetTransportDescriptionByName("audio");
+  ASSERT_TRUE(audio_offer_trans_desc->identity_fingerprint.get());
+  const cricket::TransportDescription* video_offer_trans_desc =
+      offer->GetTransportDescriptionByName("video");
+  ASSERT_TRUE(video_offer_trans_desc->identity_fingerprint.get());
+
+  // Generate an answer with DTLS.
+  std::unique_ptr<SessionDescription> answer =
+      f2_.CreateAnswerOrError(offer.get(), options, nullptr).MoveValue();
+  ASSERT_TRUE(answer.get());
+
+  // The answer also uses DTLS (fingerprints present).
+  const cricket::TransportDescription* audio_answer_trans_desc =
+      answer->GetTransportDescriptionByName("audio");
+  EXPECT_TRUE(audio_answer_trans_desc->identity_fingerprint.get());
+  const cricket::TransportDescription* video_answer_trans_desc =
+      answer->GetTransportDescriptionByName("video");
+  EXPECT_TRUE(video_answer_trans_desc->identity_fingerprint.get());
+}
+
+// Verifies if vad_enabled option is set to false, CN codecs are not present in
+// offer or answer.
+TEST_F(MediaSessionDescriptionFactoryTest, TestVADEnableOption) {
+  MediaSessionOptions options;
+  AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &options);
+  std::unique_ptr<SessionDescription> offer =
+      f1_.CreateOfferOrError(options, nullptr).MoveValue();
+  ASSERT_TRUE(offer.get());
+  // With VAD enabled (default), CN codecs are present in the offer.
+  const ContentInfo* audio_content = offer->GetContentByName("audio");
+  EXPECT_FALSE(VerifyNoCNCodecs(audio_content));
+
+  // Disabling VAD must strip CN codecs from both offer and answer.
+  options.vad_enabled = false;
+  offer = f1_.CreateOfferOrError(options, nullptr).MoveValue();
+  ASSERT_TRUE(offer.get());
+  audio_content = offer->GetContentByName("audio");
+  EXPECT_TRUE(VerifyNoCNCodecs(audio_content));
+  std::unique_ptr<SessionDescription> answer =
+      f1_.CreateAnswerOrError(offer.get(), options, nullptr).MoveValue();
+  ASSERT_TRUE(answer.get());
+  audio_content = answer->GetContentByName("audio");
+  EXPECT_TRUE(VerifyNoCNCodecs(audio_content));
+}
+
+// Test that the generated MIDs match the existing offer.
+TEST_F(MediaSessionDescriptionFactoryTest, TestMIDsMatchesExistingOffer) {
+  MediaSessionOptions opts;
+  // Use non-default MIDs so we can tell they were carried over rather than
+  // regenerated.
+  AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio_modified",
+                             RtpTransceiverDirection::kRecvOnly, kActive,
+                             &opts);
+  AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video_modified",
+                             RtpTransceiverDirection::kRecvOnly, kActive,
+                             &opts);
+  AddMediaDescriptionOptions(MEDIA_TYPE_DATA, "data_modified",
+                             RtpTransceiverDirection::kSendRecv, kActive,
+                             &opts);
+  // Create offer.
+  std::unique_ptr<SessionDescription> offer =
+      f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+  // A follow-up offer based on the first one must reuse its MIDs.
+  std::unique_ptr<SessionDescription> updated_offer(
+      f1_.CreateOfferOrError(opts, offer.get()).MoveValue());
+
+  const ContentInfo* audio_content = GetFirstAudioContent(updated_offer.get());
+  const ContentInfo* video_content = GetFirstVideoContent(updated_offer.get());
+  const ContentInfo* data_content = GetFirstDataContent(updated_offer.get());
+  ASSERT_TRUE(audio_content);
+  ASSERT_TRUE(video_content);
+  ASSERT_TRUE(data_content);
+  EXPECT_EQ("audio_modified", audio_content->name);
+  EXPECT_EQ("video_modified", video_content->name);
+  EXPECT_EQ("data_modified", data_content->name);
+}
+
+// The following tests verify that the unified plan SDP is supported.
+// Test that we can create an offer with multiple media sections of same media
+// type.
+TEST_F(MediaSessionDescriptionFactoryTest,
+       CreateOfferWithMultipleAVMediaSections) {
+  // Two audio and two video sections, each with one sender attached.
+  MediaSessionOptions opts;
+  AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio_1",
+                             RtpTransceiverDirection::kSendRecv, kActive,
+                             &opts);
+  AttachSenderToMediaDescriptionOptions(
+      "audio_1", MEDIA_TYPE_AUDIO, kAudioTrack1, {kMediaStream1}, 1, &opts);
+
+  AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video_1",
+                             RtpTransceiverDirection::kSendRecv, kActive,
+                             &opts);
+  AttachSenderToMediaDescriptionOptions(
+      "video_1", MEDIA_TYPE_VIDEO, kVideoTrack1, {kMediaStream1}, 1, &opts);
+
+  AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio_2",
+                             RtpTransceiverDirection::kSendRecv, kActive,
+                             &opts);
+  AttachSenderToMediaDescriptionOptions(
+      "audio_2", MEDIA_TYPE_AUDIO, kAudioTrack2, {kMediaStream2}, 1, &opts);
+
+  AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video_2",
+                             RtpTransceiverDirection::kSendRecv, kActive,
+                             &opts);
+  AttachSenderToMediaDescriptionOptions(
+      "video_2", MEDIA_TYPE_VIDEO, kVideoTrack2, {kMediaStream2}, 1, &opts);
+  std::unique_ptr<SessionDescription> offer =
+      f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+  ASSERT_TRUE(offer);
+
+  // Each section must be accepted, carry its own track, and be send/recv.
+  ASSERT_EQ(4u, offer->contents().size());
+  EXPECT_FALSE(offer->contents()[0].rejected);
+  const AudioContentDescription* acd =
+      offer->contents()[0].media_description()->as_audio();
+  ASSERT_EQ(1u, acd->streams().size());
+  EXPECT_EQ(kAudioTrack1, acd->streams()[0].id);
+  EXPECT_EQ(RtpTransceiverDirection::kSendRecv, acd->direction());
+
+  EXPECT_FALSE(offer->contents()[1].rejected);
+  const VideoContentDescription* vcd =
+      offer->contents()[1].media_description()->as_video();
+  ASSERT_EQ(1u, vcd->streams().size());
+  EXPECT_EQ(kVideoTrack1, vcd->streams()[0].id);
+  EXPECT_EQ(RtpTransceiverDirection::kSendRecv, vcd->direction());
+
+  EXPECT_FALSE(offer->contents()[2].rejected);
+  acd = offer->contents()[2].media_description()->as_audio();
+  ASSERT_EQ(1u, acd->streams().size());
+  EXPECT_EQ(kAudioTrack2, acd->streams()[0].id);
+  EXPECT_EQ(RtpTransceiverDirection::kSendRecv, acd->direction());
+
+  EXPECT_FALSE(offer->contents()[3].rejected);
+  vcd = offer->contents()[3].media_description()->as_video();
+  ASSERT_EQ(1u, vcd->streams().size());
+  EXPECT_EQ(kVideoTrack2, vcd->streams()[0].id);
+  EXPECT_EQ(RtpTransceiverDirection::kSendRecv, vcd->direction());
+}
+
+// Test that we can create an answer with multiple media sections of same media
+// type.
+TEST_F(MediaSessionDescriptionFactoryTest,
+       CreateAnswerWithMultipleAVMediaSections) {
+  // Mirror of CreateOfferWithMultipleAVMediaSections, but validates the
+  // answer produced by the second factory.
+  MediaSessionOptions opts;
+  AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio_1",
+                             RtpTransceiverDirection::kSendRecv, kActive,
+                             &opts);
+  AttachSenderToMediaDescriptionOptions(
+      "audio_1", MEDIA_TYPE_AUDIO, kAudioTrack1, {kMediaStream1}, 1, &opts);
+
+  AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video_1",
+                             RtpTransceiverDirection::kSendRecv, kActive,
+                             &opts);
+  AttachSenderToMediaDescriptionOptions(
+      "video_1", MEDIA_TYPE_VIDEO, kVideoTrack1, {kMediaStream1}, 1, &opts);
+
+  AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio_2",
+                             RtpTransceiverDirection::kSendRecv, kActive,
+                             &opts);
+  AttachSenderToMediaDescriptionOptions(
+      "audio_2", MEDIA_TYPE_AUDIO, kAudioTrack2, {kMediaStream2}, 1, &opts);
+
+  AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video_2",
+                             RtpTransceiverDirection::kSendRecv, kActive,
+                             &opts);
+  AttachSenderToMediaDescriptionOptions(
+      "video_2", MEDIA_TYPE_VIDEO, kVideoTrack2, {kMediaStream2}, 1, &opts);
+
+  std::unique_ptr<SessionDescription> offer =
+      f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+  ASSERT_TRUE(offer);
+  std::unique_ptr<SessionDescription> answer =
+      f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+
+  // All four sections must be accepted with their own tracks, send/recv.
+  ASSERT_EQ(4u, answer->contents().size());
+  EXPECT_FALSE(answer->contents()[0].rejected);
+  const AudioContentDescription* acd =
+      answer->contents()[0].media_description()->as_audio();
+  ASSERT_EQ(1u, acd->streams().size());
+  EXPECT_EQ(kAudioTrack1, acd->streams()[0].id);
+  EXPECT_EQ(RtpTransceiverDirection::kSendRecv, acd->direction());
+
+  EXPECT_FALSE(answer->contents()[1].rejected);
+  const VideoContentDescription* vcd =
+      answer->contents()[1].media_description()->as_video();
+  ASSERT_EQ(1u, vcd->streams().size());
+  EXPECT_EQ(kVideoTrack1, vcd->streams()[0].id);
+  EXPECT_EQ(RtpTransceiverDirection::kSendRecv, vcd->direction());
+
+  EXPECT_FALSE(answer->contents()[2].rejected);
+  acd = answer->contents()[2].media_description()->as_audio();
+  ASSERT_EQ(1u, acd->streams().size());
+  EXPECT_EQ(kAudioTrack2, acd->streams()[0].id);
+  EXPECT_EQ(RtpTransceiverDirection::kSendRecv, acd->direction());
+
+  EXPECT_FALSE(answer->contents()[3].rejected);
+  vcd = answer->contents()[3].media_description()->as_video();
+  ASSERT_EQ(1u, vcd->streams().size());
+  EXPECT_EQ(kVideoTrack2, vcd->streams()[0].id);
+  EXPECT_EQ(RtpTransceiverDirection::kSendRecv, vcd->direction());
+}
+
+// Test that the media section will be rejected in offer if the corresponding
+// MediaDescriptionOptions is stopped by the offerer.
+TEST_F(MediaSessionDescriptionFactoryTest,
+       CreateOfferWithMediaSectionStoppedByOfferer) {
+  // Create an offer with two audio sections and one of them is stopped.
+  MediaSessionOptions offer_opts;
+  AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio1",
+                             RtpTransceiverDirection::kSendRecv, kActive,
+                             &offer_opts);
+  AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio2",
+                             RtpTransceiverDirection::kInactive, kStopped,
+                             &offer_opts);
+  std::unique_ptr<SessionDescription> offer =
+      f1_.CreateOfferOrError(offer_opts, nullptr).MoveValue();
+  ASSERT_TRUE(offer);
+  // Only the stopped section is rejected in the generated offer.
+  ASSERT_EQ(2u, offer->contents().size());
+  EXPECT_FALSE(offer->contents()[0].rejected);
+  EXPECT_TRUE(offer->contents()[1].rejected);
+}
+
+// Test that the media section will be rejected in answer if the corresponding
+// MediaDescriptionOptions is stopped by the offerer.
+TEST_F(MediaSessionDescriptionFactoryTest,
+       CreateAnswerWithMediaSectionStoppedByOfferer) {
+  // Create an offer with two audio sections and one of them is stopped.
+  MediaSessionOptions offer_opts;
+  AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio1",
+                             RtpTransceiverDirection::kSendRecv, kActive,
+                             &offer_opts);
+  AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio2",
+                             RtpTransceiverDirection::kInactive, kStopped,
+                             &offer_opts);
+  std::unique_ptr<SessionDescription> offer =
+      f1_.CreateOfferOrError(offer_opts, nullptr).MoveValue();
+  ASSERT_TRUE(offer);
+  ASSERT_EQ(2u, offer->contents().size());
+  EXPECT_FALSE(offer->contents()[0].rejected);
+  EXPECT_TRUE(offer->contents()[1].rejected);
+
+  // Create an answer based on the offer.
+  MediaSessionOptions answer_opts;
+  AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio1",
+                             RtpTransceiverDirection::kSendRecv, kActive,
+                             &answer_opts);
+  AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio2",
+                             RtpTransceiverDirection::kSendRecv, kActive,
+                             &answer_opts);
+  std::unique_ptr<SessionDescription> answer =
+      f2_.CreateAnswerOrError(offer.get(), answer_opts, nullptr).MoveValue();
+  // Even though the answerer wants audio2 active, the section stays rejected
+  // because the offerer rejected it.
+  ASSERT_EQ(2u, answer->contents().size());
+  EXPECT_FALSE(answer->contents()[0].rejected);
+  EXPECT_TRUE(answer->contents()[1].rejected);
+}
+
+// Test that the media section will be rejected in answer if the corresponding
+// MediaDescriptionOptions is stopped by the answerer.
+TEST_F(MediaSessionDescriptionFactoryTest,
+       CreateAnswerWithMediaSectionRejectedByAnswerer) {
+  // Create an offer with two audio sections.
+  MediaSessionOptions offer_opts;
+  AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio1",
+                             RtpTransceiverDirection::kSendRecv, kActive,
+                             &offer_opts);
+  AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio2",
+                             RtpTransceiverDirection::kSendRecv, kActive,
+                             &offer_opts);
+  std::unique_ptr<SessionDescription> offer =
+      f1_.CreateOfferOrError(offer_opts, nullptr).MoveValue();
+  ASSERT_TRUE(offer);
+  ASSERT_EQ(2u, offer->contents().size());
+  ASSERT_FALSE(offer->contents()[0].rejected);
+  ASSERT_FALSE(offer->contents()[1].rejected);
+
+  // The answerer rejects one of the audio sections.
+  MediaSessionOptions answer_opts;
+  AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio1",
+                             RtpTransceiverDirection::kSendRecv, kActive,
+                             &answer_opts);
+  AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio2",
+                             RtpTransceiverDirection::kInactive, kStopped,
+                             &answer_opts);
+  std::unique_ptr<SessionDescription> answer =
+      f2_.CreateAnswerOrError(offer.get(), answer_opts, nullptr).MoveValue();
+  ASSERT_EQ(2u, answer->contents().size());
+  EXPECT_FALSE(answer->contents()[0].rejected);
+  EXPECT_TRUE(answer->contents()[1].rejected);
+
+  // The TransportInfo of the rejected m= section is expected to be added in the
+  // answer.
+  EXPECT_EQ(offer->transport_infos().size(), answer->transport_infos().size());
+}
+
+// Test the generated media sections has the same order of the
+// corresponding MediaDescriptionOptions.
+TEST_F(MediaSessionDescriptionFactoryTest,
+       CreateOfferRespectsMediaDescriptionOptionsOrder) {
+  MediaSessionOptions opts;
+  // This tests put video section first because normally audio comes first by
+  // default.
+  AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video",
+                             RtpTransceiverDirection::kSendRecv, kActive,
+                             &opts);
+  AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio",
+                             RtpTransceiverDirection::kSendRecv, kActive,
+                             &opts);
+  std::unique_ptr<SessionDescription> offer =
+      f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+
+  // The offer's m= sections must follow the options order, not the default.
+  ASSERT_TRUE(offer);
+  ASSERT_EQ(2u, offer->contents().size());
+  EXPECT_EQ("video", offer->contents()[0].name);
+  EXPECT_EQ("audio", offer->contents()[1].name);
+}
+
+// Test that different media sections using the same codec have same payload
+// type.
+TEST_F(MediaSessionDescriptionFactoryTest,
+       PayloadTypesSharedByMediaSectionsOfSameType) {
+  MediaSessionOptions opts;
+  AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video1",
+                             RtpTransceiverDirection::kSendRecv, kActive,
+                             &opts);
+  AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video2",
+                             RtpTransceiverDirection::kSendRecv, kActive,
+                             &opts);
+  // Create an offer with two video sections using same codecs.
+  std::unique_ptr<SessionDescription> offer =
+      f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+  ASSERT_TRUE(offer);
+  ASSERT_EQ(2u, offer->contents().size());
+  const VideoContentDescription* vcd1 =
+      offer->contents()[0].media_description()->as_video();
+  const VideoContentDescription* vcd2 =
+      offer->contents()[1].media_description()->as_video();
+  // Both sections must list the same codecs with identical payload type ids.
+  EXPECT_EQ(vcd1->codecs().size(), vcd2->codecs().size());
+  ASSERT_EQ(2u, vcd1->codecs().size());
+  EXPECT_EQ(vcd1->codecs()[0].name, vcd2->codecs()[0].name);
+  EXPECT_EQ(vcd1->codecs()[0].id, vcd2->codecs()[0].id);
+  EXPECT_EQ(vcd1->codecs()[1].name, vcd2->codecs()[1].name);
+  EXPECT_EQ(vcd1->codecs()[1].id, vcd2->codecs()[1].id);
+
+  // Create answer and negotiate the codecs.
+  std::unique_ptr<SessionDescription> answer =
+      f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+  ASSERT_TRUE(answer);
+  ASSERT_EQ(2u, answer->contents().size());
+  vcd1 = answer->contents()[0].media_description()->as_video();
+  vcd2 = answer->contents()[1].media_description()->as_video();
+  // After negotiation one common codec remains; ids still match across
+  // sections.
+  EXPECT_EQ(vcd1->codecs().size(), vcd2->codecs().size());
+  ASSERT_EQ(1u, vcd1->codecs().size());
+  EXPECT_EQ(vcd1->codecs()[0].name, vcd2->codecs()[0].name);
+  EXPECT_EQ(vcd1->codecs()[0].id, vcd2->codecs()[0].id);
+}
+
+// Test verifying that negotiating codecs with the same packetization retains
+// the packetization value.
+TEST_F(MediaSessionDescriptionFactoryTest, PacketizationIsEqual) {
+  // Both factories advertise H264 with packetization "raw".
+  std::vector f1_codecs = {cricket::CreateVideoCodec(96, "H264")};
+  f1_codecs.back().packetization = "raw";
+  f1_.set_video_codecs(f1_codecs, f1_codecs);
+
+  std::vector f2_codecs = {cricket::CreateVideoCodec(96, "H264")};
+  f2_codecs.back().packetization = "raw";
+  f2_.set_video_codecs(f2_codecs, f2_codecs);
+
+  MediaSessionOptions opts;
+  AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video1",
+                             RtpTransceiverDirection::kSendRecv, kActive,
+                             &opts);
+
+  // Create an offer with a single video section.
+  std::unique_ptr<SessionDescription> offer =
+      f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+  ASSERT_TRUE(offer);
+  ASSERT_EQ(1u, offer->contents().size());
+  const VideoContentDescription* vcd1 =
+      offer->contents()[0].media_description()->as_video();
+  ASSERT_EQ(1u, vcd1->codecs().size());
+  EXPECT_EQ(vcd1->codecs()[0].packetization, "raw");
+
+  // Create answer and negotiate the codecs.
+  std::unique_ptr<SessionDescription> answer =
+      f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+  ASSERT_TRUE(answer);
+  ASSERT_EQ(1u, answer->contents().size());
+  vcd1 = answer->contents()[0].media_description()->as_video();
+  ASSERT_EQ(1u, vcd1->codecs().size());
+  // Matching packetization values survive negotiation.
+  EXPECT_EQ(vcd1->codecs()[0].packetization, "raw");
+}
+
+// Test verifying that negotiating codecs with different packetization removes
+// the packetization value.
+TEST_F(MediaSessionDescriptionFactoryTest, PacketizationIsDifferent) {
+  // The two factories advertise H264 with conflicting packetization values.
+  std::vector f1_codecs = {cricket::CreateVideoCodec(96, "H264")};
+  f1_codecs.back().packetization = "raw";
+  f1_.set_video_codecs(f1_codecs, f1_codecs);
+
+  std::vector f2_codecs = {cricket::CreateVideoCodec(96, "H264")};
+  f2_codecs.back().packetization = "notraw";
+  f2_.set_video_codecs(f2_codecs, f2_codecs);
+
+  MediaSessionOptions opts;
+  AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video1",
+                             RtpTransceiverDirection::kSendRecv, kActive,
+                             &opts);
+
+  // Create an offer with a single video section.
+  std::unique_ptr<SessionDescription> offer =
+      f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+  ASSERT_TRUE(offer);
+  ASSERT_EQ(1u, offer->contents().size());
+  const VideoContentDescription* vcd1 =
+      offer->contents()[0].media_description()->as_video();
+  ASSERT_EQ(1u, vcd1->codecs().size());
+  EXPECT_EQ(vcd1->codecs()[0].packetization, "raw");
+
+  // Create answer and negotiate the codecs.
+  std::unique_ptr<SessionDescription> answer =
+      f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+  ASSERT_TRUE(answer);
+  ASSERT_EQ(1u, answer->contents().size());
+  vcd1 = answer->contents()[0].media_description()->as_video();
+  ASSERT_EQ(1u, vcd1->codecs().size());
+  // Conflicting packetization values are dropped during negotiation.
+  EXPECT_EQ(vcd1->codecs()[0].packetization, absl::nullopt);
+}
+
+// Test that the codec preference order per media section is respected in
+// subsequent offer.
+TEST_F(MediaSessionDescriptionFactoryTest,
+       CreateOfferRespectsCodecPreferenceOrder) {
+  MediaSessionOptions opts;
+  AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video1",
+                             RtpTransceiverDirection::kSendRecv, kActive,
+                             &opts);
+  AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video2",
+                             RtpTransceiverDirection::kSendRecv, kActive,
+                             &opts);
+  // Create an offer with two video sections using same codecs.
+  std::unique_ptr<SessionDescription> offer =
+      f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+  ASSERT_TRUE(offer);
+  ASSERT_EQ(2u, offer->contents().size());
+  VideoContentDescription* vcd1 =
+      offer->contents()[0].media_description()->as_video();
+  const VideoContentDescription* vcd2 =
+      offer->contents()[1].media_description()->as_video();
+  auto video_codecs = MAKE_VECTOR(kVideoCodecs1);
+  EXPECT_EQ(video_codecs, vcd1->codecs());
+  EXPECT_EQ(video_codecs, vcd2->codecs());
+
+  // Change the codec preference of the first video section and create a
+  // follow-up offer.
+  auto video_codecs_reverse = MAKE_VECTOR(kVideoCodecs1Reverse);
+  vcd1->set_codecs(video_codecs_reverse);
+  std::unique_ptr<SessionDescription> updated_offer(
+      f1_.CreateOfferOrError(opts, offer.get()).MoveValue());
+  vcd1 = updated_offer->contents()[0].media_description()->as_video();
+  vcd2 = updated_offer->contents()[1].media_description()->as_video();
+  // The video codec preference order should be respected, per section: only
+  // the first section's order was reversed.
+  EXPECT_EQ(video_codecs_reverse, vcd1->codecs());
+  EXPECT_EQ(video_codecs, vcd2->codecs());
+}
+
+// Test that the codec preference order per media section is respected in
+// the answer.
+TEST_F(MediaSessionDescriptionFactoryTest,
+       CreateAnswerRespectsCodecPreferenceOrder) {
+  MediaSessionOptions opts;
+  AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video1",
+                             RtpTransceiverDirection::kSendRecv, kActive,
+                             &opts);
+  AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video2",
+                             RtpTransceiverDirection::kSendRecv, kActive,
+                             &opts);
+  // Create an offer with two video sections using same codecs.
+  std::unique_ptr<SessionDescription> offer =
+      f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+  ASSERT_TRUE(offer);
+  ASSERT_EQ(2u, offer->contents().size());
+  VideoContentDescription* vcd1 =
+      offer->contents()[0].media_description()->as_video();
+  const VideoContentDescription* vcd2 =
+      offer->contents()[1].media_description()->as_video();
+  auto video_codecs = MAKE_VECTOR(kVideoCodecs1);
+  EXPECT_EQ(video_codecs, vcd1->codecs());
+  EXPECT_EQ(video_codecs, vcd2->codecs());
+
+  // Change the codec preference of the first video section and create an
+  // answer.
+  auto video_codecs_reverse = MAKE_VECTOR(kVideoCodecs1Reverse);
+  vcd1->set_codecs(video_codecs_reverse);
+  // Note: f1_ answers its own (modified) offer here.
+  std::unique_ptr<SessionDescription> answer =
+      f1_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+  vcd1 = answer->contents()[0].media_description()->as_video();
+  vcd2 = answer->contents()[1].media_description()->as_video();
+  // The video codec preference order should be respected.
+  EXPECT_EQ(video_codecs_reverse, vcd1->codecs());
+  EXPECT_EQ(video_codecs, vcd2->codecs());
+}
+
+// Test that when creating an answer, the codecs use local parameters instead of
+// the remote ones.
+TEST_F(MediaSessionDescriptionFactoryTest, CreateAnswerWithLocalCodecParams) {
+  const std::string audio_param_name = "audio_param";
+  const std::string audio_value1 = "audio_v1";
+  const std::string audio_value2 = "audio_v2";
+  const std::string video_param_name = "video_param";
+  const std::string video_value1 = "video_v1";
+  const std::string video_value2 = "video_v2";
+
+  // Both sides share the same codec lists; only the parameter values differ
+  // between f1_ (offerer) and f2_ (answerer).
+  auto audio_codecs1 = MAKE_VECTOR(kAudioCodecs1);
+  auto audio_codecs2 = MAKE_VECTOR(kAudioCodecs1);
+  auto video_codecs1 = MAKE_VECTOR(kVideoCodecs1);
+  auto video_codecs2 = MAKE_VECTOR(kVideoCodecs1);
+
+  // Set the parameters for codecs.
+  audio_codecs1[0].SetParam(audio_param_name, audio_value1);
+  video_codecs1[0].SetParam(video_param_name, video_value1);
+  audio_codecs2[0].SetParam(audio_param_name, audio_value2);
+  video_codecs2[0].SetParam(video_param_name, video_value2);
+
+  f1_.set_audio_codecs(audio_codecs1, audio_codecs1);
+  f1_.set_video_codecs(video_codecs1, video_codecs1);
+  f2_.set_audio_codecs(audio_codecs2, audio_codecs2);
+  f2_.set_video_codecs(video_codecs2, video_codecs2);
+
+  MediaSessionOptions opts;
+  AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio",
+                             RtpTransceiverDirection::kSendRecv, kActive,
+                             &opts);
+  AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video",
+                             RtpTransceiverDirection::kSendRecv, kActive,
+                             &opts);
+
+  std::unique_ptr<SessionDescription> offer =
+      f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+  ASSERT_TRUE(offer);
+  auto offer_acd = offer->contents()[0].media_description()->as_audio();
+  auto offer_vcd = offer->contents()[1].media_description()->as_video();
+  std::string value;
+  // The offer carries the offerer's (f1_) parameter values.
+  EXPECT_TRUE(offer_acd->codecs()[0].GetParam(audio_param_name, &value));
+  EXPECT_EQ(audio_value1, value);
+  EXPECT_TRUE(offer_vcd->codecs()[0].GetParam(video_param_name, &value));
+  EXPECT_EQ(video_value1, value);
+
+  std::unique_ptr<SessionDescription> answer =
+      f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+  ASSERT_TRUE(answer);
+  auto answer_acd = answer->contents()[0].media_description()->as_audio();
+  auto answer_vcd = answer->contents()[1].media_description()->as_video();
+  // Use the parameters from the local codecs.
+  EXPECT_TRUE(answer_acd->codecs()[0].GetParam(audio_param_name, &value));
+  EXPECT_EQ(audio_value2, value);
+  EXPECT_TRUE(answer_vcd->codecs()[0].GetParam(video_param_name, &value));
+  EXPECT_EQ(video_value2, value);
+}
+
+// Test that matching packetization-mode is part of the criteria for matching
+// H264 codecs (in addition to profile-level-id). Previously, this was not the
+// case, so the first H264 codec with the same profile-level-id would match and
+// the payload type in the answer would be incorrect.
+// This is a regression test for bugs.webrtc.org/8808
+TEST_F(MediaSessionDescriptionFactoryTest,
+       H264MatchCriteriaIncludesPacketizationMode) {
+  // Create two H264 codecs with the same profile level ID and different
+  // packetization modes.
+  VideoCodec h264_pm0 = cricket::CreateVideoCodec(96, "H264");
+  h264_pm0.params[cricket::kH264FmtpProfileLevelId] = "42c01f";
+  h264_pm0.params[cricket::kH264FmtpPacketizationMode] = "0";
+  VideoCodec h264_pm1 = cricket::CreateVideoCodec(97, "H264");
+  h264_pm1.params[cricket::kH264FmtpProfileLevelId] = "42c01f";
+  h264_pm1.params[cricket::kH264FmtpPacketizationMode] = "1";
+
+  // Offerer will send both codecs, answerer should choose the one with matching
+  // packetization mode (and not the first one it sees).
+  f1_.set_video_codecs({h264_pm0, h264_pm1}, {h264_pm0, h264_pm1});
+  f2_.set_video_codecs({h264_pm1}, {h264_pm1});
+
+  MediaSessionOptions opts;
+  AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video",
+                             RtpTransceiverDirection::kSendRecv, kActive,
+                             &opts);
+
+  std::unique_ptr<SessionDescription> offer =
+      f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+  ASSERT_TRUE(offer);
+
+  std::unique_ptr<SessionDescription> answer =
+      f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+  ASSERT_TRUE(answer);
+
+  // Answer should have one negotiated codec with packetization-mode=1 using the
+  // offered payload type.
+  ASSERT_EQ(1u, answer->contents().size());
+  auto answer_vcd = answer->contents()[0].media_description()->as_video();
+  ASSERT_EQ(1u, answer_vcd->codecs().size());
+  auto answer_codec = answer_vcd->codecs()[0];
+  // Payload type 97 (h264_pm1) proves the pm=1 entry, not the first pm=0
+  // entry, was matched.
+  EXPECT_EQ(h264_pm1.id, answer_codec.id);
+}
+
+// Fixture parameterized over a media protocol string (entries of
+// kMediaProtocols / kMediaProtocolsDtls); sets up two factories with
+// security and DTLS certificates enabled on both transport factories.
+class MediaProtocolTest : public ::testing::TestWithParam<const char*> {
+ public:
+  MediaProtocolTest()
+      : tdf1_(field_trials_),
+        tdf2_(field_trials_),
+        f1_(nullptr, false, &ssrc_generator1, &tdf1_),
+        f2_(nullptr, false, &ssrc_generator2, &tdf2_) {
+    f1_.set_audio_codecs(MAKE_VECTOR(kAudioCodecs1),
+                         MAKE_VECTOR(kAudioCodecs1));
+    f1_.set_video_codecs(MAKE_VECTOR(kVideoCodecs1),
+                         MAKE_VECTOR(kVideoCodecs1));
+    f2_.set_audio_codecs(MAKE_VECTOR(kAudioCodecs2),
+                         MAKE_VECTOR(kAudioCodecs2));
+    f2_.set_video_codecs(MAKE_VECTOR(kVideoCodecs2),
+                         MAKE_VECTOR(kVideoCodecs2));
+    f1_.set_secure(SEC_ENABLED);
+    f2_.set_secure(SEC_ENABLED);
+    // Each side gets its own fake SSL identity/certificate.
+    tdf1_.set_certificate(rtc::RTCCertificate::Create(
+        std::unique_ptr<rtc::SSLIdentity>(new rtc::FakeSSLIdentity("id1"))));
+    tdf2_.set_certificate(rtc::RTCCertificate::Create(
+        std::unique_ptr<rtc::SSLIdentity>(new rtc::FakeSSLIdentity("id2"))));
+    tdf1_.set_secure(SEC_ENABLED);
+    tdf2_.set_secure(SEC_ENABLED);
+  }
+
+ protected:
+  webrtc::test::ScopedKeyValueConfig field_trials_;
+  TransportDescriptionFactory tdf1_;
+  TransportDescriptionFactory tdf2_;
+  MediaSessionDescriptionFactory f1_;
+  MediaSessionDescriptionFactory f2_;
+  UniqueRandomIdGenerator ssrc_generator1;
+  UniqueRandomIdGenerator ssrc_generator2;
+};
+
+// Stamps GetParam()'s protocol string onto every content of the offer and
+// verifies the answerer accepts the contents and echoes the protocol back.
+TEST_P(MediaProtocolTest, TestAudioVideoAcceptance) {
+  MediaSessionOptions opts;
+  AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts);
+  std::unique_ptr<SessionDescription> offer =
+      f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+  ASSERT_TRUE(offer.get());
+  // Set the protocol for all the contents.
+  for (auto& content : offer.get()->contents()) {
+    content.media_description()->set_protocol(GetParam());
+  }
+  std::unique_ptr<SessionDescription> answer =
+      f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue();
+  const ContentInfo* ac = answer->GetContentByName("audio");
+  const ContentInfo* vc = answer->GetContentByName("video");
+  ASSERT_TRUE(ac);
+  ASSERT_TRUE(vc);
+  EXPECT_FALSE(ac->rejected);  // the offer is accepted
+  EXPECT_FALSE(vc->rejected);
+  const AudioContentDescription* acd = ac->media_description()->as_audio();
+  const VideoContentDescription* vcd = vc->media_description()->as_video();
+  // The answer reuses the offered protocol string for each accepted content.
+  EXPECT_EQ(GetParam(), acd->protocol());
+  EXPECT_EQ(GetParam(), vcd->protocol());
+}
+
+// Run the acceptance test over both the plain RTP protocol strings and the
+// DTLS variants.
+INSTANTIATE_TEST_SUITE_P(MediaProtocolPatternTest,
+                         MediaProtocolTest,
+                         ::testing::ValuesIn(kMediaProtocols));
+INSTANTIATE_TEST_SUITE_P(MediaProtocolDtlsPatternTest,
+                         MediaProtocolTest,
+                         ::testing::ValuesIn(kMediaProtocolsDtls));
+
+TEST_F(MediaSessionDescriptionFactoryTest, TestSetAudioCodecs) {
+  webrtc::test::ScopedKeyValueConfig field_trials;
+  TransportDescriptionFactory tdf(field_trials);
+  UniqueRandomIdGenerator ssrc_generator;
+  MediaSessionDescriptionFactory sf(nullptr, false, &ssrc_generator, &tdf);
+  std::vector<AudioCodec> send_codecs = MAKE_VECTOR(kAudioCodecs1);
+  std::vector<AudioCodec> recv_codecs = MAKE_VECTOR(kAudioCodecs2);
+
+  // The merged list of codecs should contain any send codecs that are also
+  // nominally in the receive codecs list. Payload types should be picked from
+  // the send codecs and a number-of-channels of 0 and 1 should be equivalent
+  // (set to 1). This equals what happens when the send codecs are used in an
+  // offer and the receive codecs are used in the following answer.
+  const std::vector<AudioCodec> sendrecv_codecs =
+      MAKE_VECTOR(kAudioCodecsAnswer);
+  const std::vector<AudioCodec> no_codecs;
+
+  // Guard against edits to the shared fixtures: index 2 must be iLBC for the
+  // channel-count and case tweaks below to exercise what they intend to.
+  RTC_CHECK_EQ(send_codecs[2].name, "iLBC")
+      << "Please don't change shared test data!";
+  RTC_CHECK_EQ(recv_codecs[2].name, "iLBC")
+      << "Please don't change shared test data!";
+  // Alter iLBC send codec to have zero channels, to test that that is handled
+  // properly.
+  send_codecs[2].channels = 0;
+
+  // Alter iLBC receive codec to be lowercase, to test that case conversions
+  // are handled properly.
+  recv_codecs[2].name = "ilbc";
+
+  // Test proper merge
+  sf.set_audio_codecs(send_codecs, recv_codecs);
+  EXPECT_EQ(send_codecs, sf.audio_send_codecs());
+  EXPECT_EQ(recv_codecs, sf.audio_recv_codecs());
+  EXPECT_EQ(sendrecv_codecs, sf.audio_sendrecv_codecs());
+
+  // Test empty send codecs list
+  sf.set_audio_codecs(no_codecs, recv_codecs);
+  EXPECT_EQ(no_codecs, sf.audio_send_codecs());
+  EXPECT_EQ(recv_codecs, sf.audio_recv_codecs());
+  EXPECT_EQ(no_codecs, sf.audio_sendrecv_codecs());
+
+  // Test empty recv codecs list
+  sf.set_audio_codecs(send_codecs, no_codecs);
+  EXPECT_EQ(send_codecs, sf.audio_send_codecs());
+  EXPECT_EQ(no_codecs, sf.audio_recv_codecs());
+  EXPECT_EQ(no_codecs, sf.audio_sendrecv_codecs());
+
+  // Test all empty codec lists
+  sf.set_audio_codecs(no_codecs, no_codecs);
+  EXPECT_EQ(no_codecs, sf.audio_send_codecs());
+  EXPECT_EQ(no_codecs, sf.audio_recv_codecs());
+  EXPECT_EQ(no_codecs, sf.audio_sendrecv_codecs());
+}
+
+namespace {
+// Compare the two vectors of codecs ignoring the payload type.
+// Comparison is positional: codecs1[i] is matched against codecs2[i] via
+// Codec::Matches, so order matters. Returns false on any size mismatch.
+bool CodecsMatch(const std::vector<cricket::Codec>& codecs1,
+                 const std::vector<cricket::Codec>& codecs2,
+                 const webrtc::FieldTrialsView* field_trials) {
+  if (codecs1.size() != codecs2.size()) {
+    return false;
+  }
+
+  for (size_t i = 0; i < codecs1.size(); ++i) {
+    if (!codecs1[i].Matches(codecs2[i], field_trials)) {
+      return false;
+    }
+  }
+  return true;
+}
+
+// Creates an offer with a single audio section of the given direction and
+// checks the offered codec list matches the factory's send, recv or
+// sendrecv list, as appropriate for the direction that ended up in the
+// offer.
+void TestAudioCodecsOffer(RtpTransceiverDirection direction) {
+  webrtc::test::ScopedKeyValueConfig field_trials;
+  TransportDescriptionFactory tdf(field_trials);
+  UniqueRandomIdGenerator ssrc_generator;
+  MediaSessionDescriptionFactory sf(nullptr, false, &ssrc_generator, &tdf);
+  const std::vector<AudioCodec> send_codecs = MAKE_VECTOR(kAudioCodecs1);
+  const std::vector<AudioCodec> recv_codecs = MAKE_VECTOR(kAudioCodecs2);
+  const std::vector<AudioCodec> sendrecv_codecs =
+      MAKE_VECTOR(kAudioCodecsAnswer);
+  sf.set_audio_codecs(send_codecs, recv_codecs);
+
+  MediaSessionOptions opts;
+  AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio", direction, kActive,
+                             &opts);
+
+  // A sender is only attached when the section is expected to send.
+  if (direction == RtpTransceiverDirection::kSendRecv ||
+      direction == RtpTransceiverDirection::kSendOnly) {
+    AttachSenderToMediaDescriptionOptions(
+        "audio", MEDIA_TYPE_AUDIO, kAudioTrack1, {kMediaStream1}, 1, &opts);
+  }
+
+  std::unique_ptr<SessionDescription> offer =
+      sf.CreateOfferOrError(opts, nullptr).MoveValue();
+  ASSERT_TRUE(offer.get());
+  ContentInfo* ac = offer->GetContentByName("audio");
+
+  // If the factory didn't add any audio content to the offer, we cannot check
+  // that the codecs put in are right. This happens when we neither want to
+  // send nor receive audio. The checks are still in place if at some point
+  // we'd instead create an inactive stream.
+  if (ac) {
+    AudioContentDescription* acd = ac->media_description()->as_audio();
+    // sendrecv and inactive should both present lists as if the channel was
+    // to be used for sending and receiving. Inactive essentially means it
+    // might eventually be used anything, but we don't know more at this
+    // moment.
+    if (acd->direction() == RtpTransceiverDirection::kSendOnly) {
+      EXPECT_TRUE(CodecsMatch(send_codecs, acd->codecs(), &field_trials));
+    } else if (acd->direction() == RtpTransceiverDirection::kRecvOnly) {
+      EXPECT_TRUE(CodecsMatch(recv_codecs, acd->codecs(), &field_trials));
+    } else {
+      EXPECT_TRUE(CodecsMatch(sendrecv_codecs, acd->codecs(), &field_trials));
+    }
+  }
+}
+
+// Seven audio codecs referenced by index in the tables below; each codec's
+// payload type equals its array index, so the kResult* tables can be read
+// directly as expected payload-type sequences.
+static const AudioCodec kOfferAnswerCodecs[] = {
+    cricket::CreateAudioCodec(0, "codec0", 16000, 1),
+    cricket::CreateAudioCodec(1, "codec1", 8000, 1),
+    cricket::CreateAudioCodec(2, "codec2", 8000, 1),
+    cricket::CreateAudioCodec(3, "codec3", 8000, 1),
+    cricket::CreateAudioCodec(4, "codec4", 8000, 2),
+    cricket::CreateAudioCodec(5, "codec5", 32000, 1),
+    cricket::CreateAudioCodec(6, "codec6", 48000, 1)};
+
+/* The codecs groups below are chosen as per the matrix below. The objective
+ * is to have different sets of codecs in the inputs, to get unique sets of
+ * codecs after negotiation, depending on offer and answer communication
+ * directions. One-way directions in the offer should either result in the
+ * opposite direction in the answer, or an inactive answer. Regardless, the
+ * choice of codecs should be as if the answer contained the opposite
+ * direction. Inactive offers should be treated as sendrecv/sendrecv.
+ *
+ * | Offer | Answer | Result
+ * codec|send recv sr | send recv sr | s/r r/s sr/s sr/r sr/sr
+ * 0 | x - - | - x - | x - - - -
+ * 1 | x x x | - x - | x - - x -
+ * 2 | - x - | x - - | - x - - -
+ * 3 | x x x | x - - | - x x - -
+ * 4 | - x - | x x x | - x - - -
+ * 5 | x - - | x x x | x - - - -
+ * 6 | x x x | x x x | x x x x x
+ */
+// Codecs used by offerer in the AudioCodecsAnswerTest
+static const int kOfferSendCodecs[] = {0, 1, 3, 5, 6};
+static const int kOfferRecvCodecs[] = {1, 2, 3, 4, 6};
+// Codecs used in the answerer in the AudioCodecsAnswerTest. The order is
+// jumbled to catch the answer not following the order in the offer.
+static const int kAnswerSendCodecs[] = {6, 5, 2, 3, 4};
+static const int kAnswerRecvCodecs[] = {6, 5, 4, 1, 0};
+// The resulting sets of codecs in the answer in the AudioCodecsAnswerTest
+static const int kResultSend_RecvCodecs[] = {0, 1, 5, 6};
+static const int kResultRecv_SendCodecs[] = {2, 3, 4, 6};
+static const int kResultSendrecv_SendCodecs[] = {3, 6};
+static const int kResultSendrecv_RecvCodecs[] = {1, 6};
+static const int kResultSendrecv_SendrecvCodecs[] = {6};
+
+// Builds a vector by selecting `indices` elements (in order) out of `array`.
+// IDXS is deduced from the size of the index array literal.
+template <typename T, int IDXS>
+std::vector<T> VectorFromIndices(const T* array, const int (&indices)[IDXS]) {
+  std::vector<T> out;
+  out.reserve(IDXS);
+  for (int idx : indices)
+    out.push_back(array[idx]);
+
+  return out;
+}
+
+// Negotiates a single audio section between an offerer and an answerer whose
+// send/recv codec sets are drawn from kOfferAnswerCodecs, then checks the
+// answer's codec list against the expected kResult* table for the direction
+// combination. NOTE(review): `add_legacy_stream` is not used anywhere in this
+// body; presumably a leftover from a removed legacy-stream code path —
+// confirm before dropping it and the Bool() axis in the INSTANTIATE below.
+void TestAudioCodecsAnswer(RtpTransceiverDirection offer_direction,
+                           RtpTransceiverDirection answer_direction,
+                           bool add_legacy_stream) {
+  webrtc::test::ScopedKeyValueConfig field_trials;
+  TransportDescriptionFactory offer_tdf(field_trials);
+  TransportDescriptionFactory answer_tdf(field_trials);
+  UniqueRandomIdGenerator ssrc_generator1, ssrc_generator2;
+  MediaSessionDescriptionFactory offer_factory(nullptr, false, &ssrc_generator1,
+                                               &offer_tdf);
+  MediaSessionDescriptionFactory answer_factory(nullptr, false,
+                                                &ssrc_generator2, &answer_tdf);
+
+  offer_factory.set_audio_codecs(
+      VectorFromIndices(kOfferAnswerCodecs, kOfferSendCodecs),
+      VectorFromIndices(kOfferAnswerCodecs, kOfferRecvCodecs));
+  answer_factory.set_audio_codecs(
+      VectorFromIndices(kOfferAnswerCodecs, kAnswerSendCodecs),
+      VectorFromIndices(kOfferAnswerCodecs, kAnswerRecvCodecs));
+
+  MediaSessionOptions offer_opts;
+  AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio", offer_direction,
+                             kActive, &offer_opts);
+
+  if (webrtc::RtpTransceiverDirectionHasSend(offer_direction)) {
+    AttachSenderToMediaDescriptionOptions("audio", MEDIA_TYPE_AUDIO,
+                                          kAudioTrack1, {kMediaStream1}, 1,
+                                          &offer_opts);
+  }
+
+  std::unique_ptr<SessionDescription> offer =
+      offer_factory.CreateOfferOrError(offer_opts, nullptr).MoveValue();
+  ASSERT_TRUE(offer.get());
+
+  MediaSessionOptions answer_opts;
+  AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio", answer_direction,
+                             kActive, &answer_opts);
+
+  if (webrtc::RtpTransceiverDirectionHasSend(answer_direction)) {
+    AttachSenderToMediaDescriptionOptions("audio", MEDIA_TYPE_AUDIO,
+                                          kAudioTrack1, {kMediaStream1}, 1,
+                                          &answer_opts);
+  }
+  std::unique_ptr<SessionDescription> answer =
+      answer_factory.CreateAnswerOrError(offer.get(), answer_opts, nullptr)
+          .MoveValue();
+  const ContentInfo* ac = answer->GetContentByName("audio");
+
+  // If the factory didn't add any audio content to the answer, we cannot
+  // check that the codecs put in are right. This happens when we neither want
+  // to send nor receive audio. The checks are still in place if at some point
+  // we'd instead create an inactive stream.
+  if (ac) {
+    ASSERT_EQ(MEDIA_TYPE_AUDIO, ac->media_description()->type());
+    const AudioContentDescription* acd = ac->media_description()->as_audio();
+
+    std::vector<AudioCodec> target_codecs;
+    // For offers with sendrecv or inactive, we should never reply with more
+    // codecs than offered, with these codec sets.
+    switch (offer_direction) {
+      case RtpTransceiverDirection::kInactive:
+        target_codecs = VectorFromIndices(kOfferAnswerCodecs,
+                                          kResultSendrecv_SendrecvCodecs);
+        break;
+      case RtpTransceiverDirection::kSendOnly:
+        target_codecs =
+            VectorFromIndices(kOfferAnswerCodecs, kResultSend_RecvCodecs);
+        break;
+      case RtpTransceiverDirection::kRecvOnly:
+        target_codecs =
+            VectorFromIndices(kOfferAnswerCodecs, kResultRecv_SendCodecs);
+        break;
+      case RtpTransceiverDirection::kSendRecv:
+        // For a sendrecv offer, expectations depend on which direction the
+        // answerer actually chose.
+        if (acd->direction() == RtpTransceiverDirection::kSendOnly) {
+          target_codecs =
+              VectorFromIndices(kOfferAnswerCodecs, kResultSendrecv_SendCodecs);
+        } else if (acd->direction() == RtpTransceiverDirection::kRecvOnly) {
+          target_codecs =
+              VectorFromIndices(kOfferAnswerCodecs, kResultSendrecv_RecvCodecs);
+        } else {
+          target_codecs = VectorFromIndices(kOfferAnswerCodecs,
+                                            kResultSendrecv_SendrecvCodecs);
+        }
+        break;
+      case RtpTransceiverDirection::kStopped:
+        // This does not happen in any current test.
+        RTC_DCHECK_NOTREACHED();
+    }
+
+    // Pretty-printer for failure messages: "{ 0, 1, ... }" of payload types.
+    auto format_codecs = [](const std::vector<AudioCodec>& codecs) {
+      rtc::StringBuilder os;
+      bool first = true;
+      os << "{";
+      for (const auto& c : codecs) {
+        os << (first ? " " : ", ") << c.id;
+        first = false;
+      }
+      os << " }";
+      return os.Release();
+    };
+
+    EXPECT_TRUE(acd->codecs() == target_codecs)
+        << "Expected: " << format_codecs(target_codecs)
+        << ", got: " << format_codecs(acd->codecs()) << "; Offered: "
+        << webrtc::RtpTransceiverDirectionToString(offer_direction)
+        << ", answerer wants: "
+        << webrtc::RtpTransceiverDirectionToString(answer_direction)
+        << "; got: "
+        << webrtc::RtpTransceiverDirectionToString(acd->direction());
+  } else {
+    EXPECT_EQ(offer_direction, RtpTransceiverDirection::kInactive)
+        << "Only inactive offers are allowed to not generate any audio "
+           "content";
+  }
+}
+
+} // namespace
+
+// Parameter: the direction of the single audio section in the offer.
+class AudioCodecsOfferTest
+    : public ::testing::TestWithParam<RtpTransceiverDirection> {};
+
+TEST_P(AudioCodecsOfferTest, TestCodecsInOffer) {
+  TestAudioCodecsOffer(GetParam());
+}
+
+INSTANTIATE_TEST_SUITE_P(MediaSessionDescriptionFactoryTest,
+                         AudioCodecsOfferTest,
+                         ::testing::Values(RtpTransceiverDirection::kSendOnly,
+                                           RtpTransceiverDirection::kRecvOnly,
+                                           RtpTransceiverDirection::kSendRecv,
+                                           RtpTransceiverDirection::kInactive));
+
+// Parameter tuple: <offer direction, answer direction, add_legacy_stream>.
+class AudioCodecsAnswerTest
+    : public ::testing::TestWithParam<::testing::tuple<RtpTransceiverDirection,
+                                                       RtpTransceiverDirection,
+                                                       bool>> {};
+
+TEST_P(AudioCodecsAnswerTest, TestCodecsInAnswer) {
+  TestAudioCodecsAnswer(::testing::get<0>(GetParam()),
+                        ::testing::get<1>(GetParam()),
+                        ::testing::get<2>(GetParam()));
+}
+
+// Full cross product of offer/answer directions, with and without the
+// legacy-stream flag.
+INSTANTIATE_TEST_SUITE_P(
+    MediaSessionDescriptionFactoryTest,
+    AudioCodecsAnswerTest,
+    ::testing::Combine(::testing::Values(RtpTransceiverDirection::kSendOnly,
+                                         RtpTransceiverDirection::kRecvOnly,
+                                         RtpTransceiverDirection::kSendRecv,
+                                         RtpTransceiverDirection::kInactive),
+                       ::testing::Values(RtpTransceiverDirection::kSendOnly,
+                                         RtpTransceiverDirection::kRecvOnly,
+                                         RtpTransceiverDirection::kSendRecv,
+                                         RtpTransceiverDirection::kInactive),
+                       ::testing::Bool()));
diff --git a/third_party/libwebrtc/pc/media_stream.cc b/third_party/libwebrtc/pc/media_stream.cc
new file mode 100644
index 0000000000..57be76c6c8
--- /dev/null
+++ b/third_party/libwebrtc/pc/media_stream.cc
@@ -0,0 +1,96 @@
+/*
+ * Copyright 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/media_stream.h"
+
+#include <stddef.h>
+
+#include <utility>
+
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+
+// Linear scan over a track vector for the first element whose id() equals
+// `track_id`. Returns vector->end() when no track matches.
+template <class V>
+static typename V::iterator FindTrack(V* vector, const std::string& track_id) {
+  typename V::iterator it = vector->begin();
+  for (; it != vector->end(); ++it) {
+    if ((*it)->id() == track_id) {
+      break;
+    }
+  }
+  return it;
+}
+
+// Factory for refcounted instances; the constructor is protected (see
+// pc/media_stream.h), so this is the only way to create a MediaStream.
+rtc::scoped_refptr<MediaStream> MediaStream::Create(const std::string& id) {
+  return rtc::make_ref_counted<MediaStream>(id);
+}
+
+MediaStream::MediaStream(const std::string& id) : id_(id) {}
+
+// Typed AddTrack/RemoveTrack overloads. All four forward to the private
+// templates at the bottom of this file, which perform the id-based lookup
+// and fire the observer notification on success.
+bool MediaStream::AddTrack(rtc::scoped_refptr<AudioTrackInterface> track) {
+  return AddTrack<AudioTrackVector, AudioTrackInterface>(&audio_tracks_, track);
+}
+
+bool MediaStream::AddTrack(rtc::scoped_refptr<VideoTrackInterface> track) {
+  return AddTrack<VideoTrackVector, VideoTrackInterface>(&video_tracks_, track);
+}
+
+bool MediaStream::RemoveTrack(rtc::scoped_refptr<AudioTrackInterface> track) {
+  return RemoveTrack<AudioTrackVector>(&audio_tracks_, track);
+}
+
+bool MediaStream::RemoveTrack(rtc::scoped_refptr<VideoTrackInterface> track) {
+  return RemoveTrack<VideoTrackVector>(&video_tracks_, track);
+}
+
+// Returns the audio track whose id matches `track_id`, or nullptr if absent.
+rtc::scoped_refptr<AudioTrackInterface> MediaStream::FindAudioTrack(
+    const std::string& track_id) {
+  AudioTrackVector::iterator it = FindTrack(&audio_tracks_, track_id);
+  if (it == audio_tracks_.end())
+    return nullptr;
+  return *it;
+}
+
+// Returns the video track whose id matches `track_id`, or nullptr if absent.
+rtc::scoped_refptr<VideoTrackInterface> MediaStream::FindVideoTrack(
+    const std::string& track_id) {
+  VideoTrackVector::iterator it = FindTrack(&video_tracks_, track_id);
+  if (it == video_tracks_.end())
+    return nullptr;
+  return *it;
+}
+
+// Appends `track` unless a track with the same id() already exists.
+// Returns true — and notifies observers via FireOnChanged — only when the
+// vector actually changed; a duplicate id is a no-op returning false.
+template <typename TrackVector, typename Track>
+bool MediaStream::AddTrack(TrackVector* tracks,
+                           rtc::scoped_refptr<Track> track) {
+  typename TrackVector::iterator it = FindTrack(tracks, track->id());
+  if (it != tracks->end())
+    return false;
+  tracks->emplace_back(std::move((track)));
+  FireOnChanged();
+  return true;
+}
+
+// Erases the track matching `track`'s id(). Returns false for a null track
+// or an unknown id; otherwise erases, notifies observers, and returns true.
+template <typename TrackVector>
+bool MediaStream::RemoveTrack(
+    TrackVector* tracks,
+    rtc::scoped_refptr<MediaStreamTrackInterface> track) {
+  RTC_DCHECK(tracks != NULL);
+  if (!track)
+    return false;
+  typename TrackVector::iterator it = FindTrack(tracks, track->id());
+  if (it == tracks->end())
+    return false;
+  tracks->erase(it);
+  FireOnChanged();
+  return true;
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/media_stream.h b/third_party/libwebrtc/pc/media_stream.h
new file mode 100644
index 0000000000..c033cf6f35
--- /dev/null
+++ b/third_party/libwebrtc/pc/media_stream.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file contains the implementation of MediaStreamInterface interface.
+
+#ifndef PC_MEDIA_STREAM_H_
+#define PC_MEDIA_STREAM_H_
+
+#include <string>
+
+#include "api/media_stream_interface.h"
+#include "api/notifier.h"
+#include "api/scoped_refptr.h"
+
+namespace webrtc {
+
+// Local implementation of MediaStreamInterface. Tracks are held by value in
+// plain vectors, and every successful add/remove fires the inherited
+// Notifier<> observer callback (FireOnChanged). No internal locking —
+// NOTE(review): appears to assume single-threaded use; confirm with callers.
+class MediaStream : public Notifier<MediaStreamInterface> {
+ public:
+  static rtc::scoped_refptr<MediaStream> Create(const std::string& id);
+
+  std::string id() const override { return id_; }
+
+  // Return false when the operation is a no-op (duplicate track id on add;
+  // null track or unknown id on remove). See pc/media_stream.cc.
+  bool AddTrack(rtc::scoped_refptr<AudioTrackInterface> track) override;
+  bool AddTrack(rtc::scoped_refptr<VideoTrackInterface> track) override;
+  bool RemoveTrack(rtc::scoped_refptr<AudioTrackInterface> track) override;
+  bool RemoveTrack(rtc::scoped_refptr<VideoTrackInterface> track) override;
+  // Return nullptr when no track with `track_id` exists.
+  rtc::scoped_refptr<AudioTrackInterface> FindAudioTrack(
+      const std::string& track_id) override;
+  rtc::scoped_refptr<VideoTrackInterface> FindVideoTrack(
+      const std::string& track_id) override;
+
+  // Return copies of the internal track vectors.
+  AudioTrackVector GetAudioTracks() override { return audio_tracks_; }
+  VideoTrackVector GetVideoTracks() override { return video_tracks_; }
+
+ protected:
+  // Protected: instances are created through Create().
+  explicit MediaStream(const std::string& id);
+
+ private:
+  // Shared implementations behind the four public overloads above.
+  template <typename TrackVector, typename Track>
+  bool AddTrack(TrackVector* Tracks, rtc::scoped_refptr<Track> track);
+  template <typename TrackVector>
+  bool RemoveTrack(TrackVector* Tracks,
+                   rtc::scoped_refptr<MediaStreamTrackInterface> track);
+
+  const std::string id_;
+  AudioTrackVector audio_tracks_;
+  VideoTrackVector video_tracks_;
+};
+
+} // namespace webrtc
+
+#endif // PC_MEDIA_STREAM_H_
diff --git a/third_party/libwebrtc/pc/media_stream_observer.cc b/third_party/libwebrtc/pc/media_stream_observer.cc
new file mode 100644
index 0000000000..6264a7657a
--- /dev/null
+++ b/third_party/libwebrtc/pc/media_stream_observer.cc
@@ -0,0 +1,98 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/media_stream_observer.h"
+
+#include <functional>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "absl/algorithm/container.h"
+
+namespace webrtc {
+
+// Snapshots the stream's current audio/video track lists and registers
+// `this` as an observer of `stream`; OnChanged() later diffs the live lists
+// against these snapshots to synthesize per-track added/removed callbacks.
+MediaStreamObserver::MediaStreamObserver(
+    MediaStreamInterface* stream,
+    std::function<void(AudioTrackInterface*, MediaStreamInterface*)>
+        audio_track_added_callback,
+    std::function<void(AudioTrackInterface*, MediaStreamInterface*)>
+        audio_track_removed_callback,
+    std::function<void(VideoTrackInterface*, MediaStreamInterface*)>
+        video_track_added_callback,
+    std::function<void(VideoTrackInterface*, MediaStreamInterface*)>
+        video_track_removed_callback)
+    : stream_(stream),
+      cached_audio_tracks_(stream->GetAudioTracks()),
+      cached_video_tracks_(stream->GetVideoTracks()),
+      audio_track_added_callback_(std::move(audio_track_added_callback)),
+      audio_track_removed_callback_(std::move(audio_track_removed_callback)),
+      video_track_added_callback_(std::move(video_track_added_callback)),
+      video_track_removed_callback_(std::move(video_track_removed_callback)) {
+  stream_->RegisterObserver(this);
+}
+
+MediaStreamObserver::~MediaStreamObserver() {
+  // Symmetric with RegisterObserver in the constructor.
+  stream_->UnregisterObserver(this);
+}
+
+// Invoked by the observed stream when its track set changes. Computes the
+// delta against the cached snapshots — tracks are compared by id(), not by
+// pointer identity — fires the matching added/removed callbacks, then
+// refreshes the caches.
+void MediaStreamObserver::OnChanged() {
+  AudioTrackVector new_audio_tracks = stream_->GetAudioTracks();
+  VideoTrackVector new_video_tracks = stream_->GetVideoTracks();
+
+  // Find removed audio tracks.
+  for (const auto& cached_track : cached_audio_tracks_) {
+    if (absl::c_none_of(
+            new_audio_tracks,
+            [cached_track](const AudioTrackVector::value_type& new_track) {
+              return new_track->id() == cached_track->id();
+            })) {
+      audio_track_removed_callback_(cached_track.get(), stream_.get());
+    }
+  }
+
+  // Find added audio tracks.
+  for (const auto& new_track : new_audio_tracks) {
+    if (absl::c_none_of(
+            cached_audio_tracks_,
+            [new_track](const AudioTrackVector::value_type& cached_track) {
+              return new_track->id() == cached_track->id();
+            })) {
+      audio_track_added_callback_(new_track.get(), stream_.get());
+    }
+  }
+
+  // Find removed video tracks.
+  for (const auto& cached_track : cached_video_tracks_) {
+    if (absl::c_none_of(
+            new_video_tracks,
+            [cached_track](const VideoTrackVector::value_type& new_track) {
+              return new_track->id() == cached_track->id();
+            })) {
+      video_track_removed_callback_(cached_track.get(), stream_.get());
+    }
+  }
+
+  // Find added video tracks.
+  for (const auto& new_track : new_video_tracks) {
+    if (absl::c_none_of(
+            cached_video_tracks_,
+            [new_track](const VideoTrackVector::value_type& cached_track) {
+              return new_track->id() == cached_track->id();
+            })) {
+      video_track_added_callback_(new_track.get(), stream_.get());
+    }
+  }
+
+  // Refresh the snapshots for the next OnChanged() delta.
+  cached_audio_tracks_ = new_audio_tracks;
+  cached_video_tracks_ = new_video_tracks;
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/media_stream_observer.h b/third_party/libwebrtc/pc/media_stream_observer.h
new file mode 100644
index 0000000000..83bbd20994
--- /dev/null
+++ b/third_party/libwebrtc/pc/media_stream_observer.h
@@ -0,0 +1,57 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_MEDIA_STREAM_OBSERVER_H_
+#define PC_MEDIA_STREAM_OBSERVER_H_
+
+#include <functional>
+
+#include "api/media_stream_interface.h"
+#include "api/scoped_refptr.h"
+
+namespace webrtc {
+
+// Helper class which will listen for changes to a stream and emit the
+// corresponding signals.
+class MediaStreamObserver : public ObserverInterface {
+ public:
+  // Registers itself as an observer of `stream` and snapshots its current
+  // track lists; the four callbacks are invoked from OnChanged() as tracks
+  // are added to / removed from the stream.
+  explicit MediaStreamObserver(
+      MediaStreamInterface* stream,
+      std::function<void(AudioTrackInterface*, MediaStreamInterface*)>
+          audio_track_added_callback,
+      std::function<void(AudioTrackInterface*, MediaStreamInterface*)>
+          audio_track_removed_callback,
+      std::function<void(VideoTrackInterface*, MediaStreamInterface*)>
+          video_track_added_callback,
+      std::function<void(VideoTrackInterface*, MediaStreamInterface*)>
+          video_track_removed_callback);
+  // Unregisters from the stream.
+  ~MediaStreamObserver() override;
+
+  const MediaStreamInterface* stream() const { return stream_.get(); }
+
+  // ObserverInterface: diffs the stream's tracks against the cached lists
+  // and fires the per-track callbacks.
+  void OnChanged() override;
+
+ private:
+  rtc::scoped_refptr<MediaStreamInterface> stream_;
+  // Snapshots used by OnChanged() to detect additions and removals.
+  AudioTrackVector cached_audio_tracks_;
+  VideoTrackVector cached_video_tracks_;
+  const std::function<void(AudioTrackInterface*, MediaStreamInterface*)>
+      audio_track_added_callback_;
+  const std::function<void(AudioTrackInterface*, MediaStreamInterface*)>
+      audio_track_removed_callback_;
+  const std::function<void(VideoTrackInterface*, MediaStreamInterface*)>
+      video_track_added_callback_;
+  const std::function<void(VideoTrackInterface*, MediaStreamInterface*)>
+      video_track_removed_callback_;
+};
+
+} // namespace webrtc
+
+#endif // PC_MEDIA_STREAM_OBSERVER_H_
diff --git a/third_party/libwebrtc/pc/media_stream_proxy.h b/third_party/libwebrtc/pc/media_stream_proxy.h
new file mode 100644
index 0000000000..3e263bfd8b
--- /dev/null
+++ b/third_party/libwebrtc/pc/media_stream_proxy.h
@@ -0,0 +1,44 @@
+/*
+ * Copyright 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_MEDIA_STREAM_PROXY_H_
+#define PC_MEDIA_STREAM_PROXY_H_
+
+#include <string>
+
+#include "api/media_stream_interface.h"
+#include "pc/proxy.h"
+
+namespace webrtc {
+
+// TODO(deadbeef): Move this to a .cc file. What threads methods are called on
+// is an implementation detail.
+//
+// Proxy for MediaStreamInterface. Most methods are marshalled to the primary
+// (signaling) thread; `id()` bypasses the proxy entirely since it returns an
+// immutable value.
+BEGIN_PRIMARY_PROXY_MAP(MediaStream)
+PROXY_PRIMARY_THREAD_DESTRUCTOR()
+BYPASS_PROXY_CONSTMETHOD0(std::string, id)
+PROXY_METHOD0(AudioTrackVector, GetAudioTracks)
+PROXY_METHOD0(VideoTrackVector, GetVideoTracks)
+PROXY_METHOD1(rtc::scoped_refptr<AudioTrackInterface>,
+              FindAudioTrack,
+              const std::string&)
+PROXY_METHOD1(rtc::scoped_refptr<VideoTrackInterface>,
+              FindVideoTrack,
+              const std::string&)
+PROXY_METHOD1(bool, AddTrack, rtc::scoped_refptr<AudioTrackInterface>)
+PROXY_METHOD1(bool, AddTrack, rtc::scoped_refptr<VideoTrackInterface>)
+PROXY_METHOD1(bool, RemoveTrack, rtc::scoped_refptr<AudioTrackInterface>)
+PROXY_METHOD1(bool, RemoveTrack, rtc::scoped_refptr<VideoTrackInterface>)
+PROXY_METHOD1(void, RegisterObserver, ObserverInterface*)
+PROXY_METHOD1(void, UnregisterObserver, ObserverInterface*)
+END_PROXY_MAP(MediaStream)
+
+} // namespace webrtc
+
+#endif // PC_MEDIA_STREAM_PROXY_H_
diff --git a/third_party/libwebrtc/pc/media_stream_track_proxy.h b/third_party/libwebrtc/pc/media_stream_track_proxy.h
new file mode 100644
index 0000000000..2af3aedb22
--- /dev/null
+++ b/third_party/libwebrtc/pc/media_stream_track_proxy.h
@@ -0,0 +1,65 @@
+/*
+ * Copyright 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file includes proxy classes for tracks. The purpose is
+// to make sure tracks are only accessed from the signaling thread.
+
+#ifndef PC_MEDIA_STREAM_TRACK_PROXY_H_
+#define PC_MEDIA_STREAM_TRACK_PROXY_H_
+
+#include <string>
+
+#include "api/media_stream_interface.h"
+#include "pc/proxy.h"
+
+namespace webrtc {
+
+// TODO(deadbeef): Move this to .cc file. What threads methods are called on is
+// an implementation detail.
+//
+// Proxy for AudioTrackInterface: marshals calls to the primary (signaling)
+// thread. `kind`, `id` and `GetSource` bypass the proxy — presumably because
+// they return values that are immutable or safe to read from any thread
+// (NOTE(review): confirm against the track implementation).
+BEGIN_PRIMARY_PROXY_MAP(AudioTrack)
+PROXY_PRIMARY_THREAD_DESTRUCTOR()
+BYPASS_PROXY_CONSTMETHOD0(std::string, kind)
+BYPASS_PROXY_CONSTMETHOD0(std::string, id)
+PROXY_CONSTMETHOD0(TrackState, state)
+PROXY_CONSTMETHOD0(bool, enabled)
+BYPASS_PROXY_CONSTMETHOD0(AudioSourceInterface*, GetSource)
+PROXY_METHOD1(void, AddSink, AudioTrackSinkInterface*)
+PROXY_METHOD1(void, RemoveSink, AudioTrackSinkInterface*)
+PROXY_METHOD1(bool, GetSignalLevel, int*)
+PROXY_METHOD0(rtc::scoped_refptr<AudioProcessorInterface>, GetAudioProcessor)
+PROXY_METHOD1(bool, set_enabled, bool)
+PROXY_METHOD1(void, RegisterObserver, ObserverInterface*)
+PROXY_METHOD1(void, UnregisterObserver, ObserverInterface*)
+END_PROXY_MAP(AudioTrack)
+
+// Proxy for VideoTrackInterface. Unlike AudioTrack this is a two-thread proxy:
+// sink management (`AddOrUpdateSink`, `RemoveSink`, `RequestRefreshFrame`) and
+// `state` are marshalled to the secondary thread, while enabled/content-hint
+// and observer registration go to the primary thread. `kind`, `id` and
+// `GetSource` bypass proxying entirely.
+BEGIN_PROXY_MAP(VideoTrack)
+PROXY_PRIMARY_THREAD_DESTRUCTOR()
+BYPASS_PROXY_CONSTMETHOD0(std::string, kind)
+BYPASS_PROXY_CONSTMETHOD0(std::string, id)
+PROXY_SECONDARY_CONSTMETHOD0(TrackState, state)
+PROXY_CONSTMETHOD0(bool, enabled)
+PROXY_METHOD1(bool, set_enabled, bool)
+PROXY_CONSTMETHOD0(ContentHint, content_hint)
+PROXY_METHOD1(void, set_content_hint, ContentHint)
+PROXY_SECONDARY_METHOD2(void,
+                        AddOrUpdateSink,
+                        rtc::VideoSinkInterface<VideoFrame>*,
+                        const rtc::VideoSinkWants&)
+PROXY_SECONDARY_METHOD1(void, RemoveSink, rtc::VideoSinkInterface<VideoFrame>*)
+PROXY_SECONDARY_METHOD0(void, RequestRefreshFrame)
+BYPASS_PROXY_CONSTMETHOD0(VideoTrackSourceInterface*, GetSource)
+
+PROXY_METHOD1(void, RegisterObserver, ObserverInterface*)
+PROXY_METHOD1(void, UnregisterObserver, ObserverInterface*)
+END_PROXY_MAP(VideoTrack)
+
+} // namespace webrtc
+
+#endif // PC_MEDIA_STREAM_TRACK_PROXY_H_
diff --git a/third_party/libwebrtc/pc/media_stream_unittest.cc b/third_party/libwebrtc/pc/media_stream_unittest.cc
new file mode 100644
index 0000000000..f55ea203fb
--- /dev/null
+++ b/third_party/libwebrtc/pc/media_stream_unittest.cc
@@ -0,0 +1,153 @@
+/*
+ * Copyright 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/media_stream.h"
+
+#include <stddef.h>
+
+#include "pc/audio_track.h"
+#include "pc/test/fake_video_track_source.h"
+#include "pc/video_track.h"
+#include "rtc_base/thread.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+static const char kStreamId1[] = "local_stream_1";
+static const char kVideoTrackId[] = "dummy_video_cam_1";
+static const char kAudioTrackId[] = "dummy_microphone_1";
+
+using rtc::scoped_refptr;
+using ::testing::Exactly;
+
+namespace webrtc {
+
+// GMock observer that registers itself with a NotifierInterface on
+// construction and unregisters on destruction (or via explicit Unregister()).
+// Tests use it to set call-count expectations on OnChanged().
+class MockObserver : public ObserverInterface {
+ public:
+  explicit MockObserver(NotifierInterface* notifier) : notifier_(notifier) {
+    notifier_->RegisterObserver(this);
+  }
+
+  ~MockObserver() { Unregister(); }
+
+  // Idempotent: clears `notifier_` so a second call (including the one from
+  // the destructor) is a no-op.
+  void Unregister() {
+    if (notifier_) {
+      notifier_->UnregisterObserver(this);
+      notifier_ = nullptr;
+    }
+  }
+
+  MOCK_METHOD(void, OnChanged, (), (override));
+
+ private:
+  NotifierInterface* notifier_;  // Non-owning; null after unregistration.
+};
+
+// Fixture that builds a MediaStream containing one video and one audio track.
+// SetUp() also verifies that adding the same track twice is rejected.
+class MediaStreamTest : public ::testing::Test {
+ protected:
+  virtual void SetUp() {
+    stream_ = MediaStream::Create(kStreamId1);
+    ASSERT_TRUE(stream_.get() != NULL);
+
+    video_track_ = VideoTrack::Create(
+        kVideoTrackId, FakeVideoTrackSource::Create(), rtc::Thread::Current());
+    ASSERT_TRUE(video_track_.get() != NULL);
+    EXPECT_EQ(MediaStreamTrackInterface::kLive, video_track_->state());
+
+    // Audio track with a null source; still expected to start in kLive state.
+    audio_track_ = AudioTrack::Create(kAudioTrackId, nullptr);
+
+    ASSERT_TRUE(audio_track_.get() != NULL);
+    EXPECT_EQ(MediaStreamTrackInterface::kLive, audio_track_->state());
+
+    // A second AddTrack of the same track must fail.
+    EXPECT_TRUE(stream_->AddTrack(video_track_));
+    EXPECT_FALSE(stream_->AddTrack(video_track_));
+    EXPECT_TRUE(stream_->AddTrack(audio_track_));
+    EXPECT_FALSE(stream_->AddTrack(audio_track_));
+  }
+
+  // Disables `track` and expects exactly one OnChanged() notification.
+  void ChangeTrack(MediaStreamTrackInterface* track) {
+    MockObserver observer(track);
+
+    EXPECT_CALL(observer, OnChanged()).Times(Exactly(1));
+    track->set_enabled(false);
+    EXPECT_FALSE(track->enabled());
+  }
+
+  rtc::AutoThread main_thread_;
+  scoped_refptr<MediaStreamInterface> stream_;
+  scoped_refptr<AudioTrackInterface> audio_track_;
+  scoped_refptr<VideoTrackInterface> video_track_;
+};
+
+// Verifies that GetAudioTracks/GetVideoTracks and FindAudioTrack/
+// FindVideoTrack all return the tracks added in SetUp(), with the expected
+// ids and enabled state.
+TEST_F(MediaStreamTest, GetTrackInfo) {
+  ASSERT_EQ(1u, stream_->GetVideoTracks().size());
+  ASSERT_EQ(1u, stream_->GetAudioTracks().size());
+
+  // Verify the video track.
+  scoped_refptr<webrtc::MediaStreamTrackInterface> video_track(
+      stream_->GetVideoTracks()[0]);
+  EXPECT_EQ(0, video_track->id().compare(kVideoTrackId));
+  EXPECT_TRUE(video_track->enabled());
+
+  // GetVideoTracks() and FindVideoTrack() must return the same object.
+  ASSERT_EQ(1u, stream_->GetVideoTracks().size());
+  EXPECT_TRUE(stream_->GetVideoTracks()[0].get() == video_track.get());
+  EXPECT_TRUE(stream_->FindVideoTrack(video_track->id()).get() ==
+              video_track.get());
+  video_track = stream_->GetVideoTracks()[0];
+  EXPECT_EQ(0, video_track->id().compare(kVideoTrackId));
+  EXPECT_TRUE(video_track->enabled());
+
+  // Verify the audio track.
+  scoped_refptr<webrtc::MediaStreamTrackInterface> audio_track(
+      stream_->GetAudioTracks()[0]);
+  EXPECT_EQ(0, audio_track->id().compare(kAudioTrackId));
+  EXPECT_TRUE(audio_track->enabled());
+  ASSERT_EQ(1u, stream_->GetAudioTracks().size());
+  EXPECT_TRUE(stream_->GetAudioTracks()[0].get() == audio_track.get());
+  EXPECT_TRUE(stream_->FindAudioTrack(audio_track->id()).get() ==
+              audio_track.get());
+  audio_track = stream_->GetAudioTracks()[0];
+  EXPECT_EQ(0, audio_track->id().compare(kAudioTrackId));
+  EXPECT_TRUE(audio_track->enabled());
+}
+
+// Verifies RemoveTrack: one OnChanged() per successful removal (two total),
+// a second removal of the same track fails, and removing a null track fails.
+TEST_F(MediaStreamTest, RemoveTrack) {
+  MockObserver observer(stream_.get());
+
+  // One notification per track actually removed.
+  EXPECT_CALL(observer, OnChanged()).Times(Exactly(2));
+
+  EXPECT_TRUE(stream_->RemoveTrack(audio_track_));
+  EXPECT_FALSE(stream_->RemoveTrack(audio_track_));
+  EXPECT_EQ(0u, stream_->GetAudioTracks().size());
+  EXPECT_EQ(0u, stream_->GetAudioTracks().size());
+
+  EXPECT_TRUE(stream_->RemoveTrack(video_track_));
+  EXPECT_FALSE(stream_->RemoveTrack(video_track_));
+
+  EXPECT_EQ(0u, stream_->GetVideoTracks().size());
+  EXPECT_EQ(0u, stream_->GetVideoTracks().size());
+
+  // Removing a null track must fail without notifying.
+  EXPECT_FALSE(stream_->RemoveTrack(rtc::scoped_refptr<AudioTrackInterface>()));
+  EXPECT_FALSE(stream_->RemoveTrack(rtc::scoped_refptr<VideoTrackInterface>()));
+}
+
+// Disabling the video track must fire exactly one OnChanged() on the track.
+TEST_F(MediaStreamTest, ChangeVideoTrack) {
+  scoped_refptr<webrtc::VideoTrackInterface> video_track(
+      stream_->GetVideoTracks()[0]);
+  ChangeTrack(video_track.get());
+}
+
+// Disabling the audio track must fire exactly one OnChanged() on the track.
+TEST_F(MediaStreamTest, ChangeAudioTrack) {
+  scoped_refptr<webrtc::AudioTrackInterface> audio_track(
+      stream_->GetAudioTracks()[0]);
+  ChangeTrack(audio_track.get());
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/peer_connection.cc b/third_party/libwebrtc/pc/peer_connection.cc
new file mode 100644
index 0000000000..183cbeb7cd
--- /dev/null
+++ b/third_party/libwebrtc/pc/peer_connection.cc
@@ -0,0 +1,3046 @@
+/*
+ * Copyright 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/peer_connection.h"
+
+#include <limits.h>
+#include <stddef.h>
+
+#include <algorithm>
+#include <memory>
+#include <set>
+#include <string>
+#include <utility>
+
+#include "absl/algorithm/container.h"
+#include "absl/strings/match.h"
+#include "absl/strings/string_view.h"
+#include "absl/types/optional.h"
+#include "api/jsep_ice_candidate.h"
+#include "api/media_types.h"
+#include "api/rtp_parameters.h"
+#include "api/rtp_transceiver_direction.h"
+#include "api/uma_metrics.h"
+#include "api/video/video_codec_constants.h"
+#include "call/audio_state.h"
+#include "call/packet_receiver.h"
+#include "media/base/media_channel.h"
+#include "media/base/media_config.h"
+#include "media/base/media_engine.h"
+#include "media/base/rid_description.h"
+#include "media/base/stream_params.h"
+#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "p2p/base/basic_async_resolver_factory.h"
+#include "p2p/base/connection.h"
+#include "p2p/base/connection_info.h"
+#include "p2p/base/dtls_transport_internal.h"
+#include "p2p/base/p2p_constants.h"
+#include "p2p/base/p2p_transport_channel.h"
+#include "p2p/base/transport_info.h"
+#include "pc/ice_server_parsing.h"
+#include "pc/rtp_receiver.h"
+#include "pc/rtp_receiver_proxy.h"
+#include "pc/rtp_sender.h"
+#include "pc/rtp_sender_proxy.h"
+#include "pc/sctp_transport.h"
+#include "pc/simulcast_description.h"
+#include "pc/webrtc_session_description_factory.h"
+#include "rtc_base/helpers.h"
+#include "rtc_base/ip_address.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/net_helper.h"
+#include "rtc_base/network.h"
+#include "rtc_base/network_constants.h"
+#include "rtc_base/socket_address.h"
+#include "rtc_base/string_encode.h"
+#include "rtc_base/trace_event.h"
+#include "rtc_base/unique_id_generator.h"
+#include "system_wrappers/include/metrics.h"
+
+using cricket::ContentInfo;
+using cricket::ContentInfos;
+using cricket::MediaContentDescription;
+using cricket::MediaProtocolType;
+using cricket::RidDescription;
+using cricket::RidDirection;
+using cricket::SessionDescription;
+using cricket::SimulcastDescription;
+using cricket::SimulcastLayer;
+using cricket::SimulcastLayerList;
+using cricket::StreamParams;
+using cricket::TransportInfo;
+
+using cricket::LOCAL_PORT_TYPE;
+using cricket::PRFLX_PORT_TYPE;
+using cricket::RELAY_PORT_TYPE;
+using cricket::STUN_PORT_TYPE;
+
+namespace webrtc {
+
+namespace {
+
+// UMA metric names.
+const char kSimulcastNumberOfEncodings[] =
+ "WebRTC.PeerConnection.Simulcast.NumberOfSendEncodings";
+
+static const int REPORT_USAGE_PATTERN_DELAY_MS = 60000;
+
+// Maps the public IceTransportsType policy onto the internal cricket
+// candidate-filter bitmask: kNone gathers nothing, kRelay only relay
+// candidates, kNoHost everything except host candidates, kAll everything.
+// Unknown values DCHECK and fall back to CF_NONE.
+uint32_t ConvertIceTransportTypeToCandidateFilter(
+    PeerConnectionInterface::IceTransportsType type) {
+  switch (type) {
+    case PeerConnectionInterface::kNone:
+      return cricket::CF_NONE;
+    case PeerConnectionInterface::kRelay:
+      return cricket::CF_RELAY;
+    case PeerConnectionInterface::kNoHost:
+      return (cricket::CF_ALL & ~cricket::CF_HOST);
+    case PeerConnectionInterface::kAll:
+      return cricket::CF_ALL;
+    default:
+      RTC_DCHECK_NOTREACHED();
+  }
+  return cricket::CF_NONE;
+}
+
+// Classifies a (local, remote) candidate pair into an IceCandidatePairType
+// enum bucket, presumably for UMA reporting (NOTE(review): confirm at call
+// site). Host/host pairs are further subdivided by whether each address is an
+// unresolved hostname, a private IP, or a public IP. Combinations not listed
+// fall through to kIceCandidatePairMax.
+IceCandidatePairType GetIceCandidatePairCounter(
+    const cricket::Candidate& local,
+    const cricket::Candidate& remote) {
+  const auto& l = local.type();
+  const auto& r = remote.type();
+  const auto& host = LOCAL_PORT_TYPE;
+  const auto& srflx = STUN_PORT_TYPE;
+  const auto& relay = RELAY_PORT_TYPE;
+  const auto& prflx = PRFLX_PORT_TYPE;
+  if (l == host && r == host) {
+    // A non-empty hostname with an unresolved IP means an mDNS/hostname
+    // candidate; otherwise classify by private vs. public IP.
+    bool local_hostname =
+        !local.address().hostname().empty() && local.address().IsUnresolvedIP();
+    bool remote_hostname = !remote.address().hostname().empty() &&
+                           remote.address().IsUnresolvedIP();
+    bool local_private = IPIsPrivate(local.address().ipaddr());
+    bool remote_private = IPIsPrivate(remote.address().ipaddr());
+    if (local_hostname) {
+      if (remote_hostname) {
+        return kIceCandidatePairHostNameHostName;
+      } else if (remote_private) {
+        return kIceCandidatePairHostNameHostPrivate;
+      } else {
+        return kIceCandidatePairHostNameHostPublic;
+      }
+    } else if (local_private) {
+      if (remote_hostname) {
+        return kIceCandidatePairHostPrivateHostName;
+      } else if (remote_private) {
+        return kIceCandidatePairHostPrivateHostPrivate;
+      } else {
+        return kIceCandidatePairHostPrivateHostPublic;
+      }
+    } else {
+      if (remote_hostname) {
+        return kIceCandidatePairHostPublicHostName;
+      } else if (remote_private) {
+        return kIceCandidatePairHostPublicHostPrivate;
+      } else {
+        return kIceCandidatePairHostPublicHostPublic;
+      }
+    }
+  }
+  // All remaining type combinations map 1:1 onto enum values.
+  if (l == host && r == srflx)
+    return kIceCandidatePairHostSrflx;
+  if (l == host && r == relay)
+    return kIceCandidatePairHostRelay;
+  if (l == host && r == prflx)
+    return kIceCandidatePairHostPrflx;
+  if (l == srflx && r == host)
+    return kIceCandidatePairSrflxHost;
+  if (l == srflx && r == srflx)
+    return kIceCandidatePairSrflxSrflx;
+  if (l == srflx && r == relay)
+    return kIceCandidatePairSrflxRelay;
+  if (l == srflx && r == prflx)
+    return kIceCandidatePairSrflxPrflx;
+  if (l == relay && r == host)
+    return kIceCandidatePairRelayHost;
+  if (l == relay && r == srflx)
+    return kIceCandidatePairRelaySrflx;
+  if (l == relay && r == relay)
+    return kIceCandidatePairRelayRelay;
+  if (l == relay && r == prflx)
+    return kIceCandidatePairRelayPrflx;
+  if (l == prflx && r == host)
+    return kIceCandidatePairPrflxHost;
+  if (l == prflx && r == srflx)
+    return kIceCandidatePairPrflxSrflx;
+  if (l == prflx && r == relay)
+    return kIceCandidatePairPrflxRelay;
+  return kIceCandidatePairMax;
+}
+
+// Translates an RTCConfiguration int field into the optional<int> form used
+// by cricket::IceConfig: the kUndefined sentinel becomes absl::nullopt, any
+// other value passes through unchanged.
+absl::optional<int> RTCConfigurationToIceConfigOptionalInt(
+    int rtc_configuration_parameter) {
+  if (rtc_configuration_parameter ==
+      webrtc::PeerConnectionInterface::RTCConfiguration::kUndefined) {
+    return absl::nullopt;
+  }
+  return rtc_configuration_parameter;
+}
+
+// Decides whether changing IceTransportsType requires an ICE restart.
+// No change -> no restart. Without the "surface candidates on transport type
+// changed" feature, any change forces a restart. With the feature enabled, a
+// restart is only needed when the new candidate filter is not a superset of
+// the current one (i.e. some currently-allowed candidate type would be
+// dropped).
+bool NeedIceRestart(bool surface_ice_candidates_on_ice_transport_type_changed,
+                    PeerConnectionInterface::IceTransportsType current,
+                    PeerConnectionInterface::IceTransportsType modified) {
+  if (current == modified) {
+    return false;
+  }
+
+  if (!surface_ice_candidates_on_ice_transport_type_changed) {
+    return true;
+  }
+
+  auto current_filter = ConvertIceTransportTypeToCandidateFilter(current);
+  auto modified_filter = ConvertIceTransportTypeToCandidateFilter(modified);
+
+  // If the modified filter contains every bit of the current filter we are
+  // only extending the allowed candidate types, so no restart is needed.
+  return (current_filter & modified_filter) != current_filter;
+}
+
+// Builds a cricket::IceConfig from the ICE-related fields of an
+// RTCConfiguration. Timeout-style int fields go through
+// RTCConfigurationToIceConfigOptionalInt so the kUndefined sentinel maps to
+// "unset"; unknown gathering policies DCHECK and fall back to GATHER_ONCE.
+cricket::IceConfig ParseIceConfig(
+    const PeerConnectionInterface::RTCConfiguration& config) {
+  cricket::ContinualGatheringPolicy gathering_policy;
+  switch (config.continual_gathering_policy) {
+    case PeerConnectionInterface::GATHER_ONCE:
+      gathering_policy = cricket::GATHER_ONCE;
+      break;
+    case PeerConnectionInterface::GATHER_CONTINUALLY:
+      gathering_policy = cricket::GATHER_CONTINUALLY;
+      break;
+    default:
+      RTC_DCHECK_NOTREACHED();
+      gathering_policy = cricket::GATHER_ONCE;
+  }
+
+  cricket::IceConfig ice_config;
+  ice_config.receiving_timeout = RTCConfigurationToIceConfigOptionalInt(
+      config.ice_connection_receiving_timeout);
+  ice_config.prioritize_most_likely_candidate_pairs =
+      config.prioritize_most_likely_ice_candidate_pairs;
+  ice_config.backup_connection_ping_interval =
+      RTCConfigurationToIceConfigOptionalInt(
+          config.ice_backup_candidate_pair_ping_interval);
+  ice_config.continual_gathering_policy = gathering_policy;
+  ice_config.presume_writable_when_fully_relayed =
+      config.presume_writable_when_fully_relayed;
+  ice_config.surface_ice_candidates_on_ice_transport_type_changed =
+      config.surface_ice_candidates_on_ice_transport_type_changed;
+  ice_config.ice_check_interval_strong_connectivity =
+      config.ice_check_interval_strong_connectivity;
+  ice_config.ice_check_interval_weak_connectivity =
+      config.ice_check_interval_weak_connectivity;
+  ice_config.ice_check_min_interval = config.ice_check_min_interval;
+  ice_config.ice_unwritable_timeout = config.ice_unwritable_timeout;
+  ice_config.ice_unwritable_min_checks = config.ice_unwritable_min_checks;
+  ice_config.ice_inactive_timeout = config.ice_inactive_timeout;
+  ice_config.stun_keepalive_interval = config.stun_candidate_keepalive_interval;
+  ice_config.network_preference = config.network_preference;
+  ice_config.stable_writable_connection_ping_interval =
+      config.stable_writable_connection_ping_interval_ms;
+  return ice_config;
+}
+
+// Ensures the configuration doesn't have any parameters with invalid values,
+// or values that conflict with other parameters.
+//
+// Delegates to P2PTransportChannel::ValidateIceConfig on the parsed ICE
+// config. Returns RTCError::OK() if there are no issues.
+RTCError ValidateConfiguration(
+    const PeerConnectionInterface::RTCConfiguration& config) {
+  return cricket::P2PTransportChannel::ValidateIceConfig(
+      ParseIceConfig(config));
+}
+
+// Checks that the candidate pool size is within [0, UINT16_MAX] and, once a
+// value has been applied (which happens via SetLocalDescription), that it is
+// not being changed — JSEP forbids modifying the pool size afterwards.
+RTCError ValidateIceCandidatePoolSize(
+    int ice_candidate_pool_size,
+    absl::optional<int> previous_ice_candidate_pool_size) {
+  // Note that a negative/oversized value isn't possible through chromium,
+  // since it's an unsigned short in WebIDL.
+  if (ice_candidate_pool_size < 0 ||
+      ice_candidate_pool_size > static_cast<int>(UINT16_MAX)) {
+    return RTCError(RTCErrorType::INVALID_RANGE);
+  }
+
+  // According to JSEP, after setLocalDescription, changing the candidate pool
+  // size is not allowed, and changing the set of ICE servers will not result
+  // in new candidates being gathered.
+  if (previous_ice_candidate_pool_size.has_value() &&
+      ice_candidate_pool_size != previous_ice_candidate_pool_size.value()) {
+    LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_MODIFICATION,
+                         "Can't change candidate pool size after calling "
+                         "SetLocalDescription.");
+  }
+
+  return RTCError::OK();
+}
+
+// The simplest (and most future-compatible) way to tell if a config was
+// modified in an invalid way is to copy each property we do support modifying,
+// then use operator==. There are far more properties we don't support
+// modifying than those we do, and more could be added.
+//
+// Accepts a proposed new `configuration` and the `existing_configuration`,
+// and builds a candidate config that is the existing one with only the
+// modifiable properties copied over from `configuration`. If the result still
+// differs from `configuration` (i.e. a non-modifiable property was changed),
+// returns INVALID_MODIFICATION; if it fails ValidateConfiguration(), that
+// error is returned; otherwise the merged config is returned.
+RTCErrorOr<PeerConnectionInterface::RTCConfiguration> ApplyConfiguration(
+    const PeerConnectionInterface::RTCConfiguration& configuration,
+    const PeerConnectionInterface::RTCConfiguration& existing_configuration) {
+  PeerConnectionInterface::RTCConfiguration modified_config =
+      existing_configuration;
+  // Each assignment below is one property that may legally be changed after
+  // construction.
+  modified_config.servers = configuration.servers;
+  modified_config.type = configuration.type;
+  modified_config.ice_candidate_pool_size =
+      configuration.ice_candidate_pool_size;
+  modified_config.prune_turn_ports = configuration.prune_turn_ports;
+  modified_config.turn_port_prune_policy = configuration.turn_port_prune_policy;
+  modified_config.surface_ice_candidates_on_ice_transport_type_changed =
+      configuration.surface_ice_candidates_on_ice_transport_type_changed;
+  modified_config.ice_check_min_interval = configuration.ice_check_min_interval;
+  modified_config.ice_check_interval_strong_connectivity =
+      configuration.ice_check_interval_strong_connectivity;
+  modified_config.ice_check_interval_weak_connectivity =
+      configuration.ice_check_interval_weak_connectivity;
+  modified_config.ice_unwritable_timeout = configuration.ice_unwritable_timeout;
+  modified_config.ice_unwritable_min_checks =
+      configuration.ice_unwritable_min_checks;
+  modified_config.ice_inactive_timeout = configuration.ice_inactive_timeout;
+  modified_config.stun_candidate_keepalive_interval =
+      configuration.stun_candidate_keepalive_interval;
+  modified_config.turn_customizer = configuration.turn_customizer;
+  modified_config.network_preference = configuration.network_preference;
+  modified_config.active_reset_srtp_params =
+      configuration.active_reset_srtp_params;
+  modified_config.turn_logging_id = configuration.turn_logging_id;
+  modified_config.stable_writable_connection_ping_interval_ms =
+      configuration.stable_writable_connection_ping_interval_ms;
+  if (configuration != modified_config) {
+    LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_MODIFICATION,
+                         "Modifying the configuration in an unsupported way.");
+  }
+
+  RTCError err = ValidateConfiguration(modified_config);
+  if (!err.ok()) {
+    return err;
+  }
+
+  return modified_config;
+}
+
+// True if the media description in `content` negotiates RTCP multiplexing
+// (RTP and RTCP on the same transport). `content` must be non-null.
+bool HasRtcpMuxEnabled(const cricket::ContentInfo* content) {
+  return content->media_description()->rtcp_mux();
+}
+
+// Determines whether DTLS should be enabled for this connection. Encryption
+// being disabled in the factory options wins and disables DTLS. Otherwise
+// DTLS defaults to on when a certificate generator or certificate is
+// available; only Fuchsia builds allow `enable_dtls_srtp` in the
+// configuration to override that default.
+bool DtlsEnabled(const PeerConnectionInterface::RTCConfiguration& configuration,
+                 const PeerConnectionFactoryInterface::Options& options,
+                 const PeerConnectionDependencies& dependencies) {
+  if (options.disable_encryption)
+    return false;
+
+  // Enable DTLS by default if we have an identity store or a certificate.
+  bool default_enabled =
+      (dependencies.cert_generator || !configuration.certificates.empty());
+
+#if defined(WEBRTC_FUCHSIA)
+  // The `configuration` can override the default value.
+  return configuration.enable_dtls_srtp.value_or(default_enabled);
+#else
+  return default_enabled;
+#endif
+}
+
+// Calls `ParseIceServersOrError` to extract ice server information from the
+// `configuration` and then validates the extracted configuration. On success
+// `stun_servers`/`turn_servers` (which must be passed in empty) are filled
+// in; the TURN list is silently capped at cricket::kMaxTurnServers with a
+// warning, every TURN server gets the configuration's turn_logging_id, and a
+// usage event is recorded for each non-empty server list.
+RTCError ParseAndValidateIceServersFromConfiguration(
+    const PeerConnectionInterface::RTCConfiguration& configuration,
+    cricket::ServerAddresses& stun_servers,
+    std::vector<cricket::RelayServerConfig>& turn_servers,
+    UsagePattern& usage_pattern) {
+  RTC_DCHECK(stun_servers.empty());
+  RTC_DCHECK(turn_servers.empty());
+  RTCError err = ParseIceServersOrError(configuration.servers, &stun_servers,
+                                        &turn_servers);
+  if (!err.ok()) {
+    return err;
+  }
+
+  // Restrict number of TURN servers.
+  if (turn_servers.size() > cricket::kMaxTurnServers) {
+    RTC_LOG(LS_WARNING) << "Number of configured TURN servers is "
+                        << turn_servers.size()
+                        << " which exceeds the maximum allowed number of "
+                        << cricket::kMaxTurnServers;
+    turn_servers.resize(cricket::kMaxTurnServers);
+  }
+
+  // Add the turn logging id to all turn servers
+  for (cricket::RelayServerConfig& turn_server : turn_servers) {
+    turn_server.turn_logging_id = configuration.turn_logging_id;
+  }
+
+  // Note if STUN or TURN servers were supplied.
+  if (!stun_servers.empty()) {
+    usage_pattern.NoteUsageEvent(UsageEvent::STUN_SERVER_ADDED);
+  }
+  if (!turn_servers.empty()) {
+    usage_pattern.NoteUsageEvent(UsageEvent::TURN_SERVER_ADDED);
+  }
+  return RTCError::OK();
+}
+
+} // namespace
+
+// Member-by-member equality for RTCConfiguration. The local struct mirrors
+// every field of RTCConfiguration in declaration order so the static_assert
+// on sizeof() fails to compile when a field is added without updating this
+// operator. (The size check cannot catch a swap of two same-sized fields, so
+// order still matters when editing.)
+bool PeerConnectionInterface::RTCConfiguration::operator==(
+    const PeerConnectionInterface::RTCConfiguration& o) const {
+  // This static_assert prevents us from accidentally breaking operator==.
+  // Note: Order matters! Fields must be ordered the same as RTCConfiguration.
+  struct stuff_being_tested_for_equality {
+    IceServers servers;
+    IceTransportsType type;
+    BundlePolicy bundle_policy;
+    RtcpMuxPolicy rtcp_mux_policy;
+    std::vector<rtc::scoped_refptr<rtc::RTCCertificate>> certificates;
+    int ice_candidate_pool_size;
+    bool disable_ipv6_on_wifi;
+    int max_ipv6_networks;
+    bool disable_link_local_networks;
+    absl::optional<int> screencast_min_bitrate;
+#if defined(WEBRTC_FUCHSIA)
+    absl::optional<bool> enable_dtls_srtp;
+#endif
+    TcpCandidatePolicy tcp_candidate_policy;
+    CandidateNetworkPolicy candidate_network_policy;
+    int audio_jitter_buffer_max_packets;
+    bool audio_jitter_buffer_fast_accelerate;
+    int audio_jitter_buffer_min_delay_ms;
+    int ice_connection_receiving_timeout;
+    int ice_backup_candidate_pair_ping_interval;
+    ContinualGatheringPolicy continual_gathering_policy;
+    bool prioritize_most_likely_ice_candidate_pairs;
+    struct cricket::MediaConfig media_config;
+    bool prune_turn_ports;
+    PortPrunePolicy turn_port_prune_policy;
+    bool presume_writable_when_fully_relayed;
+    bool enable_ice_renomination;
+    bool redetermine_role_on_ice_restart;
+    bool surface_ice_candidates_on_ice_transport_type_changed;
+    absl::optional<int> ice_check_interval_strong_connectivity;
+    absl::optional<int> ice_check_interval_weak_connectivity;
+    absl::optional<int> ice_check_min_interval;
+    absl::optional<int> ice_unwritable_timeout;
+    absl::optional<int> ice_unwritable_min_checks;
+    absl::optional<int> ice_inactive_timeout;
+    absl::optional<int> stun_candidate_keepalive_interval;
+    webrtc::TurnCustomizer* turn_customizer;
+    SdpSemantics sdp_semantics;
+    absl::optional<rtc::AdapterType> network_preference;
+    bool active_reset_srtp_params;
+    absl::optional<CryptoOptions> crypto_options;
+    bool offer_extmap_allow_mixed;
+    std::string turn_logging_id;
+    bool enable_implicit_rollback;
+    absl::optional<int> report_usage_pattern_delay_ms;
+    absl::optional<int> stable_writable_connection_ping_interval_ms;
+    webrtc::VpnPreference vpn_preference;
+    std::vector<rtc::NetworkMask> vpn_list;
+    PortAllocatorConfig port_allocator_config;
+    absl::optional<TimeDelta> pacer_burst_interval;
+  };
+  static_assert(sizeof(stuff_being_tested_for_equality) == sizeof(*this),
+                "Did you add something to RTCConfiguration and forget to "
+                "update operator==?");
+  // Compare every field; turn_customizer is compared by pointer identity.
+  return type == o.type && servers == o.servers &&
+         bundle_policy == o.bundle_policy &&
+         rtcp_mux_policy == o.rtcp_mux_policy &&
+         tcp_candidate_policy == o.tcp_candidate_policy &&
+         candidate_network_policy == o.candidate_network_policy &&
+         audio_jitter_buffer_max_packets == o.audio_jitter_buffer_max_packets &&
+         audio_jitter_buffer_fast_accelerate ==
+             o.audio_jitter_buffer_fast_accelerate &&
+         audio_jitter_buffer_min_delay_ms ==
+             o.audio_jitter_buffer_min_delay_ms &&
+         ice_connection_receiving_timeout ==
+             o.ice_connection_receiving_timeout &&
+         ice_backup_candidate_pair_ping_interval ==
+             o.ice_backup_candidate_pair_ping_interval &&
+         continual_gathering_policy == o.continual_gathering_policy &&
+         certificates == o.certificates &&
+         prioritize_most_likely_ice_candidate_pairs ==
+             o.prioritize_most_likely_ice_candidate_pairs &&
+         media_config == o.media_config &&
+         disable_ipv6_on_wifi == o.disable_ipv6_on_wifi &&
+         max_ipv6_networks == o.max_ipv6_networks &&
+         disable_link_local_networks == o.disable_link_local_networks &&
+         screencast_min_bitrate == o.screencast_min_bitrate &&
+#if defined(WEBRTC_FUCHSIA)
+         enable_dtls_srtp == o.enable_dtls_srtp &&
+#endif
+         ice_candidate_pool_size == o.ice_candidate_pool_size &&
+         prune_turn_ports == o.prune_turn_ports &&
+         turn_port_prune_policy == o.turn_port_prune_policy &&
+         presume_writable_when_fully_relayed ==
+             o.presume_writable_when_fully_relayed &&
+         enable_ice_renomination == o.enable_ice_renomination &&
+         redetermine_role_on_ice_restart == o.redetermine_role_on_ice_restart &&
+         surface_ice_candidates_on_ice_transport_type_changed ==
+             o.surface_ice_candidates_on_ice_transport_type_changed &&
+         ice_check_interval_strong_connectivity ==
+             o.ice_check_interval_strong_connectivity &&
+         ice_check_interval_weak_connectivity ==
+             o.ice_check_interval_weak_connectivity &&
+         ice_check_min_interval == o.ice_check_min_interval &&
+         ice_unwritable_timeout == o.ice_unwritable_timeout &&
+         ice_unwritable_min_checks == o.ice_unwritable_min_checks &&
+         ice_inactive_timeout == o.ice_inactive_timeout &&
+         stun_candidate_keepalive_interval ==
+             o.stun_candidate_keepalive_interval &&
+         turn_customizer == o.turn_customizer &&
+         sdp_semantics == o.sdp_semantics &&
+         network_preference == o.network_preference &&
+         active_reset_srtp_params == o.active_reset_srtp_params &&
+         crypto_options == o.crypto_options &&
+         offer_extmap_allow_mixed == o.offer_extmap_allow_mixed &&
+         turn_logging_id == o.turn_logging_id &&
+         enable_implicit_rollback == o.enable_implicit_rollback &&
+         report_usage_pattern_delay_ms == o.report_usage_pattern_delay_ms &&
+         stable_writable_connection_ping_interval_ms ==
+             o.stable_writable_connection_ping_interval_ms &&
+         vpn_preference == o.vpn_preference && vpn_list == o.vpn_list &&
+         port_allocator_config.min_port == o.port_allocator_config.min_port &&
+         port_allocator_config.max_port == o.port_allocator_config.max_port &&
+         port_allocator_config.flags == o.port_allocator_config.flags &&
+         pacer_burst_interval == o.pacer_burst_interval;
+}
+
+// Inequality in terms of operator== above.
+bool PeerConnectionInterface::RTCConfiguration::operator!=(
+    const PeerConnectionInterface::RTCConfiguration& o) const {
+  return !(*this == o);
+}
+
+// Factory entry point. Validates `configuration` and `dependencies` up front
+// and returns an RTCError instead of ever exposing a partially-initialized
+// PeerConnection: ICE config, PortAllocator presence, observer presence and
+// the DNS-resolver-factory combination are all checked before construction.
+RTCErrorOr<rtc::scoped_refptr<PeerConnection>> PeerConnection::Create(
+    rtc::scoped_refptr<ConnectionContext> context,
+    const PeerConnectionFactoryInterface::Options& options,
+    std::unique_ptr<RtcEventLog> event_log,
+    std::unique_ptr<Call> call,
+    const PeerConnectionInterface::RTCConfiguration& configuration,
+    PeerConnectionDependencies dependencies) {
+  // TODO(https://crbug.com/webrtc/13528): Remove support for kPlanB.
+  if (configuration.sdp_semantics == SdpSemantics::kPlanB_DEPRECATED) {
+    RTC_LOG(LS_WARNING)
+        << "PeerConnection constructed with legacy SDP semantics!";
+  }
+
+  // Reject configurations whose ICE parameters the transport layer would not
+  // accept, before any threads or transports are touched.
+  RTCError config_error = cricket::P2PTransportChannel::ValidateIceConfig(
+      ParseIceConfig(configuration));
+  if (!config_error.ok()) {
+    RTC_LOG(LS_ERROR) << "Invalid ICE configuration: "
+                      << config_error.message();
+    return config_error;
+  }
+
+  if (!dependencies.allocator) {
+    RTC_LOG(LS_ERROR)
+        << "PeerConnection initialized without a PortAllocator? "
+           "This shouldn't happen if using PeerConnectionFactory.";
+    return RTCError(
+        RTCErrorType::INVALID_PARAMETER,
+        "Attempt to create a PeerConnection without a PortAllocatorFactory");
+  }
+
+  if (!dependencies.observer) {
+    // TODO(deadbeef): Why do we do this?
+    RTC_LOG(LS_ERROR) << "PeerConnection initialized without a "
+                         "PeerConnectionObserver";
+    return RTCError(RTCErrorType::INVALID_PARAMETER,
+                    "Attempt to create a PeerConnection without an observer");
+  }
+
+  bool is_unified_plan =
+      configuration.sdp_semantics == SdpSemantics::kUnifiedPlan;
+  bool dtls_enabled = DtlsEnabled(configuration, options, dependencies);
+
+  // Interim code: If an AsyncResolverFactory is given, but not an
+  // AsyncDnsResolverFactory, wrap it in a WrappingAsyncDnsResolverFactory
+  // If neither is given, create a BasicAsyncDnsResolverFactory.
+  // TODO(bugs.webrtc.org/12598): Remove code once all callers pass a
+  // AsyncDnsResolverFactory.
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wdeprecated-declarations"
+  if (dependencies.async_dns_resolver_factory &&
+      dependencies.async_resolver_factory) {
+    RTC_LOG(LS_ERROR)
+        << "Attempt to set both old and new type of DNS resolver factory";
+    return RTCError(RTCErrorType::INVALID_PARAMETER,
+                    "Both old and new type of DNS resolver given");
+  }
+  // After this point exactly one async_dns_resolver_factory is guaranteed.
+  if (!dependencies.async_dns_resolver_factory) {
+    if (dependencies.async_resolver_factory) {
+      dependencies.async_dns_resolver_factory =
+          std::make_unique<WrappingAsyncDnsResolverFactory>(
+              std::move(dependencies.async_resolver_factory));
+    } else {
+      dependencies.async_dns_resolver_factory =
+          std::make_unique<BasicAsyncDnsResolverFactory>();
+    }
+  }
+#pragma clang diagnostic pop
+
+  // The PeerConnection constructor consumes some, but not all, dependencies.
+  // The remainder is handed to Initialize() below via std::move.
+  auto pc = rtc::make_ref_counted<PeerConnection>(
+      context, options, is_unified_plan, std::move(event_log), std::move(call),
+      dependencies, dtls_enabled);
+  RTCError init_error = pc->Initialize(configuration, std::move(dependencies));
+  if (!init_error.ok()) {
+    RTC_LOG(LS_ERROR) << "PeerConnection initialization failed";
+    return init_error;
+  }
+  return pc;
+}
+
+// Constructor: moves the ownable dependencies (resolver factory, allocator,
+// ICE transport factory, TLS verifier, call, event log) out of `dependencies`;
+// the non-moved members (e.g. `observer`) remain for Initialize() to use.
+// Note `dependencies` is taken by non-const reference precisely because it is
+// partially consumed here.
+PeerConnection::PeerConnection(
+    rtc::scoped_refptr<ConnectionContext> context,
+    const PeerConnectionFactoryInterface::Options& options,
+    bool is_unified_plan,
+    std::unique_ptr<RtcEventLog> event_log,
+    std::unique_ptr<Call> call,
+    PeerConnectionDependencies& dependencies,
+    bool dtls_enabled)
+    : context_(context),
+      trials_(std::move(dependencies.trials), &context->field_trials()),
+      options_(options),
+      observer_(dependencies.observer),
+      is_unified_plan_(is_unified_plan),
+      event_log_(std::move(event_log)),
+      // Raw pointer kept because event_log_ itself is released on the worker
+      // thread in the destructor.
+      event_log_ptr_(event_log_.get()),
+      async_dns_resolver_factory_(
+          std::move(dependencies.async_dns_resolver_factory)),
+      port_allocator_(std::move(dependencies.allocator)),
+      ice_transport_factory_(std::move(dependencies.ice_transport_factory)),
+      tls_cert_verifier_(std::move(dependencies.tls_cert_verifier)),
+      call_(std::move(call)),
+      // The worker-thread safety flag starts alive only if a Call was
+      // provided; it is flipped to not-alive in the destructor.
+      worker_thread_safety_(PendingTaskSafetyFlag::CreateAttachedToTaskQueue(
+          /*alive=*/call_ != nullptr,
+          worker_thread())),
+      call_ptr_(call_.get()),
+      // RFC 3264: The numeric value of the session id and version in the
+      // o line MUST be representable with a "64 bit signed integer".
+      // Due to this constraint session id `session_id_` is max limited to
+      // LLONG_MAX.
+      session_id_(rtc::ToString(rtc::CreateRandomId64() & LLONG_MAX)),
+      dtls_enabled_(dtls_enabled),
+      data_channel_controller_(this),
+      message_handler_(signaling_thread()),
+      weak_factory_(this) {}
+
+// Destructor. Teardown order is significant and runs on three threads:
+// signaling (transceivers, stats, SDP handler), then network (data channel
+// transport, transport controller, port allocator), then worker (call, event
+// log). Each step below documents why it must precede the next.
+PeerConnection::~PeerConnection() {
+  TRACE_EVENT0("webrtc", "PeerConnection::~PeerConnection");
+  RTC_DCHECK_RUN_ON(signaling_thread());
+
+  if (sdp_handler_) {
+    sdp_handler_->PrepareForShutdown();
+  }
+
+  // In case `Close()` wasn't called, always make sure the controller cancels
+  // potentially pending operations.
+  data_channel_controller_.PrepareForShutdown();
+
+  // Need to stop transceivers before destroying the stats collector because
+  // AudioRtpSender has a reference to the LegacyStatsCollector it will update
+  // when stopping.
+  if (rtp_manager()) {
+    for (const auto& transceiver : rtp_manager()->transceivers()->List()) {
+      transceiver->StopInternal();
+    }
+  }
+
+  legacy_stats_.reset(nullptr);
+  if (stats_collector_) {
+    // Let any in-flight async stats request complete before dropping the
+    // collector.
+    stats_collector_->WaitForPendingRequest();
+    stats_collector_ = nullptr;
+  }
+
+  if (sdp_handler_) {
+    // Don't destroy BaseChannels until after stats has been cleaned up so that
+    // the last stats request can still read from the channels.
+    sdp_handler_->DestroyMediaChannels();
+    RTC_LOG(LS_INFO) << "Session: " << session_id() << " is destroyed.";
+    sdp_handler_->ResetSessionDescFactory();
+  }
+
+  // port_allocator_ and transport_controller_ live on the network thread and
+  // should be destroyed there.
+  transport_controller_copy_ = nullptr;
+  network_thread()->BlockingCall([this] {
+    RTC_DCHECK_RUN_ON(network_thread());
+    TeardownDataChannelTransport_n(RTCError::OK());
+    transport_controller_.reset();
+    port_allocator_.reset();
+    if (network_thread_safety_)
+      network_thread_safety_->SetNotAlive();
+  });
+  sctp_mid_s_.reset();
+  SetSctpTransportName("");
+
+  // call_ and event_log_ must be destroyed on the worker thread.
+  worker_thread()->BlockingCall([this] {
+    RTC_DCHECK_RUN_ON(worker_thread());
+    worker_thread_safety_->SetNotAlive();
+    call_.reset();
+    // The event log must outlive call (and any other object that uses it).
+    event_log_.reset();
+  });
+
+  // NOTE(review): this is the second PrepareForShutdown() call in this
+  // destructor (the first is near the top) -- presumably idempotent and
+  // intended to cancel anything queued during teardown; confirm against
+  // DataChannelController before removing either call.
+  data_channel_controller_.PrepareForShutdown();
+}
+
+// Second-phase construction, run on the signaling thread. Parses ICE servers,
+// initializes the port allocator and JsepTransportController on the network
+// thread (via a blocking call), then creates the stats collectors, SDP
+// handler and RTP transmission manager. Returns the first error encountered.
+RTCError PeerConnection::Initialize(
+    const PeerConnectionInterface::RTCConfiguration& configuration,
+    PeerConnectionDependencies dependencies) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  TRACE_EVENT0("webrtc", "PeerConnection::Initialize");
+
+  cricket::ServerAddresses stun_servers;
+  std::vector<cricket::RelayServerConfig> turn_servers;
+  RTCError parse_error = ParseAndValidateIceServersFromConfiguration(
+      configuration, stun_servers, turn_servers, usage_pattern_);
+  if (!parse_error.ok()) {
+    return parse_error;
+  }
+
+  // Network thread initialization. The returned raw pointer is cached in
+  // transport_controller_copy_ for later use from the signaling thread.
+  transport_controller_copy_ = network_thread()->BlockingCall([&] {
+    RTC_DCHECK_RUN_ON(network_thread());
+    network_thread_safety_ = PendingTaskSafetyFlag::Create();
+    InitializePortAllocatorResult pa_result =
+        InitializePortAllocator_n(stun_servers, turn_servers, configuration);
+    // Send information about IPv4/IPv6 status.
+    PeerConnectionAddressFamilyCounter address_family =
+        pa_result.enable_ipv6 ? kPeerConnection_IPv6 : kPeerConnection_IPv4;
+    RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.IPMetrics", address_family,
+                              kPeerConnectionAddressFamilyCounter_Max);
+    return InitializeTransportController_n(configuration, dependencies);
+  });
+
+  configuration_ = configuration;
+
+  legacy_stats_ = std::make_unique<LegacyStatsCollector>(this);
+  stats_collector_ = RTCStatsCollector::Create(this);
+
+  sdp_handler_ = SdpOfferAnswerHandler::Create(this, configuration,
+                                               dependencies, context_.get());
+
+  rtp_manager_ = std::make_unique<RtpTransmissionManager>(
+      IsUnifiedPlan(), context_.get(), &usage_pattern_, observer_,
+      legacy_stats_.get(), [this]() {
+        RTC_DCHECK_RUN_ON(signaling_thread());
+        sdp_handler_->UpdateNegotiationNeeded();
+      });
+
+  // Add default audio/video transceivers for Plan B SDP.
+  if (!IsUnifiedPlan()) {
+    rtp_manager()->transceivers()->Add(
+        RtpTransceiverProxyWithInternal<RtpTransceiver>::Create(
+            signaling_thread(), rtc::make_ref_counted<RtpTransceiver>(
+                                    cricket::MEDIA_TYPE_AUDIO, context())));
+    rtp_manager()->transceivers()->Add(
+        RtpTransceiverProxyWithInternal<RtpTransceiver>::Create(
+            signaling_thread(), rtc::make_ref_counted<RtpTransceiver>(
+                                    cricket::MEDIA_TYPE_VIDEO, context())));
+  }
+
+  // Schedule the usage-pattern UMA report, using the configured delay when
+  // present and the compiled-in default otherwise.
+  int delay_ms = configuration.report_usage_pattern_delay_ms
+                     ? *configuration.report_usage_pattern_delay_ms
+                     : REPORT_USAGE_PATTERN_DELAY_MS;
+  message_handler_.RequestUsagePatternReport(
+      [this]() {
+        RTC_DCHECK_RUN_ON(signaling_thread());
+        ReportUsagePattern();
+      },
+      delay_ms);
+
+  return RTCError::OK();
+}
+
+// Builds the JsepTransportController on the network thread: assembles its
+// Config from `configuration` and factory `options_`, constructs the
+// controller, and wires up every event subscription. Each subscription
+// callback fires on the network thread and re-posts the event to the
+// signaling thread, guarded by signaling_thread_safety_ and with event data
+// captured by value so it outlives the hop. Returns a raw pointer for the
+// caller to cache (ownership stays in transport_controller_).
+JsepTransportController* PeerConnection::InitializeTransportController_n(
+    const RTCConfiguration& configuration,
+    const PeerConnectionDependencies& dependencies) {
+  JsepTransportController::Config config;
+  config.redetermine_role_on_ice_restart =
+      configuration.redetermine_role_on_ice_restart;
+  config.ssl_max_version = options_.ssl_max_version;
+  config.disable_encryption = options_.disable_encryption;
+  config.bundle_policy = configuration.bundle_policy;
+  config.rtcp_mux_policy = configuration.rtcp_mux_policy;
+  // TODO(bugs.webrtc.org/9891) - Remove options_.crypto_options then remove
+  // this stub.
+  config.crypto_options = configuration.crypto_options.has_value()
+                              ? *configuration.crypto_options
+                              : options_.crypto_options;
+  config.transport_observer = this;
+  config.rtcp_handler = InitializeRtcpCallback();
+  config.un_demuxable_packet_handler = InitializeUnDemuxablePacketHandler();
+  config.event_log = event_log_ptr_;
+#if defined(ENABLE_EXTERNAL_AUTH)
+  config.enable_external_auth = true;
+#endif
+  config.active_reset_srtp_params = configuration.active_reset_srtp_params;
+
+  // DTLS has to be enabled to use SCTP.
+  if (dtls_enabled_) {
+    config.sctp_factory = context_->sctp_transport_factory();
+  }
+
+  config.ice_transport_factory = ice_transport_factory_.get();
+  // Weak pointer guards against the PeerConnection being gone when a late
+  // DTLS handshake error is delivered.
+  config.on_dtls_handshake_error_ =
+      [weak_ptr = weak_factory_.GetWeakPtr()](rtc::SSLHandshakeError s) {
+        if (weak_ptr) {
+          weak_ptr->OnTransportControllerDtlsHandshakeError(s);
+        }
+      };
+
+  config.field_trials = trials_.get();
+
+  transport_controller_.reset(new JsepTransportController(
+      network_thread(), port_allocator_.get(),
+      async_dns_resolver_factory_.get(), std::move(config)));
+
+  // All subscriptions below follow the same pattern: assert network thread,
+  // then forward to the signaling thread via SafeTask so delivery is dropped
+  // if the PeerConnection is being torn down.
+  transport_controller_->SubscribeIceConnectionState(
+      [this](cricket::IceConnectionState s) {
+        RTC_DCHECK_RUN_ON(network_thread());
+        signaling_thread()->PostTask(
+            SafeTask(signaling_thread_safety_.flag(), [this, s]() {
+              RTC_DCHECK_RUN_ON(signaling_thread());
+              OnTransportControllerConnectionState(s);
+            }));
+      });
+  transport_controller_->SubscribeConnectionState(
+      [this](PeerConnectionInterface::PeerConnectionState s) {
+        RTC_DCHECK_RUN_ON(network_thread());
+        signaling_thread()->PostTask(
+            SafeTask(signaling_thread_safety_.flag(), [this, s]() {
+              RTC_DCHECK_RUN_ON(signaling_thread());
+              SetConnectionState(s);
+            }));
+      });
+  transport_controller_->SubscribeStandardizedIceConnectionState(
+      [this](PeerConnectionInterface::IceConnectionState s) {
+        RTC_DCHECK_RUN_ON(network_thread());
+        signaling_thread()->PostTask(
+            SafeTask(signaling_thread_safety_.flag(), [this, s]() {
+              RTC_DCHECK_RUN_ON(signaling_thread());
+              SetStandardizedIceConnectionState(s);
+            }));
+      });
+  transport_controller_->SubscribeIceGatheringState(
+      [this](cricket::IceGatheringState s) {
+        RTC_DCHECK_RUN_ON(network_thread());
+        signaling_thread()->PostTask(
+            SafeTask(signaling_thread_safety_.flag(), [this, s]() {
+              RTC_DCHECK_RUN_ON(signaling_thread());
+              OnTransportControllerGatheringState(s);
+            }));
+      });
+  transport_controller_->SubscribeIceCandidateGathered(
+      [this](const std::string& transport,
+             const std::vector<cricket::Candidate>& candidates) {
+        RTC_DCHECK_RUN_ON(network_thread());
+        signaling_thread()->PostTask(
+            SafeTask(signaling_thread_safety_.flag(),
+                     [this, t = transport, c = candidates]() {
+                       RTC_DCHECK_RUN_ON(signaling_thread());
+                       OnTransportControllerCandidatesGathered(t, c);
+                     }));
+      });
+  transport_controller_->SubscribeIceCandidateError(
+      [this](const cricket::IceCandidateErrorEvent& event) {
+        RTC_DCHECK_RUN_ON(network_thread());
+        signaling_thread()->PostTask(
+            SafeTask(signaling_thread_safety_.flag(), [this, event = event]() {
+              RTC_DCHECK_RUN_ON(signaling_thread());
+              OnTransportControllerCandidateError(event);
+            }));
+      });
+  transport_controller_->SubscribeIceCandidatesRemoved(
+      [this](const std::vector<cricket::Candidate>& c) {
+        RTC_DCHECK_RUN_ON(network_thread());
+        signaling_thread()->PostTask(
+            SafeTask(signaling_thread_safety_.flag(), [this, c = c]() {
+              RTC_DCHECK_RUN_ON(signaling_thread());
+              OnTransportControllerCandidatesRemoved(c);
+            }));
+      });
+  transport_controller_->SubscribeIceCandidatePairChanged(
+      [this](const cricket::CandidatePairChangeEvent& event) {
+        RTC_DCHECK_RUN_ON(network_thread());
+        signaling_thread()->PostTask(
+            SafeTask(signaling_thread_safety_.flag(), [this, event = event]() {
+              RTC_DCHECK_RUN_ON(signaling_thread());
+              OnTransportControllerCandidateChanged(event);
+            }));
+      });
+
+  transport_controller_->SetIceConfig(ParseIceConfig(configuration));
+  return transport_controller_.get();
+}
+
+// Plan B only (CHECK-fails under Unified Plan); delegates to the SDP handler.
+rtc::scoped_refptr<StreamCollectionInterface> PeerConnection::local_streams() {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  RTC_CHECK(!IsUnifiedPlan()) << "local_streams is not available with Unified "
+                                 "Plan SdpSemantics. Please use GetSenders "
+                                 "instead.";
+  return sdp_handler_->local_streams();
+}
+
+// Plan B only (CHECK-fails under Unified Plan); delegates to the SDP handler.
+rtc::scoped_refptr<StreamCollectionInterface> PeerConnection::remote_streams() {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  RTC_CHECK(!IsUnifiedPlan()) << "remote_streams is not available with Unified "
+                                 "Plan SdpSemantics. Please use GetReceivers "
+                                 "instead.";
+  return sdp_handler_->remote_streams();
+}
+
+// Plan B only (CHECK-fails under Unified Plan). Returns false if this
+// PeerConnection was created without media support; otherwise delegates to
+// the SDP handler.
+bool PeerConnection::AddStream(MediaStreamInterface* local_stream) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  RTC_CHECK(!IsUnifiedPlan()) << "AddStream is not available with Unified Plan "
+                                 "SdpSemantics. Please use AddTrack instead.";
+  TRACE_EVENT0("webrtc", "PeerConnection::AddStream");
+  if (!ConfiguredForMedia()) {
+    RTC_LOG(LS_ERROR) << "AddStream: Not configured for media";
+    return false;
+  }
+  return sdp_handler_->AddStream(local_stream);
+}
+
+// Plan B only (CHECK-fails under Unified Plan). DCHECKs rather than returning
+// an error when media is not configured; delegates to the SDP handler.
+void PeerConnection::RemoveStream(MediaStreamInterface* local_stream) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  RTC_DCHECK(ConfiguredForMedia());
+  RTC_CHECK(!IsUnifiedPlan()) << "RemoveStream is not available with Unified "
+                                 "Plan SdpSemantics. Please use RemoveTrack "
+                                 "instead.";
+  TRACE_EVENT0("webrtc", "PeerConnection::RemoveStream");
+  sdp_handler_->RemoveStream(local_stream);
+}
+
+// Convenience overload: no explicit send encodings (nullptr sentinel).
+RTCErrorOr<rtc::scoped_refptr<RtpSenderInterface>> PeerConnection::AddTrack(
+    rtc::scoped_refptr<MediaStreamTrackInterface> track,
+    const std::vector<std::string>& stream_ids) {
+  return AddTrack(std::move(track), stream_ids, nullptr);
+}
+
+// Convenience overload: forwards the caller-provided encodings by pointer to
+// the main implementation below.
+RTCErrorOr<rtc::scoped_refptr<RtpSenderInterface>> PeerConnection::AddTrack(
+    rtc::scoped_refptr<MediaStreamTrackInterface> track,
+    const std::vector<std::string>& stream_ids,
+    const std::vector<RtpEncodingParameters>& init_send_encodings) {
+  return AddTrack(std::move(track), stream_ids, &init_send_encodings);
+}
+
+// Main AddTrack implementation. Validates the track (non-null, audio/video
+// kind, no existing sender for it) and connection state, then delegates to
+// the RtpTransmissionManager. On success, marks negotiation as needed and
+// registers the track with the legacy stats collector.
+// `init_send_encodings` may be null (no explicit encodings).
+RTCErrorOr<rtc::scoped_refptr<RtpSenderInterface>> PeerConnection::AddTrack(
+    rtc::scoped_refptr<MediaStreamTrackInterface> track,
+    const std::vector<std::string>& stream_ids,
+    const std::vector<RtpEncodingParameters>* init_send_encodings) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  TRACE_EVENT0("webrtc", "PeerConnection::AddTrack");
+  if (!ConfiguredForMedia()) {
+    LOG_AND_RETURN_ERROR(RTCErrorType::UNSUPPORTED_OPERATION,
+                         "Not configured for media");
+  }
+  if (!track) {
+    LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, "Track is null.");
+  }
+  if (!(track->kind() == MediaStreamTrackInterface::kAudioKind ||
+        track->kind() == MediaStreamTrackInterface::kVideoKind)) {
+    LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
+                         "Track has invalid kind: " + track->kind());
+  }
+  if (IsClosed()) {
+    LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_STATE,
+                         "PeerConnection is closed.");
+  }
+  // A given track may be attached to at most one sender.
+  if (rtp_manager()->FindSenderForTrack(track.get())) {
+    LOG_AND_RETURN_ERROR(
+        RTCErrorType::INVALID_PARAMETER,
+        "Sender already exists for track " + track->id() + ".");
+  }
+  auto sender_or_error =
+      rtp_manager()->AddTrack(track, stream_ids, init_send_encodings);
+  if (sender_or_error.ok()) {
+    sdp_handler_->UpdateNegotiationNeeded();
+    legacy_stats_->AddTrack(track.get());
+  }
+  return sender_or_error;
+}
+
+// Removes `sender`'s track. Unified Plan: detaches the track from the
+// sender's transceiver and downgrades the transceiver direction (kSendRecv ->
+// kRecvOnly, kSendOnly -> kInactive); removing a sender with no track (or an
+// unknown sender) is a no-op success. Plan B: removes the sender from the
+// matching audio/video transceiver and errors if it was not found. In both
+// paths a successful removal flags that renegotiation is needed.
+RTCError PeerConnection::RemoveTrackOrError(
+    rtc::scoped_refptr<RtpSenderInterface> sender) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  if (!ConfiguredForMedia()) {
+    LOG_AND_RETURN_ERROR(RTCErrorType::UNSUPPORTED_OPERATION,
+                         "Not configured for media");
+  }
+  if (!sender) {
+    LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, "Sender is null.");
+  }
+  if (IsClosed()) {
+    LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_STATE,
+                         "PeerConnection is closed.");
+  }
+  if (IsUnifiedPlan()) {
+    auto transceiver = FindTransceiverBySender(sender);
+    if (!transceiver || !sender->track()) {
+      return RTCError::OK();
+    }
+    sender->SetTrack(nullptr);
+    if (transceiver->direction() == RtpTransceiverDirection::kSendRecv) {
+      transceiver->internal()->set_direction(
+          RtpTransceiverDirection::kRecvOnly);
+    } else if (transceiver->direction() == RtpTransceiverDirection::kSendOnly) {
+      transceiver->internal()->set_direction(
+          RtpTransceiverDirection::kInactive);
+    }
+  } else {
+    bool removed;
+    if (sender->media_type() == cricket::MEDIA_TYPE_AUDIO) {
+      removed = rtp_manager()->GetAudioTransceiver()->internal()->RemoveSender(
+          sender.get());
+    } else {
+      RTC_DCHECK_EQ(cricket::MEDIA_TYPE_VIDEO, sender->media_type());
+      removed = rtp_manager()->GetVideoTransceiver()->internal()->RemoveSender(
+          sender.get());
+    }
+    if (!removed) {
+      LOG_AND_RETURN_ERROR(
+          RTCErrorType::INVALID_PARAMETER,
+          "Couldn't find sender " + sender->id() + " to remove.");
+    }
+  }
+  sdp_handler_->UpdateNegotiationNeeded();
+  return RTCError::OK();
+}
+
+// Thin lookup delegate into the transceiver list; returns null refptr when no
+// transceiver owns `sender`.
+rtc::scoped_refptr<RtpTransceiverProxyWithInternal<RtpTransceiver>>
+PeerConnection::FindTransceiverBySender(
+    rtc::scoped_refptr<RtpSenderInterface> sender) {
+  return rtp_manager()->transceivers()->FindBySender(sender);
+}
+
+// Convenience overload: default-constructed RtpTransceiverInit.
+RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>>
+PeerConnection::AddTransceiver(
+    rtc::scoped_refptr<MediaStreamTrackInterface> track) {
+  if (!ConfiguredForMedia()) {
+    LOG_AND_RETURN_ERROR(RTCErrorType::UNSUPPORTED_OPERATION,
+                         "Not configured for media");
+  }
+
+  return AddTransceiver(track, RtpTransceiverInit());
+}
+
+// Fetches the RTP transport for `mid` by blocking on the network thread.
+// DCHECKs that a transport exists for the given mid.
+RtpTransportInternal* PeerConnection::GetRtpTransport(const std::string& mid) {
+  // TODO(bugs.webrtc.org/9987): Avoid the thread jump.
+  // This might be done by caching the value on the signaling thread.
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  return network_thread()->BlockingCall([this, &mid] {
+    RTC_DCHECK_RUN_ON(network_thread());
+    auto rtp_transport = transport_controller_->GetRtpTransport(mid);
+    RTC_DCHECK(rtp_transport);
+    return rtp_transport;
+  });
+}
+
+// Unified Plan only (CHECK-fails under Plan B). Derives the media type from
+// the track's kind and forwards to the media-type overload.
+RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>>
+PeerConnection::AddTransceiver(
+    rtc::scoped_refptr<MediaStreamTrackInterface> track,
+    const RtpTransceiverInit& init) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  if (!ConfiguredForMedia()) {
+    LOG_AND_RETURN_ERROR(RTCErrorType::UNSUPPORTED_OPERATION,
+                         "Not configured for media");
+  }
+  RTC_CHECK(IsUnifiedPlan())
+      << "AddTransceiver is only available with Unified Plan SdpSemantics";
+  if (!track) {
+    LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, "track is null");
+  }
+  cricket::MediaType media_type;
+  if (track->kind() == MediaStreamTrackInterface::kAudioKind) {
+    media_type = cricket::MEDIA_TYPE_AUDIO;
+  } else if (track->kind() == MediaStreamTrackInterface::kVideoKind) {
+    media_type = cricket::MEDIA_TYPE_VIDEO;
+  } else {
+    LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
+                         "Track kind is not audio or video");
+  }
+  return AddTransceiver(media_type, track, init);
+}
+
+// Convenience overload: no track, default-constructed RtpTransceiverInit.
+RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>>
+PeerConnection::AddTransceiver(cricket::MediaType media_type) {
+  return AddTransceiver(media_type, RtpTransceiverInit());
+}
+
+// Unified Plan only (CHECK-fails under Plan B). Validates the media type and
+// forwards to the main implementation with a null track.
+RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>>
+PeerConnection::AddTransceiver(cricket::MediaType media_type,
+                               const RtpTransceiverInit& init) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  if (!ConfiguredForMedia()) {
+    LOG_AND_RETURN_ERROR(RTCErrorType::UNSUPPORTED_OPERATION,
+                         "Not configured for media");
+  }
+  RTC_CHECK(IsUnifiedPlan())
+      << "AddTransceiver is only available with Unified Plan SdpSemantics";
+  if (!(media_type == cricket::MEDIA_TYPE_AUDIO ||
+        media_type == cricket::MEDIA_TYPE_VIDEO)) {
+    LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
+                         "media type is not audio or video");
+  }
+  return AddTransceiver(media_type, nullptr, init);
+}
+
+// Main AddTransceiver implementation. Validates and normalizes the requested
+// send encodings (RID rules, simulcast limits, unimplemented-parameter
+// checks), then creates a sender/receiver pair and wraps them in a new
+// transceiver with the requested direction. `track` may be null.
+// `update_negotiation_needed` is false when called internally from paths that
+// handle renegotiation themselves.
+RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>>
+PeerConnection::AddTransceiver(
+    cricket::MediaType media_type,
+    rtc::scoped_refptr<MediaStreamTrackInterface> track,
+    const RtpTransceiverInit& init,
+    bool update_negotiation_needed) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  if (!ConfiguredForMedia()) {
+    LOG_AND_RETURN_ERROR(RTCErrorType::UNSUPPORTED_OPERATION,
+                         "Not configured for media");
+  }
+  RTC_DCHECK((media_type == cricket::MEDIA_TYPE_AUDIO ||
+              media_type == cricket::MEDIA_TYPE_VIDEO));
+  if (track) {
+    // When both are given, the track's kind must agree with `media_type`.
+    RTC_DCHECK_EQ(media_type,
+                  (track->kind() == MediaStreamTrackInterface::kAudioKind
+                       ? cricket::MEDIA_TYPE_AUDIO
+                       : cricket::MEDIA_TYPE_VIDEO));
+  }
+
+  RTC_HISTOGRAM_COUNTS_LINEAR(kSimulcastNumberOfEncodings,
+                              init.send_encodings.size(), 0, 7, 8);
+
+  // RIDs must be given for all encodings or for none.
+  size_t num_rids = absl::c_count_if(init.send_encodings,
+                                     [](const RtpEncodingParameters& encoding) {
+                                       return !encoding.rid.empty();
+                                     });
+  if (num_rids > 0 && num_rids != init.send_encodings.size()) {
+    LOG_AND_RETURN_ERROR(
+        RTCErrorType::INVALID_PARAMETER,
+        "RIDs must be provided for either all or none of the send encodings.");
+  }
+
+  if (num_rids > 0 && absl::c_any_of(init.send_encodings,
+                                     [](const RtpEncodingParameters& encoding) {
+                                       return !IsLegalRsidName(encoding.rid);
+                                     })) {
+    LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
+                         "Invalid RID value provided.");
+  }
+
+  // Caller-specified SSRCs are not supported on this path.
+  if (absl::c_any_of(init.send_encodings,
+                     [](const RtpEncodingParameters& encoding) {
+                       return encoding.ssrc.has_value();
+                     })) {
+    LOG_AND_RETURN_ERROR(
+        RTCErrorType::UNSUPPORTED_PARAMETER,
+        "Attempted to set an unimplemented parameter of RtpParameters.");
+  }
+
+  RtpParameters parameters;
+  parameters.encodings = init.send_encodings;
+
+  // Encodings are dropped from the tail if too many are provided.
+  // Audio allows a single encoding; video up to kMaxSimulcastStreams.
+  size_t max_simulcast_streams =
+      media_type == cricket::MEDIA_TYPE_VIDEO ? kMaxSimulcastStreams : 1u;
+  if (parameters.encodings.size() > max_simulcast_streams) {
+    parameters.encodings.erase(
+        parameters.encodings.begin() + max_simulcast_streams,
+        parameters.encodings.end());
+  }
+
+  // Single RID should be removed.
+  if (parameters.encodings.size() == 1 &&
+      !parameters.encodings[0].rid.empty()) {
+    RTC_LOG(LS_INFO) << "Removing RID: " << parameters.encodings[0].rid << ".";
+    parameters.encodings[0].rid.clear();
+  }
+
+  // If RIDs were not provided, they are generated for simulcast scenario.
+  if (parameters.encodings.size() > 1 && num_rids == 0) {
+    rtc::UniqueStringGenerator rid_generator;
+    for (RtpEncodingParameters& encoding : parameters.encodings) {
+      encoding.rid = rid_generator.GenerateString();
+    }
+  }
+
+  // If no encoding parameters were provided, a default entry is created.
+  if (parameters.encodings.empty()) {
+    parameters.encodings.push_back({});
+  }
+
+  if (UnimplementedRtpParameterHasValue(parameters)) {
+    LOG_AND_RETURN_ERROR(
+        RTCErrorType::UNSUPPORTED_PARAMETER,
+        "Attempted to set an unimplemented parameter of RtpParameters.");
+  }
+
+  std::vector<cricket::VideoCodec> codecs;
+  // Gather the current codec capabilities to allow checking scalabilityMode and
+  // codec selection against supported values.
+  if (media_type == cricket::MEDIA_TYPE_VIDEO) {
+    codecs = context_->media_engine()->video().send_codecs(false);
+  } else {
+    codecs = context_->media_engine()->voice().send_codecs();
+  }
+
+  auto result =
+      cricket::CheckRtpParametersValues(parameters, codecs, absl::nullopt);
+  if (!result.ok()) {
+    // Remap INVALID_MODIFICATION: nothing is being modified at add time, so
+    // surface it as UNSUPPORTED_OPERATION instead.
+    if (result.type() == RTCErrorType::INVALID_MODIFICATION) {
+      result.set_type(RTCErrorType::UNSUPPORTED_OPERATION);
+    }
+    LOG_AND_RETURN_ERROR(result.type(), result.message());
+  }
+
+  RTC_LOG(LS_INFO) << "Adding " << cricket::MediaTypeToString(media_type)
+                   << " transceiver in response to a call to AddTransceiver.";
+  // Set the sender ID equal to the track ID if the track is specified unless
+  // that sender ID is already in use.
+  std::string sender_id = (track && !rtp_manager()->FindSenderById(track->id())
+                               ? track->id()
+                               : rtc::CreateRandomUuid());
+  auto sender = rtp_manager()->CreateSender(
+      media_type, sender_id, track, init.stream_ids, parameters.encodings);
+  auto receiver =
+      rtp_manager()->CreateReceiver(media_type, rtc::CreateRandomUuid());
+  auto transceiver = rtp_manager()->CreateAndAddTransceiver(sender, receiver);
+  transceiver->internal()->set_direction(init.direction);
+
+  if (update_negotiation_needed) {
+    sdp_handler_->UpdateNegotiationNeeded();
+  }
+
+  return rtc::scoped_refptr<RtpTransceiverInterface>(transceiver);
+}
+
+// Signals the SDP handler to re-evaluate whether renegotiation is needed.
+// Must not be called after the connection is closed (DCHECK).
+void PeerConnection::OnNegotiationNeeded() {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  RTC_DCHECK(!IsClosed());
+  sdp_handler_->UpdateNegotiationNeeded();
+}
+
+// Plan B only (CHECK-fails under Unified Plan). Creates a standalone audio or
+// video sender with a random ID, attaches it to the corresponding default
+// transceiver, and assigns it a stream ID (generated when `stream_id` is
+// empty). Returns nullptr when media is not configured, the connection is
+// closed, or `kind` is not audio/video.
+rtc::scoped_refptr<RtpSenderInterface> PeerConnection::CreateSender(
+    const std::string& kind,
+    const std::string& stream_id) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  if (!ConfiguredForMedia()) {
+    RTC_LOG(LS_ERROR) << "Not configured for media";
+    return nullptr;
+  }
+  RTC_CHECK(!IsUnifiedPlan()) << "CreateSender is not available with Unified "
+                                 "Plan SdpSemantics. Please use AddTransceiver "
+                                 "instead.";
+  TRACE_EVENT0("webrtc", "PeerConnection::CreateSender");
+  if (IsClosed()) {
+    return nullptr;
+  }
+
+  // Internally we need to have one stream with Plan B semantics, so we
+  // generate a random stream ID if not specified.
+  std::vector<std::string> stream_ids;
+  if (stream_id.empty()) {
+    stream_ids.push_back(rtc::CreateRandomUuid());
+    RTC_LOG(LS_INFO)
+        << "No stream_id specified for sender. Generated stream ID: "
+        << stream_ids[0];
+  } else {
+    stream_ids.push_back(stream_id);
+  }
+
+  // TODO(steveanton): Move construction of the RtpSenders to RtpTransceiver.
+  rtc::scoped_refptr<RtpSenderProxyWithInternal<RtpSenderInternal>> new_sender;
+  if (kind == MediaStreamTrackInterface::kAudioKind) {
+    auto audio_sender =
+        AudioRtpSender::Create(worker_thread(), rtc::CreateRandomUuid(),
+                               legacy_stats_.get(), rtp_manager());
+    audio_sender->SetMediaChannel(rtp_manager()->voice_media_send_channel());
+    new_sender = RtpSenderProxyWithInternal<RtpSenderInternal>::Create(
+        signaling_thread(), audio_sender);
+    rtp_manager()->GetAudioTransceiver()->internal()->AddSender(new_sender);
+  } else if (kind == MediaStreamTrackInterface::kVideoKind) {
+    auto video_sender = VideoRtpSender::Create(
+        worker_thread(), rtc::CreateRandomUuid(), rtp_manager());
+    video_sender->SetMediaChannel(rtp_manager()->video_media_send_channel());
+    new_sender = RtpSenderProxyWithInternal<RtpSenderInternal>::Create(
+        signaling_thread(), video_sender);
+    rtp_manager()->GetVideoTransceiver()->internal()->AddSender(new_sender);
+  } else {
+    RTC_LOG(LS_ERROR) << "CreateSender called with invalid kind: " << kind;
+    return nullptr;
+  }
+  new_sender->internal()->set_stream_ids(stream_ids);
+
+  return new_sender;
+}
+
+// Returns a snapshot of all senders as interface pointers; empty when this
+// PeerConnection was created without media support.
+std::vector<rtc::scoped_refptr<RtpSenderInterface>> PeerConnection::GetSenders()
+    const {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  std::vector<rtc::scoped_refptr<RtpSenderInterface>> ret;
+  if (ConfiguredForMedia()) {
+    for (const auto& sender : rtp_manager()->GetSendersInternal()) {
+      ret.push_back(sender);
+    }
+  }
+  return ret;
+}
+
+// Returns a snapshot of all receivers as interface pointers; empty when this
+// PeerConnection was created without media support.
+std::vector<rtc::scoped_refptr<RtpReceiverInterface>>
+PeerConnection::GetReceivers() const {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  std::vector<rtc::scoped_refptr<RtpReceiverInterface>> ret;
+  if (ConfiguredForMedia()) {
+    for (const auto& receiver : rtp_manager()->GetReceiversInternal()) {
+      ret.push_back(receiver);
+    }
+  }
+  return ret;
+}
+
+// Unified Plan only (CHECK-fails under Plan B). Returns a snapshot of all
+// transceivers; empty when media is not configured.
+std::vector<rtc::scoped_refptr<RtpTransceiverInterface>>
+PeerConnection::GetTransceivers() const {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  RTC_CHECK(IsUnifiedPlan())
+      << "GetTransceivers is only supported with Unified Plan SdpSemantics.";
+  std::vector<rtc::scoped_refptr<RtpTransceiverInterface>> all_transceivers;
+  if (ConfiguredForMedia()) {
+    for (const auto& transceiver : rtp_manager()->transceivers()->List()) {
+      all_transceivers.push_back(transceiver);
+    }
+  }
+  return all_transceivers;
+}
+
+// Legacy (non-spec) stats API. Synchronously refreshes the legacy stats
+// cache, validates `track` against the collector (which also remembers
+// removed tracks), and posts the report delivery to `observer` via the
+// message handler. Returns false on a null observer or unknown track.
+bool PeerConnection::GetStats(StatsObserver* observer,
+                              MediaStreamTrackInterface* track,
+                              StatsOutputLevel level) {
+  TRACE_EVENT0("webrtc", "PeerConnection::GetStats (legacy)");
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  if (!observer) {
+    RTC_LOG(LS_ERROR) << "Legacy GetStats - observer is NULL.";
+    return false;
+  }
+
+  RTC_LOG_THREAD_BLOCK_COUNT();
+
+  legacy_stats_->UpdateStats(level);
+
+  RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(4);
+
+  // The LegacyStatsCollector is used to tell if a track is valid because it may
+  // remember tracks that the PeerConnection previously removed.
+  if (track && !legacy_stats_->IsValidTrack(track->id())) {
+    RTC_LOG(LS_WARNING) << "Legacy GetStats is called with an invalid track: "
+                        << track->id();
+    return false;
+  }
+  // Delivery is asynchronous: the observer is invoked via a posted task.
+  message_handler_.PostGetStats(observer, legacy_stats_.get(), track);
+
+  return true;
+}
+
+// Spec-compliant stats API, whole-connection variant: requests an async
+// report from the RTCStatsCollector and delivers it to `callback`.
+void PeerConnection::GetStats(RTCStatsCollectorCallback* callback) {
+  TRACE_EVENT0("webrtc", "PeerConnection::GetStats");
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  RTC_DCHECK(stats_collector_);
+  RTC_DCHECK(callback);
+  RTC_LOG_THREAD_BLOCK_COUNT();
+  stats_collector_->GetStatsReport(
+      rtc::scoped_refptr<RTCStatsCollectorCallback>(callback));
+  RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(2);
+}
+
+// Spec-compliant stats API, sender-selector variant. Resolves the proxy
+// `selector` to its internal sender by scanning all transceivers' senders; a
+// null resolution yields an empty report (see comment below).
+void PeerConnection::GetStats(
+    rtc::scoped_refptr<RtpSenderInterface> selector,
+    rtc::scoped_refptr<RTCStatsCollectorCallback> callback) {
+  TRACE_EVENT0("webrtc", "PeerConnection::GetStats");
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  RTC_DCHECK(callback);
+  RTC_DCHECK(stats_collector_);
+  RTC_LOG_THREAD_BLOCK_COUNT();
+  rtc::scoped_refptr<RtpSenderInternal> internal_sender;
+  if (selector) {
+    for (const auto& proxy_transceiver :
+         rtp_manager()->transceivers()->List()) {
+      for (const auto& proxy_sender :
+           proxy_transceiver->internal()->senders()) {
+        if (proxy_sender == selector) {
+          internal_sender = proxy_sender->internal();
+          break;
+        }
+      }
+      if (internal_sender)
+        break;
+    }
+  }
+  // If there is no `internal_sender` then `selector` is either null or does not
+  // belong to the PeerConnection (in Plan B, senders can be removed from the
+  // PeerConnection). This means that "all the stats objects representing the
+  // selector" is an empty set. Invoking GetStatsReport() with a null selector
+  // produces an empty stats report.
+  stats_collector_->GetStatsReport(internal_sender, callback);
+  RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(2);
+}
+
+// Spec-compliant stats API, receiver-selector variant. Mirrors the sender
+// variant above: resolves the proxy `selector` to its internal receiver; a
+// null resolution yields an empty report (see comment below).
+void PeerConnection::GetStats(
+    rtc::scoped_refptr<RtpReceiverInterface> selector,
+    rtc::scoped_refptr<RTCStatsCollectorCallback> callback) {
+  TRACE_EVENT0("webrtc", "PeerConnection::GetStats");
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  RTC_DCHECK(callback);
+  RTC_DCHECK(stats_collector_);
+  RTC_LOG_THREAD_BLOCK_COUNT();
+  rtc::scoped_refptr<RtpReceiverInternal> internal_receiver;
+  if (selector) {
+    for (const auto& proxy_transceiver :
+         rtp_manager()->transceivers()->List()) {
+      for (const auto& proxy_receiver :
+           proxy_transceiver->internal()->receivers()) {
+        if (proxy_receiver == selector) {
+          internal_receiver = proxy_receiver->internal();
+          break;
+        }
+      }
+      if (internal_receiver)
+        break;
+    }
+  }
+  // If there is no `internal_receiver` then `selector` is either null or does
+  // not belong to the PeerConnection (in Plan B, receivers can be removed from
+  // the PeerConnection). This means that "all the stats objects representing
+  // the selector" is an empty set. Invoking GetStatsReport() with a null
+  // selector produces an empty stats report.
+  stats_collector_->GetStatsReport(internal_receiver, callback);
+  RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(2);
+}
+
+// Signaling-thread accessor; state is owned by the SDP handler.
+PeerConnectionInterface::SignalingState PeerConnection::signaling_state() {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  return sdp_handler_->signaling_state();
+}
+
+PeerConnectionInterface::IceConnectionState
+PeerConnection::ice_connection_state() {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ return ice_connection_state_;
+}
+
+PeerConnectionInterface::IceConnectionState
+PeerConnection::standardized_ice_connection_state() {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ return standardized_ice_connection_state_;
+}
+
+PeerConnectionInterface::PeerConnectionState
+PeerConnection::peer_connection_state() {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ return connection_state_;
+}
+
+PeerConnectionInterface::IceGatheringState
+PeerConnection::ice_gathering_state() {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ return ice_gathering_state_;
+}
+
+absl::optional<bool> PeerConnection::can_trickle_ice_candidates() {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ const SessionDescriptionInterface* description = current_remote_description();
+ if (!description) {
+ description = pending_remote_description();
+ }
+ if (!description) {
+ return absl::nullopt;
+ }
+ // TODO(bugs.webrtc.org/7443): Change to retrieve from session-level option.
+ if (description->description()->transport_infos().size() < 1) {
+ return absl::nullopt;
+ }
+ return description->description()->transport_infos()[0].description.HasOption(
+ "trickle");
+}
+
+// Creates a data channel named `label` with optional `config`, returning the
+// proxy wrapper or an error (e.g. when the PeerConnection is already closed
+// or the controller rejects the init parameters).
+RTCErrorOr<rtc::scoped_refptr<DataChannelInterface>>
+PeerConnection::CreateDataChannelOrError(const std::string& label,
+                                         const DataChannelInit* config) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  TRACE_EVENT0("webrtc", "PeerConnection::CreateDataChannel");
+
+  if (IsClosed()) {
+    LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_STATE,
+                         "CreateDataChannelOrError: PeerConnection is closed.");
+  }
+
+  // Captured before creation so the Plan B renegotiation check below can tell
+  // whether this is the very first data channel on this connection.
+  bool first_datachannel = !data_channel_controller_.HasUsedDataChannels();
+
+  InternalDataChannelInit internal_config;
+  if (config) {
+    internal_config = InternalDataChannelInit(*config);
+  }
+
+  internal_config.fallback_ssl_role = sdp_handler_->GuessSslRole();
+  RTCErrorOr<rtc::scoped_refptr<DataChannelInterface>> ret =
+      data_channel_controller_.InternalCreateDataChannelWithProxy(
+          label, internal_config);
+  if (!ret.ok()) {
+    return ret.MoveError();
+  }
+
+  rtc::scoped_refptr<DataChannelInterface> channel = ret.MoveValue();
+
+  // Check the onRenegotiationNeeded event (with plan-b backward compat)
+  if (configuration_.sdp_semantics == SdpSemantics::kUnifiedPlan ||
+      (configuration_.sdp_semantics == SdpSemantics::kPlanB_DEPRECATED &&
+       first_datachannel)) {
+    sdp_handler_->UpdateNegotiationNeeded();
+  }
+  NoteUsageEvent(UsageEvent::DATA_ADDED);
+  return channel;
+}
+
+// The operations below forward directly to the SdpOfferAnswerHandler, which
+// owns the offer/answer state machine. All must run on the signaling thread.
+void PeerConnection::RestartIce() {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  sdp_handler_->RestartIce();
+}
+
+void PeerConnection::CreateOffer(CreateSessionDescriptionObserver* observer,
+                                 const RTCOfferAnswerOptions& options) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  sdp_handler_->CreateOffer(observer, options);
+}
+
+void PeerConnection::CreateAnswer(CreateSessionDescriptionObserver* observer,
+                                  const RTCOfferAnswerOptions& options) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  sdp_handler_->CreateAnswer(observer, options);
+}
+
+// Legacy overload: raw description pointer + SetSessionDescriptionObserver.
+void PeerConnection::SetLocalDescription(
+    SetSessionDescriptionObserver* observer,
+    SessionDescriptionInterface* desc_ptr) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  sdp_handler_->SetLocalDescription(observer, desc_ptr);
+}
+
+void PeerConnection::SetLocalDescription(
+    std::unique_ptr<SessionDescriptionInterface> desc,
+    rtc::scoped_refptr<SetLocalDescriptionObserverInterface> observer) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  sdp_handler_->SetLocalDescription(std::move(desc), observer);
+}
+
+// Implicit SLD overloads (no description argument).
+void PeerConnection::SetLocalDescription(
+    SetSessionDescriptionObserver* observer) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  sdp_handler_->SetLocalDescription(observer);
+}
+
+void PeerConnection::SetLocalDescription(
+    rtc::scoped_refptr<SetLocalDescriptionObserverInterface> observer) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  sdp_handler_->SetLocalDescription(observer);
+}
+
+void PeerConnection::SetRemoteDescription(
+    SetSessionDescriptionObserver* observer,
+    SessionDescriptionInterface* desc_ptr) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  sdp_handler_->SetRemoteDescription(observer, desc_ptr);
+}
+
+void PeerConnection::SetRemoteDescription(
+    std::unique_ptr<SessionDescriptionInterface> desc,
+    rtc::scoped_refptr<SetRemoteDescriptionObserverInterface> observer) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  sdp_handler_->SetRemoteDescription(std::move(desc), observer);
+}
+
+// Returns a copy of the currently active configuration.
+PeerConnectionInterface::RTCConfiguration PeerConnection::GetConfiguration() {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  return configuration_;
+}
+
+// Applies a new RTCConfiguration: validates the candidate pool size and
+// fields that become immutable after SetLocalDescription, parses ICE servers,
+// then pushes the ICE/port-allocator changes to the network thread. Only on
+// full success is `configuration_` replaced.
+RTCError PeerConnection::SetConfiguration(
+    const RTCConfiguration& configuration) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  TRACE_EVENT0("webrtc", "PeerConnection::SetConfiguration");
+  if (IsClosed()) {
+    LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_STATE,
+                         "SetConfiguration: PeerConnection is closed.");
+  }
+
+  const bool has_local_description = local_description() != nullptr;
+
+  RTCError validate_error = ValidateIceCandidatePoolSize(
+      configuration.ice_candidate_pool_size,
+      has_local_description
+          ? absl::optional<int>(configuration_.ice_candidate_pool_size)
+          : absl::nullopt);
+  if (!validate_error.ok()) {
+    return validate_error;
+  }
+
+  if (has_local_description &&
+      configuration.crypto_options != configuration_.crypto_options) {
+    LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_MODIFICATION,
+                         "Can't change crypto_options after calling "
+                         "SetLocalDescription.");
+  }
+
+  // Create a new configuration object whose ICE config has been validated.
+  RTCErrorOr<RTCConfiguration> validated_config =
+      ApplyConfiguration(configuration, configuration_);
+  if (!validated_config.ok()) {
+    return validated_config.error();
+  }
+
+  // Parse ICE servers before hopping to network thread.
+  cricket::ServerAddresses stun_servers;
+  std::vector<cricket::RelayServerConfig> turn_servers;
+  validate_error = ParseAndValidateIceServersFromConfiguration(
+      configuration, stun_servers, turn_servers, usage_pattern_);
+  if (!validate_error.ok()) {
+    return validate_error;
+  }
+
+  // An ICE restart is required when the server list, the (effective) ICE
+  // transport type, or the TURN port prune policy changed.
+  const RTCConfiguration& modified_config = validated_config.value();
+  const bool needs_ice_restart =
+      modified_config.servers != configuration_.servers ||
+      NeedIceRestart(
+          configuration_.surface_ice_candidates_on_ice_transport_type_changed,
+          configuration_.type, modified_config.type) ||
+      modified_config.GetTurnPortPrunePolicy() !=
+          configuration_.GetTurnPortPrunePolicy();
+  cricket::IceConfig ice_config = ParseIceConfig(modified_config);
+
+  // Apply part of the configuration on the network thread. In theory this
+  // shouldn't fail.
+  if (!network_thread()->BlockingCall(
+          [this, needs_ice_restart, &ice_config, &stun_servers, &turn_servers,
+           &modified_config, has_local_description] {
+            RTC_DCHECK_RUN_ON(network_thread());
+            // As described in JSEP, calling setConfiguration with new ICE
+            // servers or candidate policy must set a "needs-ice-restart" bit so
+            // that the next offer triggers an ICE restart which will pick up
+            // the changes.
+            if (needs_ice_restart)
+              transport_controller_->SetNeedsIceRestartFlag();
+
+            transport_controller_->SetIceConfig(ice_config);
+            transport_controller_->SetActiveResetSrtpParams(
+                modified_config.active_reset_srtp_params);
+            return ReconfigurePortAllocator_n(
+                stun_servers, turn_servers, modified_config.type,
+                modified_config.ice_candidate_pool_size,
+                modified_config.GetTurnPortPrunePolicy(),
+                modified_config.turn_customizer,
+                modified_config.stun_candidate_keepalive_interval,
+                has_local_description);
+          })) {
+    LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR,
+                         "Failed to apply configuration to PortAllocator.");
+  }
+
+  configuration_ = modified_config;
+  return RTCError::OK();
+}
+
+// Synchronous overload. The cached stats report is cleared because candidate
+// changes alter its contents.
+bool PeerConnection::AddIceCandidate(
+    const IceCandidateInterface* ice_candidate) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  ClearStatsCache();
+  return sdp_handler_->AddIceCandidate(ice_candidate);
+}
+
+// Asynchronous overload; `callback` receives the outcome after the SDP
+// handler has processed the candidate. Stats are cleared at completion time.
+void PeerConnection::AddIceCandidate(
+    std::unique_ptr<IceCandidateInterface> candidate,
+    std::function<void(RTCError)> callback) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  sdp_handler_->AddIceCandidate(std::move(candidate),
+                                [this, callback](webrtc::RTCError result) {
+                                  ClearStatsCache();
+                                  callback(result);
+                                });
+}
+
+bool PeerConnection::RemoveIceCandidates(
+    const std::vector<cricket::Candidate>& candidates) {
+  TRACE_EVENT0("webrtc", "PeerConnection::RemoveIceCandidates");
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  return sdp_handler_->RemoveIceCandidates(candidates);
+}
+
+// Validates and applies client bitrate preferences (min <= start <= max, all
+// non-negative where present). Hops to the worker thread if needed; `call_`
+// must exist (the connection is not closed) when this runs.
+RTCError PeerConnection::SetBitrate(const BitrateSettings& bitrate) {
+  if (!worker_thread()->IsCurrent()) {
+    return worker_thread()->BlockingCall([&]() { return SetBitrate(bitrate); });
+  }
+  RTC_DCHECK_RUN_ON(worker_thread());
+
+  const bool has_min = bitrate.min_bitrate_bps.has_value();
+  const bool has_start = bitrate.start_bitrate_bps.has_value();
+  const bool has_max = bitrate.max_bitrate_bps.has_value();
+  if (has_min && *bitrate.min_bitrate_bps < 0) {
+    // Error text matches the condition: 0 is a valid minimum.
+    LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
+                         "min_bitrate_bps < 0");
+  }
+  if (has_start) {
+    if (has_min && *bitrate.start_bitrate_bps < *bitrate.min_bitrate_bps) {
+      LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
+                           "start_bitrate_bps < min_bitrate_bps");
+    } else if (*bitrate.start_bitrate_bps < 0) {
+      // Fixed: previously logged the misspelled "curent_bitrate_bps < 0".
+      LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
+                           "start_bitrate_bps < 0");
+    }
+  }
+  if (has_max) {
+    if (has_start && *bitrate.max_bitrate_bps < *bitrate.start_bitrate_bps) {
+      LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
+                           "max_bitrate_bps < start_bitrate_bps");
+    } else if (has_min && *bitrate.max_bitrate_bps < *bitrate.min_bitrate_bps) {
+      LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
+                           "max_bitrate_bps < min_bitrate_bps");
+    } else if (*bitrate.max_bitrate_bps < 0) {
+      LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
+                           "max_bitrate_bps < 0");
+    }
+  }
+
+  RTC_DCHECK(call_.get());
+  call_->SetClientBitratePreferences(bitrate);
+
+  return RTCError::OK();
+}
+
+// Enables/disables audio playout globally on the shared AudioState.
+// NOTE(review): unlike AddAdaptationResource below, no media-engine null
+// check is done here — presumably callers only reach this when media is
+// configured; confirm against callers.
+void PeerConnection::SetAudioPlayout(bool playout) {
+  if (!worker_thread()->IsCurrent()) {
+    worker_thread()->BlockingCall(
+        [this, playout] { SetAudioPlayout(playout); });
+    return;
+  }
+  auto audio_state = context_->media_engine()->voice().GetAudioState();
+  audio_state->SetPlayout(playout);
+}
+
+// Enables/disables audio recording globally on the shared AudioState.
+void PeerConnection::SetAudioRecording(bool recording) {
+  if (!worker_thread()->IsCurrent()) {
+    worker_thread()->BlockingCall(
+        [this, recording] { SetAudioRecording(recording); });
+    return;
+  }
+  auto audio_state = context_->media_engine()->voice().GetAudioState();
+  audio_state->SetRecording(recording);
+}
+
+// Registers an adaptation Resource with the Call (worker thread); a no-op
+// after Close() has reset `call_`.
+void PeerConnection::AddAdaptationResource(
+    rtc::scoped_refptr<Resource> resource) {
+  if (!worker_thread()->IsCurrent()) {
+    return worker_thread()->BlockingCall(
+        [this, resource]() { return AddAdaptationResource(resource); });
+  }
+  RTC_DCHECK_RUN_ON(worker_thread());
+  if (!call_) {
+    // The PeerConnection has been closed.
+    return;
+  }
+  call_->AddAdaptationResource(resource);
+}
+
+// True when a media engine is present (false for data-only connections).
+bool PeerConnection::ConfiguredForMedia() const {
+  return context_->media_engine();
+}
+
+// Starts RTC event logging on the worker thread with an explicit output
+// period. Returns false when no event log exists (e.g. after Close()).
+bool PeerConnection::StartRtcEventLog(std::unique_ptr<RtcEventLogOutput> output,
+                                      int64_t output_period_ms) {
+  return worker_thread()->BlockingCall(
+      [this, output = std::move(output), output_period_ms]() mutable {
+        return StartRtcEventLog_w(std::move(output), output_period_ms);
+      });
+}
+
+// Convenience overload: 5 s batching by default, immediate output when the
+// "WebRTC-RtcEventLogNewFormat" trial is disabled.
+bool PeerConnection::StartRtcEventLog(
+    std::unique_ptr<RtcEventLogOutput> output) {
+  int64_t output_period_ms = 5000;
+  if (trials().IsDisabled("WebRTC-RtcEventLogNewFormat")) {
+    output_period_ms = webrtc::RtcEventLog::kImmediateOutput;
+  }
+  return StartRtcEventLog(std::move(output), output_period_ms);
+}
+
+void PeerConnection::StopRtcEventLog() {
+  worker_thread()->BlockingCall([this] { StopRtcEventLog_w(); });
+}
+
+// Network-thread lookup of the DTLS transport for `mid`.
+rtc::scoped_refptr<DtlsTransportInterface>
+PeerConnection::LookupDtlsTransportByMid(const std::string& mid) {
+  RTC_DCHECK_RUN_ON(network_thread());
+  return transport_controller_->LookupDtlsTransportByMid(mid);
+}
+
+// Signaling-thread variant that blocks on a hop to the network thread.
+rtc::scoped_refptr<DtlsTransport>
+PeerConnection::LookupDtlsTransportByMidInternal(const std::string& mid) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  // TODO(bugs.webrtc.org/9987): Avoid the thread jump.
+  // This might be done by caching the value on the signaling thread.
+  return network_thread()->BlockingCall([this, mid]() {
+    RTC_DCHECK_RUN_ON(network_thread());
+    return transport_controller_->LookupDtlsTransportByMid(mid);
+  });
+}
+
+// Returns the SCTP transport, or null when no data m= section is active.
+rtc::scoped_refptr<SctpTransportInterface> PeerConnection::GetSctpTransport()
+    const {
+  RTC_DCHECK_RUN_ON(network_thread());
+  if (!sctp_mid_n_)
+    return nullptr;
+
+  return transport_controller_->GetSctpTransport(*sctp_mid_n_);
+}
+
+// Session description accessors; all delegate to the SDP handler and may
+// return null when the corresponding description has not been applied.
+const SessionDescriptionInterface* PeerConnection::local_description() const {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  return sdp_handler_->local_description();
+}
+
+const SessionDescriptionInterface* PeerConnection::remote_description() const {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  return sdp_handler_->remote_description();
+}
+
+const SessionDescriptionInterface* PeerConnection::current_local_description()
+    const {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  return sdp_handler_->current_local_description();
+}
+
+const SessionDescriptionInterface* PeerConnection::current_remote_description()
+    const {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  return sdp_handler_->current_remote_description();
+}
+
+const SessionDescriptionInterface* PeerConnection::pending_local_description()
+    const {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  return sdp_handler_->pending_local_description();
+}
+
+const SessionDescriptionInterface* PeerConnection::pending_remote_description()
+    const {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  return sdp_handler_->pending_remote_description();
+}
+
+// Closes the connection. Idempotent. The teardown order below is
+// deliberate: stats are flushed first, state observers are notified, media
+// channels are destroyed before the transport controller, and the observer
+// pointer is released last so it may be discarded once Close() returns.
+void PeerConnection::Close() {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  TRACE_EVENT0("webrtc", "PeerConnection::Close");
+
+  RTC_LOG_THREAD_BLOCK_COUNT();
+
+  if (IsClosed()) {
+    return;
+  }
+  // Update stats here so that we have the most recent stats for tracks and
+  // streams before the channels are closed.
+  legacy_stats_->UpdateStats(kStatsOutputLevelStandard);
+
+  // Transition both ICE and peer-connection state to "closed" and notify.
+  ice_connection_state_ = PeerConnectionInterface::kIceConnectionClosed;
+  Observer()->OnIceConnectionChange(ice_connection_state_);
+  standardized_ice_connection_state_ =
+      PeerConnectionInterface::IceConnectionState::kIceConnectionClosed;
+  connection_state_ = PeerConnectionInterface::PeerConnectionState::kClosed;
+  Observer()->OnConnectionChange(connection_state_);
+
+  sdp_handler_->Close();
+
+  NoteUsageEvent(UsageEvent::CLOSE_CALLED);
+
+  if (ConfiguredForMedia()) {
+    for (const auto& transceiver : rtp_manager()->transceivers()->List()) {
+      transceiver->internal()->SetPeerConnectionClosed();
+      if (!transceiver->stopped())
+        transceiver->StopInternal();
+    }
+  }
+  // Ensure that all asynchronous stats requests are completed before destroying
+  // the transport controller below.
+  if (stats_collector_) {
+    stats_collector_->WaitForPendingRequest();
+  }
+
+  // Don't destroy BaseChannels until after stats has been cleaned up so that
+  // the last stats request can still read from the channels.
+  // TODO(tommi): The voice/video channels will be partially uninitialized on
+  // the network thread (see `RtpTransceiver::ClearChannel`), partially on the
+  // worker thread (see `PushNewMediaChannelAndDeleteChannel`) and then
+  // eventually freed on the signaling thread.
+  // It would be good to combine those steps with the teardown steps here.
+  sdp_handler_->DestroyMediaChannels();
+
+  // The event log is used in the transport controller, which must be outlived
+  // by the former. CreateOffer by the peer connection is implemented
+  // asynchronously and if the peer connection is closed without resetting the
+  // WebRTC session description factory, the session description factory would
+  // call the transport controller.
+  sdp_handler_->ResetSessionDescFactory();
+  if (ConfiguredForMedia()) {
+    rtp_manager_->Close();
+  }
+
+  // Network-thread teardown: data channel transport, transport controller,
+  // pooled candidates; then invalidate the network-thread safety flag.
+  network_thread()->BlockingCall([this] {
+    RTC_DCHECK_RUN_ON(network_thread());
+    TeardownDataChannelTransport_n({});
+    transport_controller_.reset();
+    port_allocator_->DiscardCandidatePool();
+    if (network_thread_safety_) {
+      network_thread_safety_->SetNotAlive();
+    }
+  });
+
+  sctp_mid_s_.reset();
+  SetSctpTransportName("");
+
+  worker_thread()->BlockingCall([this] {
+    RTC_DCHECK_RUN_ON(worker_thread());
+    worker_thread_safety_->SetNotAlive();
+    call_.reset();
+    // The event log must outlive call (and any other object that uses it).
+    event_log_.reset();
+  });
+  ReportUsagePattern();
+
+  // Signal shutdown to the sdp handler. This invalidates weak pointers for
+  // internal pending callbacks.
+  sdp_handler_->PrepareForShutdown();
+  data_channel_controller_.PrepareForShutdown();
+
+  // The .h file says that observer can be discarded after close() returns.
+  // Make sure this is true.
+  observer_ = nullptr;
+}
+
+// Updates the legacy ICE connection state and notifies the observer; no-op
+// when the state is unchanged or the connection is closed.
+void PeerConnection::SetIceConnectionState(IceConnectionState new_state) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  if (ice_connection_state_ == new_state) {
+    return;
+  }
+
+  // After transitioning to "closed", ignore any additional states from
+  // TransportController (such as "disconnected").
+  if (IsClosed()) {
+    return;
+  }
+
+  RTC_LOG(LS_INFO) << "Changing IceConnectionState " << ice_connection_state_
+                   << " => " << new_state;
+  RTC_DCHECK(ice_connection_state_ !=
+             PeerConnectionInterface::kIceConnectionClosed);
+
+  ice_connection_state_ = new_state;
+  Observer()->OnIceConnectionChange(ice_connection_state_);
+}
+
+// Updates the spec-compliant ICE connection state and notifies the observer.
+void PeerConnection::SetStandardizedIceConnectionState(
+    PeerConnectionInterface::IceConnectionState new_state) {
+  if (standardized_ice_connection_state_ == new_state) {
+    return;
+  }
+
+  if (IsClosed()) {
+    return;
+  }
+
+  RTC_LOG(LS_INFO) << "Changing standardized IceConnectionState "
+                   << standardized_ice_connection_state_ << " => " << new_state;
+
+  standardized_ice_connection_state_ = new_state;
+  Observer()->OnStandardizedIceConnectionChange(new_state);
+}
+
+// Updates the aggregate PeerConnectionState; the first transition to
+// kConnected additionally triggers one-shot usage metrics reporting.
+void PeerConnection::SetConnectionState(
+    PeerConnectionInterface::PeerConnectionState new_state) {
+  if (connection_state_ == new_state)
+    return;
+  if (IsClosed())
+    return;
+  connection_state_ = new_state;
+  Observer()->OnConnectionChange(new_state);
+
+  // The first connection state change to connected happens once per
+  // connection which makes it a good point to report metrics.
+  if (new_state == PeerConnectionState::kConnected && !was_ever_connected_) {
+    was_ever_connected_ = true;
+    ReportFirstConnectUsageMetrics();
+  }
+}
+
+// Emits one-shot UMA histograms (bundle policy, provisional-answer usage,
+// ice-char validity, rtcp-mux policy) when the connection first reaches
+// kConnected. NOTE(review): local_description()/remote_description() are
+// dereferenced without null checks — presumably both exist once connected;
+// confirm against the single call site in SetConnectionState.
+void PeerConnection::ReportFirstConnectUsageMetrics() {
+  // Record bundle-policy from configuration. Done here from
+  // connectionStateChange to limit to actually established connections.
+  BundlePolicyUsage policy = kBundlePolicyUsageMax;
+  switch (configuration_.bundle_policy) {
+    case kBundlePolicyBalanced:
+      policy = kBundlePolicyUsageBalanced;
+      break;
+    case kBundlePolicyMaxBundle:
+      policy = kBundlePolicyUsageMaxBundle;
+      break;
+    case kBundlePolicyMaxCompat:
+      policy = kBundlePolicyUsageMaxCompat;
+      break;
+  }
+  RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.BundlePolicy", policy,
+                            kBundlePolicyUsageMax);
+
+  // Record whether there was a local or remote provisional answer.
+  ProvisionalAnswerUsage pranswer = kProvisionalAnswerNotUsed;
+  if (local_description()->GetType() == SdpType::kPrAnswer) {
+    pranswer = kProvisionalAnswerLocal;
+  } else if (remote_description()->GetType() == SdpType::kPrAnswer) {
+    pranswer = kProvisionalAnswerRemote;
+  }
+  RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.ProvisionalAnswer", pranswer,
+                            kProvisionalAnswerMax);
+
+  // Record the number of valid / invalid ice-ufrag. We do allow certain
+  // non-spec ice-char for backward-compat reasons. At this point we know
+  // that the ufrag/pwd consists of a valid ice-char or one of the four
+  // not allowed characters since we have passed the IsIceChar check done
+  // by the p2p transport description on setRemoteDescription calls.
+  auto transport_infos = remote_description()->description()->transport_infos();
+  if (transport_infos.size() > 0) {
+    auto ice_parameters = transport_infos[0].description.GetIceParameters();
+    auto is_invalid_char = [](char c) {
+      return c == '-' || c == '=' || c == '#' || c == '_';
+    };
+    bool isUsingInvalidIceCharInUfrag =
+        absl::c_any_of(ice_parameters.ufrag, is_invalid_char);
+    bool isUsingInvalidIceCharInPwd =
+        absl::c_any_of(ice_parameters.pwd, is_invalid_char);
+    RTC_HISTOGRAM_BOOLEAN(
+        "WebRTC.PeerConnection.ValidIceChars",
+        !(isUsingInvalidIceCharInUfrag || isUsingInvalidIceCharInPwd));
+  }
+
+  // Record RtcpMuxPolicy setting.
+  RtcpMuxPolicyUsage rtcp_mux_policy = kRtcpMuxPolicyUsageMax;
+  switch (configuration_.rtcp_mux_policy) {
+    case kRtcpMuxPolicyNegotiate:
+      rtcp_mux_policy = kRtcpMuxPolicyUsageNegotiate;
+      break;
+    case kRtcpMuxPolicyRequire:
+      rtcp_mux_policy = kRtcpMuxPolicyUsageRequire;
+      break;
+  }
+  RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.RtcpMuxPolicy",
+                            rtcp_mux_policy, kRtcpMuxPolicyUsageMax);
+}
+
+// ICE event callbacks. Each one drops the event silently once the
+// connection is closed, then forwards to the application observer.
+void PeerConnection::OnIceGatheringChange(
+    PeerConnectionInterface::IceGatheringState new_state) {
+  if (IsClosed()) {
+    return;
+  }
+  ice_gathering_state_ = new_state;
+  Observer()->OnIceGatheringChange(ice_gathering_state_);
+}
+
+// A new local candidate: record it for usage metrics, invalidate the cached
+// stats report, then surface it to the application.
+void PeerConnection::OnIceCandidate(
+    std::unique_ptr<IceCandidateInterface> candidate) {
+  if (IsClosed()) {
+    return;
+  }
+  ReportIceCandidateCollected(candidate->candidate());
+  ClearStatsCache();
+  Observer()->OnIceCandidate(candidate.get());
+}
+
+void PeerConnection::OnIceCandidateError(const std::string& address,
+                                         int port,
+                                         const std::string& url,
+                                         int error_code,
+                                         const std::string& error_text) {
+  if (IsClosed()) {
+    return;
+  }
+  Observer()->OnIceCandidateError(address, port, url, error_code, error_text);
+}
+
+void PeerConnection::OnIceCandidatesRemoved(
+    const std::vector<cricket::Candidate>& candidates) {
+  if (IsClosed()) {
+    return;
+  }
+  Observer()->OnIceCandidatesRemoved(candidates);
+}
+
+// Notes a direct (host-to-host) connection for usage metrics when both sides
+// of the newly selected pair are local ports, then notifies the observer.
+void PeerConnection::OnSelectedCandidatePairChanged(
+    const cricket::CandidatePairChangeEvent& event) {
+  if (IsClosed()) {
+    return;
+  }
+
+  if (event.selected_candidate_pair.local_candidate().type() ==
+          LOCAL_PORT_TYPE &&
+      event.selected_candidate_pair.remote_candidate().type() ==
+          LOCAL_PORT_TYPE) {
+    NoteUsageEvent(UsageEvent::DIRECT_CONNECTION_SELECTED);
+  }
+
+  Observer()->OnIceSelectedCandidatePairChanged(event);
+}
+
+// Sets up the SCTP data channel transport for `mid` (network thread, blocking)
+// and records the mid/transport name on the signaling thread. Returns false
+// when transport setup fails.
+bool PeerConnection::CreateDataChannelTransport(absl::string_view mid) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  RTC_DCHECK(!sctp_mid().has_value() || mid == sctp_mid().value());
+  RTC_LOG(LS_INFO) << "Creating data channel, mid=" << mid;
+
+  absl::optional<std::string> transport_name =
+      network_thread()->BlockingCall([&] {
+        RTC_DCHECK_RUN_ON(network_thread());
+        return SetupDataChannelTransport_n(mid);
+      });
+  if (!transport_name)
+    return false;
+
+  sctp_mid_s_ = std::string(mid);
+  SetSctpTransportName(transport_name.value());
+
+  return true;
+}
+
+// Inverse of the above: tears down the transport (with `error` as the close
+// reason) and clears the signaling-thread bookkeeping.
+void PeerConnection::DestroyDataChannelTransport(RTCError error) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  network_thread()->BlockingCall([&] {
+    RTC_DCHECK_RUN_ON(network_thread());
+    TeardownDataChannelTransport_n(error);
+  });
+  sctp_mid_s_.reset();
+  SetSctpTransportName("");
+}
+
+// Forwards per-channel state transitions to the stats collector, if any.
+void PeerConnection::OnSctpDataChannelStateChanged(
+    int channel_id,
+    DataChannelInterface::DataState state) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  if (stats_collector_)
+    stats_collector_->OnSctpDataChannelStateChanged(channel_id, state);
+}
+
+// One-time port allocator setup on the network thread: derives allocator
+// flags from the configuration and field trials, installs the candidate
+// filter, and pushes the STUN/TURN server configuration (which may create
+// pooled sessions). Returns whether IPv6 ended up enabled.
+PeerConnection::InitializePortAllocatorResult
+PeerConnection::InitializePortAllocator_n(
+    const cricket::ServerAddresses& stun_servers,
+    const std::vector<cricket::RelayServerConfig>& turn_servers,
+    const RTCConfiguration& configuration) {
+  RTC_DCHECK_RUN_ON(network_thread());
+
+  port_allocator_->Initialize();
+  // To handle both internal and externally created port allocator, we will
+  // enable BUNDLE here.
+  int port_allocator_flags = port_allocator_->flags();
+  port_allocator_flags |= cricket::PORTALLOCATOR_ENABLE_SHARED_SOCKET |
+                          cricket::PORTALLOCATOR_ENABLE_IPV6 |
+                          cricket::PORTALLOCATOR_ENABLE_IPV6_ON_WIFI;
+  if (trials().IsDisabled("WebRTC-IPv6Default")) {
+    port_allocator_flags &= ~(cricket::PORTALLOCATOR_ENABLE_IPV6);
+  }
+  if (configuration.disable_ipv6_on_wifi) {
+    port_allocator_flags &= ~(cricket::PORTALLOCATOR_ENABLE_IPV6_ON_WIFI);
+    RTC_LOG(LS_INFO) << "IPv6 candidates on Wi-Fi are disabled.";
+  }
+
+  if (configuration.tcp_candidate_policy == kTcpCandidatePolicyDisabled) {
+    port_allocator_flags |= cricket::PORTALLOCATOR_DISABLE_TCP;
+    RTC_LOG(LS_INFO) << "TCP candidates are disabled.";
+  }
+
+  if (configuration.candidate_network_policy ==
+      kCandidateNetworkPolicyLowCost) {
+    port_allocator_flags |= cricket::PORTALLOCATOR_DISABLE_COSTLY_NETWORKS;
+    RTC_LOG(LS_INFO) << "Do not gather candidates on high-cost networks";
+  }
+
+  if (configuration.disable_link_local_networks) {
+    port_allocator_flags |= cricket::PORTALLOCATOR_DISABLE_LINK_LOCAL_NETWORKS;
+    RTC_LOG(LS_INFO) << "Disable candidates on link-local network interfaces.";
+  }
+
+  port_allocator_->set_flags(port_allocator_flags);
+  // No step delay is used while allocating ports.
+  port_allocator_->set_step_delay(cricket::kMinimumStepDelay);
+  port_allocator_->SetCandidateFilter(
+      ConvertIceTransportTypeToCandidateFilter(configuration.type));
+  port_allocator_->set_max_ipv6_networks(configuration.max_ipv6_networks);
+
+  // Attach the TLS certificate verifier to each TURN server entry.
+  auto turn_servers_copy = turn_servers;
+  for (auto& turn_server : turn_servers_copy) {
+    turn_server.tls_cert_verifier = tls_cert_verifier_.get();
+  }
+  // Call this last since it may create pooled allocator sessions using the
+  // properties set above.
+  port_allocator_->SetConfiguration(
+      stun_servers, std::move(turn_servers_copy),
+      configuration.ice_candidate_pool_size,
+      configuration.GetTurnPortPrunePolicy(), configuration.turn_customizer,
+      configuration.stun_candidate_keepalive_interval);
+
+  InitializePortAllocatorResult res;
+  res.enable_ipv6 = port_allocator_flags & cricket::PORTALLOCATOR_ENABLE_IPV6;
+  return res;
+}
+
+// Re-applies allocator settings after SetConfiguration. Returns whether the
+// port allocator accepted the new configuration.
+bool PeerConnection::ReconfigurePortAllocator_n(
+    const cricket::ServerAddresses& stun_servers,
+    const std::vector<cricket::RelayServerConfig>& turn_servers,
+    IceTransportsType type,
+    int candidate_pool_size,
+    PortPrunePolicy turn_port_prune_policy,
+    webrtc::TurnCustomizer* turn_customizer,
+    absl::optional<int> stun_candidate_keepalive_interval,
+    bool have_local_description) {
+  RTC_DCHECK_RUN_ON(network_thread());
+  port_allocator_->SetCandidateFilter(
+      ConvertIceTransportTypeToCandidateFilter(type));
+  // Add the custom tls turn servers if they exist.
+  auto turn_servers_copy = turn_servers;
+  for (auto& turn_server : turn_servers_copy) {
+    turn_server.tls_cert_verifier = tls_cert_verifier_.get();
+  }
+  // Call this last since it may create pooled allocator sessions using the
+  // candidate filter set above.
+  return port_allocator_->SetConfiguration(
+      stun_servers, std::move(turn_servers_copy), candidate_pool_size,
+      turn_port_prune_policy, turn_customizer,
+      stun_candidate_keepalive_interval);
+}
+
+// Worker-thread implementation of StartRtcEventLog(); false when the event
+// log has already been released (e.g. by Close()).
+bool PeerConnection::StartRtcEventLog_w(
+    std::unique_ptr<RtcEventLogOutput> output,
+    int64_t output_period_ms) {
+  RTC_DCHECK_RUN_ON(worker_thread());
+  if (!event_log_) {
+    return false;
+  }
+  return event_log_->StartLogging(std::move(output), output_period_ms);
+}
+
+void PeerConnection::StopRtcEventLog_w() {
+  RTC_DCHECK_RUN_ON(worker_thread());
+  if (event_log_) {
+    event_log_->StopLogging();
+  }
+}
+
+// DTLS role of the SCTP transport, or nullopt when no data section exists.
+absl::optional<rtc::SSLRole> PeerConnection::GetSctpSslRole_n() {
+  RTC_DCHECK_RUN_ON(network_thread());
+  return sctp_mid_n_ ? transport_controller_->GetDtlsRole(*sctp_mid_n_)
+                     : absl::nullopt;
+}
+
+// Fetches the negotiated DTLS role for `content_name` into `role`. Requires
+// both descriptions to be applied; blocks on a hop to the network thread.
+bool PeerConnection::GetSslRole(const std::string& content_name,
+                                rtc::SSLRole* role) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  if (!local_description() || !remote_description()) {
+    RTC_LOG(LS_INFO)
+        << "Local and Remote descriptions must be applied to get the "
+           "SSL Role of the session.";
+    return false;
+  }
+
+  auto dtls_role = network_thread()->BlockingCall([this, content_name]() {
+    RTC_DCHECK_RUN_ON(network_thread());
+    return transport_controller_->GetDtlsRole(content_name);
+  });
+  if (dtls_role) {
+    *role = *dtls_role;
+    return true;
+  }
+  return false;
+}
+
+// Pure lookup helper: copies the transport description for `content_name`
+// out of `description` into `tdesc`. False on null inputs or missing entry.
+bool PeerConnection::GetTransportDescription(
+    const SessionDescription* description,
+    const std::string& content_name,
+    cricket::TransportDescription* tdesc) {
+  if (!description || !tdesc) {
+    return false;
+  }
+  const TransportInfo* transport_info =
+      description->GetTransportInfoByName(content_name);
+  if (!transport_info) {
+    return false;
+  }
+  *tdesc = transport_info->description;
+  return true;
+}
+
+// Network-thread snapshot of per-data-channel stats.
+std::vector<DataChannelStats> PeerConnection::GetDataChannelStats() const {
+  RTC_DCHECK_RUN_ON(network_thread());
+  return data_channel_controller_.GetDataChannelStats();
+}
+
+// Signaling-thread cached SCTP transport name; empty optional when no SCTP
+// mid is set or the transport controller copy is gone.
+absl::optional<std::string> PeerConnection::sctp_transport_name() const {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  if (sctp_mid_s_ && transport_controller_copy_)
+    return sctp_transport_name_s_;
+  return absl::optional<std::string>();
+}
+
+// Updates the cached transport name and invalidates the stats report, which
+// includes the transport name.
+void PeerConnection::SetSctpTransportName(std::string sctp_transport_name) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  sctp_transport_name_s_ = std::move(sctp_transport_name);
+  ClearStatsCache();
+}
+
+absl::optional<std::string> PeerConnection::sctp_mid() const {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  return sctp_mid_s_;
+}
+
+// Candidate stats from pooled (pre-gathered) allocator sessions; empty once
+// the network thread has been shut down.
+cricket::CandidateStatsList PeerConnection::GetPooledCandidateStats() const {
+  RTC_DCHECK_RUN_ON(network_thread());
+  if (!network_thread_safety_->alive())
+    return {};
+  cricket::CandidateStatsList candidate_stats_list;
+  port_allocator_->GetCandidateStatsFromPooledSessions(&candidate_stats_list);
+  return candidate_stats_list;
+}
+
+// Collects TransportStats for each requested transport name. Transports that
+// fail the lookup are logged and omitted from the result; an empty map is
+// returned once the network thread has been shut down.
+std::map<std::string, cricket::TransportStats>
+PeerConnection::GetTransportStatsByNames(
+    const std::set<std::string>& transport_names) {
+  TRACE_EVENT0("webrtc", "PeerConnection::GetTransportStatsByNames");
+  RTC_DCHECK_RUN_ON(network_thread());
+  if (!network_thread_safety_->alive())
+    return {};
+
+  // Guard against re-entrant blocking while already on the network thread.
+  rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+  std::map<std::string, cricket::TransportStats> transport_stats_by_name;
+  for (const std::string& transport_name : transport_names) {
+    cricket::TransportStats transport_stats;
+    bool success =
+        transport_controller_->GetStats(transport_name, &transport_stats);
+    if (success) {
+      transport_stats_by_name[transport_name] = std::move(transport_stats);
+    } else {
+      RTC_LOG(LS_ERROR) << "Failed to get transport stats for transport_name="
+                        << transport_name;
+    }
+  }
+  return transport_stats_by_name;
+}
+
+// Writes the local certificate for `transport_name` into `*certificate`.
+// False on a null out-param, after network-thread shutdown, or when no
+// certificate exists for that transport.
+bool PeerConnection::GetLocalCertificate(
+    const std::string& transport_name,
+    rtc::scoped_refptr<rtc::RTCCertificate>* certificate) {
+  RTC_DCHECK_RUN_ON(network_thread());
+  if (!network_thread_safety_->alive() || !certificate) {
+    return false;
+  }
+  *certificate = transport_controller_->GetLocalCertificate(transport_name);
+  return *certificate != nullptr;
+}
+
+std::unique_ptr<rtc::SSLCertChain> PeerConnection::GetRemoteSSLCertChain(
+    const std::string& transport_name) {
+  RTC_DCHECK_RUN_ON(network_thread());
+  return transport_controller_->GetRemoteSSLCertChain(transport_name);
+}
+
+// Whether an ICE restart was requested for `content_name` via the SDP layer.
+bool PeerConnection::IceRestartPending(const std::string& content_name) const {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  return sdp_handler_->IceRestartPending(content_name);
+}
+
+// Whether the transport layer flagged `content_name` as needing an ICE
+// restart (set e.g. by SetConfiguration). Blocks on the network thread.
+bool PeerConnection::NeedsIceRestart(const std::string& content_name) const {
+  return network_thread()->BlockingCall([this, &content_name] {
+    RTC_DCHECK_RUN_ON(network_thread());
+    return transport_controller_->NeedsIceRestart(content_name);
+  });
+}
+
+// Maps the aggregate cricket::IceConnectionState reported by the transport
+// controller onto the legacy PeerConnectionInterface ICE connection state,
+// emitting intermediate "connected" transitions and usage events as needed.
+void PeerConnection::OnTransportControllerConnectionState(
+    cricket::IceConnectionState state) {
+  switch (state) {
+    case cricket::kIceConnectionConnecting:
+      // If the current state is Connected or Completed, then there were
+      // writable channels but now there are not, so the next state must
+      // be Disconnected.
+      // kIceConnectionConnecting is currently used as the default,
+      // un-connected state by the TransportController, so its only use is
+      // detecting disconnections.
+      if (ice_connection_state_ ==
+              PeerConnectionInterface::kIceConnectionConnected ||
+          ice_connection_state_ ==
+              PeerConnectionInterface::kIceConnectionCompleted) {
+        SetIceConnectionState(
+            PeerConnectionInterface::kIceConnectionDisconnected);
+      }
+      break;
+    case cricket::kIceConnectionFailed:
+      SetIceConnectionState(PeerConnectionInterface::kIceConnectionFailed);
+      break;
+    case cricket::kIceConnectionConnected:
+      RTC_LOG(LS_INFO) << "Changing to ICE connected state because "
+                          "all transports are writable.";
+      {
+        // Snapshot the transceivers on this thread, then report transport
+        // stats asynchronously on the network thread.
+        std::vector<RtpTransceiverProxyRefPtr> transceivers;
+        if (ConfiguredForMedia()) {
+          transceivers = rtp_manager()->transceivers()->List();
+        }
+
+        network_thread()->PostTask(
+            SafeTask(network_thread_safety_,
+                     [this, transceivers = std::move(transceivers)] {
+                       RTC_DCHECK_RUN_ON(network_thread());
+                       ReportTransportStats(std::move(transceivers));
+                     }));
+      }
+
+      SetIceConnectionState(PeerConnectionInterface::kIceConnectionConnected);
+      NoteUsageEvent(UsageEvent::ICE_STATE_CONNECTED);
+      break;
+    case cricket::kIceConnectionCompleted:
+      RTC_LOG(LS_INFO) << "Changing to ICE completed state because "
+                          "all transports are complete.";
+      if (ice_connection_state_ !=
+          PeerConnectionInterface::kIceConnectionConnected) {
+        // If jumping directly from "checking" to "connected",
+        // signal "connected" first.
+        SetIceConnectionState(PeerConnectionInterface::kIceConnectionConnected);
+      }
+      SetIceConnectionState(PeerConnectionInterface::kIceConnectionCompleted);
+
+      NoteUsageEvent(UsageEvent::ICE_STATE_CONNECTED);
+      break;
+    default:
+      RTC_DCHECK_NOTREACHED();
+  }
+}
+
+// Called when local ICE candidates have been gathered for `transport_name`.
+// Wraps each candidate in a JsepIceCandidate, records it in the local
+// description, and surfaces it to the application via OnIceCandidate().
+void PeerConnection::OnTransportControllerCandidatesGathered(
+    const std::string& transport_name,
+    const cricket::Candidates& candidates) {
+  // TODO(bugs.webrtc.org/12427): Expect this to come in on the network thread
+  // (not signaling as it currently does), handle appropriately.
+  int sdp_mline_index;
+  if (!GetLocalCandidateMediaIndex(transport_name, &sdp_mline_index)) {
+    RTC_LOG(LS_ERROR)
+        << "OnTransportControllerCandidatesGathered: content name "
+        << transport_name << " not found";
+    return;
+  }
+
+  for (cricket::Candidates::const_iterator citer = candidates.begin();
+       citer != candidates.end(); ++citer) {
+    // Use transport_name as the candidate media id.
+    std::unique_ptr<JsepIceCandidate> candidate(
+        new JsepIceCandidate(transport_name, sdp_mline_index, *citer));
+    sdp_handler_->AddLocalIceCandidate(candidate.get());
+    OnIceCandidate(std::move(candidate));
+  }
+}
+
+// Forwards a candidate gathering error event to the observer callback.
+void PeerConnection::OnTransportControllerCandidateError(
+    const cricket::IceCandidateErrorEvent& event) {
+  OnIceCandidateError(event.address, event.port, event.url, event.error_code,
+                      event.error_text);
+}
+
+// Removes the given local candidates from the local description and notifies
+// the application. Rejects the whole batch if any candidate lacks a
+// transport name (content name), since it could not be matched to an m-line.
+void PeerConnection::OnTransportControllerCandidatesRemoved(
+    const std::vector<cricket::Candidate>& candidates) {
+  // Sanity check.
+  for (const cricket::Candidate& candidate : candidates) {
+    if (candidate.transport_name().empty()) {
+      RTC_LOG(LS_ERROR) << "OnTransportControllerCandidatesRemoved: "
+                           "empty content name in candidate "
+                        << candidate.ToString();
+      return;
+    }
+  }
+  sdp_handler_->RemoveLocalIceCandidates(candidates);
+  OnIceCandidatesRemoved(candidates);
+}
+
+// Forwards a selected-candidate-pair-change event to the observer callback.
+void PeerConnection::OnTransportControllerCandidateChanged(
+    const cricket::CandidatePairChangeEvent& event) {
+  OnSelectedCandidatePairChanged(event);
+}
+
+// Records a DTLS handshake failure in the UMA histogram; no state change.
+void PeerConnection::OnTransportControllerDtlsHandshakeError(
+    rtc::SSLHandshakeError error) {
+  RTC_HISTOGRAM_ENUMERATION(
+      "WebRTC.PeerConnection.DtlsHandshakeError", static_cast<int>(error),
+      static_cast<int>(rtc::SSLHandshakeError::MAX_VALUE));
+}
+
+// Returns the media index for a local ice candidate given the content name.
+// Scans the contents of the current local description for a matching name
+// and writes its m-line index into `sdp_mline_index`. Returns false if there
+// is no local description, `sdp_mline_index` is null, or no content matches.
+bool PeerConnection::GetLocalCandidateMediaIndex(
+    const std::string& content_name,
+    int* sdp_mline_index) {
+  if (!local_description() || !sdp_mline_index) {
+    return false;
+  }
+
+  bool content_found = false;
+  const ContentInfos& contents = local_description()->description()->contents();
+  for (size_t index = 0; index < contents.size(); ++index) {
+    if (contents[index].name == content_name) {
+      *sdp_mline_index = static_cast<int>(index);
+      content_found = true;
+      break;
+    }
+  }
+  return content_found;
+}
+
+// Returns the stats of the underlying Call object, or a default-constructed
+// Call::Stats when there is no call (e.g. after Close). Marshals itself onto
+// the worker thread via a blocking call if invoked from another thread.
+Call::Stats PeerConnection::GetCallStats() {
+  if (!worker_thread()->IsCurrent()) {
+    return worker_thread()->BlockingCall([this] { return GetCallStats(); });
+  }
+  RTC_DCHECK_RUN_ON(worker_thread());
+  rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+  if (call_) {
+    return call_->GetStats();
+  } else {
+    return Call::Stats();
+  }
+}
+
+// Returns the audio device stats from the voice engine, or nullopt when the
+// connection was created without a media engine.
+absl::optional<AudioDeviceModule::Stats> PeerConnection::GetAudioDeviceStats() {
+  if (context_->media_engine()) {
+    return context_->media_engine()->voice().GetAudioDeviceStats();
+  }
+  return absl::nullopt;
+}
+
+// Binds the data channel controller to the data channel transport for `mid`.
+// Runs on the network thread (the _n suffix). On success returns the name of
+// the underlying DTLS transport (empty string if none is available yet); on
+// failure clears `sctp_mid_n_` and returns nullopt.
+absl::optional<std::string> PeerConnection::SetupDataChannelTransport_n(
+    absl::string_view mid) {
+  sctp_mid_n_ = std::string(mid);
+  DataChannelTransportInterface* transport =
+      transport_controller_->GetDataChannelTransport(*sctp_mid_n_);
+  if (!transport) {
+    RTC_LOG(LS_ERROR)
+        << "Data channel transport is not available for data channels, mid="
+        << mid;
+    sctp_mid_n_ = absl::nullopt;
+    return absl::nullopt;
+  }
+
+  absl::optional<std::string> transport_name;
+  cricket::DtlsTransportInternal* dtls_transport =
+      transport_controller_->GetDtlsTransport(*sctp_mid_n_);
+  if (dtls_transport) {
+    transport_name = dtls_transport->transport_name();
+  } else {
+    // Make sure we still set a valid string.
+    transport_name = std::string("");
+  }
+
+  data_channel_controller_.SetupDataChannelTransport_n(transport);
+
+  return transport_name;
+}
+
+// Unbinds the data channel transport and forwards `error` (the teardown
+// reason) to the data channel controller. Runs on the network thread.
+void PeerConnection::TeardownDataChannelTransport_n(RTCError error) {
+  if (sctp_mid_n_) {
+    // `sctp_mid_` may still be active through an SCTP transport. If not, unset
+    // it.
+    RTC_LOG(LS_INFO) << "Tearing down data channel transport for mid="
+                     << *sctp_mid_n_;
+    sctp_mid_n_.reset();
+  }
+
+  data_channel_controller_.TeardownDataChannelTransport_n(error);
+}
+
+// Returns false if bundle is enabled and rtcp_mux is disabled.
+// Every non-rejected, non-bundle-only RTP content that belongs to a BUNDLE
+// group must have rtcp-mux enabled; returns true trivially when there are no
+// bundle groups.
+bool PeerConnection::ValidateBundleSettings(
+    const SessionDescription* desc,
+    const std::map<std::string, const cricket::ContentGroup*>&
+        bundle_groups_by_mid) {
+  if (bundle_groups_by_mid.empty())
+    return true;
+
+  const cricket::ContentInfos& contents = desc->contents();
+  for (cricket::ContentInfos::const_iterator citer = contents.begin();
+       citer != contents.end(); ++citer) {
+    const cricket::ContentInfo* content = (&*citer);
+    RTC_DCHECK(content != NULL);
+    auto it = bundle_groups_by_mid.find(content->name);
+    if (it != bundle_groups_by_mid.end() &&
+        !(content->rejected || content->bundle_only) &&
+        content->type == MediaProtocolType::kRtp) {
+      if (!HasRtcpMuxEnabled(content))
+        return false;
+    }
+  }
+  // RTCP-MUX is enabled in all the contents.
+  return true;
+}
+
+// Classifies the remote description's m-line layout (empty / datachannel-only
+// / simple / complex, with or without BUNDLE, plus a Plan-B bucket) and
+// records the result in the BundleUsage UMA histogram.
+void PeerConnection::ReportSdpBundleUsage(
+    const SessionDescriptionInterface& remote_description) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+
+  bool using_bundle =
+      remote_description.description()->HasGroup(cricket::GROUP_TYPE_BUNDLE);
+  int num_audio_mlines = 0;
+  int num_video_mlines = 0;
+  int num_data_mlines = 0;
+  for (const ContentInfo& content :
+       remote_description.description()->contents()) {
+    cricket::MediaType media_type = content.media_description()->type();
+    if (media_type == cricket::MEDIA_TYPE_AUDIO) {
+      num_audio_mlines += 1;
+    } else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
+      num_video_mlines += 1;
+    } else if (media_type == cricket::MEDIA_TYPE_DATA) {
+      num_data_mlines += 1;
+    }
+  }
+  // "Simple" means at most one audio and at most one video m-line.
+  bool simple = num_audio_mlines <= 1 && num_video_mlines <= 1;
+  BundleUsage usage = kBundleUsageMax;
+  if (num_audio_mlines == 0 && num_video_mlines == 0) {
+    if (num_data_mlines > 0) {
+      usage = using_bundle ? kBundleUsageBundleDatachannelOnly
+                           : kBundleUsageNoBundleDatachannelOnly;
+    } else {
+      usage = kBundleUsageEmpty;
+    }
+  } else if (configuration_.sdp_semantics == SdpSemantics::kPlanB_DEPRECATED) {
+    // In plan-b, simple/complex usage will not show up in the number of
+    // m-lines or BUNDLE.
+    usage = using_bundle ? kBundleUsageBundlePlanB : kBundleUsageNoBundlePlanB;
+  } else {
+    if (simple) {
+      usage =
+          using_bundle ? kBundleUsageBundleSimple : kBundleUsageNoBundleSimple;
+    } else {
+      usage = using_bundle ? kBundleUsageBundleComplex
+                           : kBundleUsageNoBundleComplex;
+    }
+  }
+  RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.BundleUsage", usage,
+                            kBundleUsageMax);
+}
+
+// Records usage events for a locally collected ICE candidate: always
+// CANDIDATE_COLLECTED, plus private-IP / unresolved (mDNS) / IPv6 events
+// depending on the candidate's address.
+void PeerConnection::ReportIceCandidateCollected(
+    const cricket::Candidate& candidate) {
+  NoteUsageEvent(UsageEvent::CANDIDATE_COLLECTED);
+  if (candidate.address().IsPrivateIP()) {
+    NoteUsageEvent(UsageEvent::PRIVATE_CANDIDATE_COLLECTED);
+  }
+  if (candidate.address().IsUnresolvedIP()) {
+    NoteUsageEvent(UsageEvent::MDNS_CANDIDATE_COLLECTED);
+  }
+  if (candidate.address().family() == AF_INET6) {
+    NoteUsageEvent(UsageEvent::IPV6_CANDIDATE_COLLECTED);
+  }
+}
+
+// Records a usage event in the connection's usage pattern accumulator.
+// Signaling thread only.
+void PeerConnection::NoteUsageEvent(UsageEvent event) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  usage_pattern_.NoteUsageEvent(event);
+}
+
+// Asynchronously adds remote candidates on the network thread.
+// Called on the signaling thread; sanitizes fields that only make sense for
+// local candidates, posts the add to the network thread, and on success
+// bounces back to the signaling thread to report usage metrics and advance
+// the ICE connection state to Checking where appropriate.
+void PeerConnection::AddRemoteCandidate(const std::string& mid,
+                                        const cricket::Candidate& candidate) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+
+  if (candidate.network_type() != rtc::ADAPTER_TYPE_UNKNOWN) {
+    RTC_DLOG(LS_WARNING) << "Using candidate with adapter type set - this "
+                            "should only happen in test";
+  }
+
+  // Clear fields that do not make sense as remote candidates.
+  cricket::Candidate new_candidate(candidate);
+  new_candidate.set_network_type(rtc::ADAPTER_TYPE_UNKNOWN);
+  new_candidate.set_relay_protocol("");
+  new_candidate.set_underlying_type_for_vpn(rtc::ADAPTER_TYPE_UNKNOWN);
+
+  network_thread()->PostTask(SafeTask(
+      network_thread_safety_, [this, mid = mid, candidate = new_candidate] {
+        RTC_DCHECK_RUN_ON(network_thread());
+        std::vector<cricket::Candidate> candidates = {candidate};
+        RTCError error =
+            transport_controller_->AddRemoteCandidates(mid, candidates);
+        if (error.ok()) {
+          signaling_thread()->PostTask(SafeTask(
+              signaling_thread_safety_.flag(),
+              [this, candidate = std::move(candidate)] {
+                ReportRemoteIceCandidateAdded(candidate);
+                // Candidates successfully submitted for checking.
+                if (ice_connection_state() ==
+                        PeerConnectionInterface::kIceConnectionNew ||
+                    ice_connection_state() ==
+                        PeerConnectionInterface::kIceConnectionDisconnected) {
+                  // If state is New, then the session has just gotten its first
+                  // remote ICE candidates, so go to Checking. If state is
+                  // Disconnected, the session is re-using old candidates or
+                  // receiving additional ones, so go to Checking. If state is
+                  // Connected, stay Connected.
+                  // TODO(bemasc): If state is Connected, and the new candidates
+                  // are for a newly added transport, then the state actually
+                  // _should_ move to checking. Add a way to distinguish that
+                  // case.
+                  SetIceConnectionState(
+                      PeerConnectionInterface::kIceConnectionChecking);
+                }
+                // TODO(bemasc): If state is Completed, go back to Connected.
+              }));
+        } else {
+          RTC_LOG(LS_WARNING) << error.message();
+        }
+      }));
+}
+
+// Reports the accumulated usage pattern to the observer.
+void PeerConnection::ReportUsagePattern() const {
+  usage_pattern_.ReportUsagePattern(observer_);
+}
+
+// Records usage events for a remote candidate that was successfully added:
+// always REMOTE_CANDIDATE_ADDED, plus private-IP / unresolved (mDNS) / IPv6
+// events depending on the candidate's address. Signaling thread only.
+void PeerConnection::ReportRemoteIceCandidateAdded(
+    const cricket::Candidate& candidate) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+
+  NoteUsageEvent(UsageEvent::REMOTE_CANDIDATE_ADDED);
+
+  if (candidate.address().IsPrivateIP()) {
+    NoteUsageEvent(UsageEvent::REMOTE_PRIVATE_CANDIDATE_ADDED);
+  }
+  if (candidate.address().IsUnresolvedIP()) {
+    NoteUsageEvent(UsageEvent::REMOTE_MDNS_CANDIDATE_ADDED);
+  }
+  if (candidate.address().family() == AF_INET6) {
+    NoteUsageEvent(UsageEvent::REMOTE_IPV6_CANDIDATE_ADDED);
+  }
+}
+
+// Returns true when media must be encrypted: either DTLS is enabled or the
+// SDES policy of the session description factory requires it.
+bool PeerConnection::SrtpRequired() const {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  return (dtls_enabled_ ||
+          sdp_handler_->webrtc_session_desc_factory()->SdesPolicy() ==
+              cricket::SEC_REQUIRED);
+}
+
+// Translates the transport controller's gathering state into the
+// PeerConnectionInterface gathering state and notifies via
+// OnIceGatheringChange(). Unknown values are logged and DCHECK-fail.
+void PeerConnection::OnTransportControllerGatheringState(
+    cricket::IceGatheringState state) {
+  RTC_DCHECK(signaling_thread()->IsCurrent());
+  if (state == cricket::kIceGatheringGathering) {
+    OnIceGatheringChange(PeerConnectionInterface::kIceGatheringGathering);
+  } else if (state == cricket::kIceGatheringComplete) {
+    OnIceGatheringChange(PeerConnectionInterface::kIceGatheringComplete);
+  } else if (state == cricket::kIceGatheringNew) {
+    OnIceGatheringChange(PeerConnectionInterface::kIceGatheringNew);
+  } else {
+    RTC_LOG(LS_ERROR) << "Unknown state received: " << state;
+    RTC_DCHECK_NOTREACHED();
+  }
+}
+
+// Runs on network_thread().
+// Groups the media types in use by transport name (including the SCTP data
+// transport, if any), then for each transport fetches its stats and reports
+// best-connection and negotiated-cipher metrics.
+void PeerConnection::ReportTransportStats(
+    std::vector<RtpTransceiverProxyRefPtr> transceivers) {
+  TRACE_EVENT0("webrtc", "PeerConnection::ReportTransportStats");
+  rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+  std::map<std::string, std::set<cricket::MediaType>>
+      media_types_by_transport_name;
+  for (const auto& transceiver : transceivers) {
+    if (transceiver->internal()->channel()) {
+      std::string transport_name(
+          transceiver->internal()->channel()->transport_name());
+      media_types_by_transport_name[transport_name].insert(
+          transceiver->media_type());
+    }
+  }
+
+  // Include the data transport (if set up) under its DTLS transport's name.
+  if (sctp_mid_n_) {
+    cricket::DtlsTransportInternal* dtls_transport =
+        transport_controller_->GetDtlsTransport(*sctp_mid_n_);
+    if (dtls_transport) {
+      media_types_by_transport_name[dtls_transport->transport_name()].insert(
+          cricket::MEDIA_TYPE_DATA);
+    }
+  }
+
+  for (const auto& entry : media_types_by_transport_name) {
+    const std::string& transport_name = entry.first;
+    const std::set<cricket::MediaType> media_types = entry.second;
+    cricket::TransportStats stats;
+    if (transport_controller_->GetStats(transport_name, &stats)) {
+      ReportBestConnectionState(stats);
+      ReportNegotiatedCiphers(dtls_enabled_, stats, media_types);
+    }
+  }
+}
+
+// Walk through the ConnectionInfos to gather best connection usage
+// for IPv4 and IPv6.
+// Records the candidate-pair type (TCP vs UDP histogram) and the local IP
+// family of the first best connection found, then returns. A hostname-only
+// (unresolved, e.g. mDNS) local address is tolerated but not counted.
+// static (no member state required)
+void PeerConnection::ReportBestConnectionState(
+    const cricket::TransportStats& stats) {
+  for (const cricket::TransportChannelStats& channel_stats :
+       stats.channel_stats) {
+    for (const cricket::ConnectionInfo& connection_info :
+         channel_stats.ice_transport_stats.connection_infos) {
+      if (!connection_info.best_connection) {
+        continue;
+      }
+
+      const cricket::Candidate& local = connection_info.local_candidate;
+      const cricket::Candidate& remote = connection_info.remote_candidate;
+
+      // Increment the counter for IceCandidatePairType.
+      if (local.protocol() == cricket::TCP_PROTOCOL_NAME ||
+          (local.type() == RELAY_PORT_TYPE &&
+           local.relay_protocol() == cricket::TCP_PROTOCOL_NAME)) {
+        RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.CandidatePairType_TCP",
+                                  GetIceCandidatePairCounter(local, remote),
+                                  kIceCandidatePairMax);
+      } else if (local.protocol() == cricket::UDP_PROTOCOL_NAME) {
+        RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.CandidatePairType_UDP",
+                                  GetIceCandidatePairCounter(local, remote),
+                                  kIceCandidatePairMax);
+      } else {
+        RTC_CHECK_NOTREACHED();
+      }
+
+      // Increment the counter for IP type.
+      if (local.address().family() == AF_INET) {
+        RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.IPMetrics",
+                                  kBestConnections_IPv4,
+                                  kPeerConnectionAddressFamilyCounter_Max);
+      } else if (local.address().family() == AF_INET6) {
+        RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.IPMetrics",
+                                  kBestConnections_IPv6,
+                                  kPeerConnectionAddressFamilyCounter_Max);
+      } else {
+        RTC_CHECK(!local.address().hostname().empty() &&
+                  local.address().IsUnresolvedIP());
+      }
+
+      // Only the first best connection is reported.
+      return;
+    }
+  }
+}
+
+// static
+// Reports the negotiated SSL cipher suite and peer signature algorithm of
+// the first channel to per-media-type UMA histograms. No-op when DTLS is
+// disabled, there are no channel stats, or nothing was negotiated yet.
+void PeerConnection::ReportNegotiatedCiphers(
+    bool dtls_enabled,
+    const cricket::TransportStats& stats,
+    const std::set<cricket::MediaType>& media_types) {
+  if (!dtls_enabled || stats.channel_stats.empty()) {
+    return;
+  }
+
+  int srtp_crypto_suite = stats.channel_stats[0].srtp_crypto_suite;
+  int ssl_cipher_suite = stats.channel_stats[0].ssl_cipher_suite;
+  // Neither SRTP nor TLS negotiated anything yet; nothing to report.
+  if (srtp_crypto_suite == rtc::kSrtpInvalidCryptoSuite &&
+      ssl_cipher_suite == rtc::kTlsNullWithNullNull) {
+    return;
+  }
+
+  if (ssl_cipher_suite != rtc::kTlsNullWithNullNull) {
+    for (cricket::MediaType media_type : media_types) {
+      switch (media_type) {
+        case cricket::MEDIA_TYPE_AUDIO:
+          RTC_HISTOGRAM_ENUMERATION_SPARSE(
+              "WebRTC.PeerConnection.SslCipherSuite.Audio", ssl_cipher_suite,
+              rtc::kSslCipherSuiteMaxValue);
+          break;
+        case cricket::MEDIA_TYPE_VIDEO:
+          RTC_HISTOGRAM_ENUMERATION_SPARSE(
+              "WebRTC.PeerConnection.SslCipherSuite.Video", ssl_cipher_suite,
+              rtc::kSslCipherSuiteMaxValue);
+          break;
+        case cricket::MEDIA_TYPE_DATA:
+          RTC_HISTOGRAM_ENUMERATION_SPARSE(
+              "WebRTC.PeerConnection.SslCipherSuite.Data", ssl_cipher_suite,
+              rtc::kSslCipherSuiteMaxValue);
+          break;
+        default:
+          RTC_DCHECK_NOTREACHED();
+          continue;
+      }
+    }
+  }
+
+  uint16_t ssl_peer_signature_algorithm =
+      stats.channel_stats[0].ssl_peer_signature_algorithm;
+  if (ssl_peer_signature_algorithm != rtc::kSslSignatureAlgorithmUnknown) {
+    for (cricket::MediaType media_type : media_types) {
+      switch (media_type) {
+        case cricket::MEDIA_TYPE_AUDIO:
+          RTC_HISTOGRAM_ENUMERATION_SPARSE(
+              "WebRTC.PeerConnection.SslPeerSignatureAlgorithm.Audio",
+              ssl_peer_signature_algorithm,
+              rtc::kSslSignatureAlgorithmMaxValue);
+          break;
+        case cricket::MEDIA_TYPE_VIDEO:
+          RTC_HISTOGRAM_ENUMERATION_SPARSE(
+              "WebRTC.PeerConnection.SslPeerSignatureAlgorithm.Video",
+              ssl_peer_signature_algorithm,
+              rtc::kSslSignatureAlgorithmMaxValue);
+          break;
+        case cricket::MEDIA_TYPE_DATA:
+          RTC_HISTOGRAM_ENUMERATION_SPARSE(
+              "WebRTC.PeerConnection.SslPeerSignatureAlgorithm.Data",
+              ssl_peer_signature_algorithm,
+              rtc::kSslSignatureAlgorithmMaxValue);
+          break;
+        default:
+          RTC_DCHECK_NOTREACHED();
+          continue;
+      }
+    }
+  }
+}
+
+// JsepTransportController::Observer callback, invoked on the network thread
+// when the transports backing `mid` change (e.g. due to bundling). Rebinds
+// every channel with this mid to the new RTP transport and, for the data
+// mid, updates the data channel controller and records the SCTP transport
+// name on the signaling thread. Returns false if any SetRtpTransport failed.
+bool PeerConnection::OnTransportChanged(
+    const std::string& mid,
+    RtpTransportInternal* rtp_transport,
+    rtc::scoped_refptr<DtlsTransport> dtls_transport,
+    DataChannelTransportInterface* data_channel_transport) {
+  RTC_DCHECK_RUN_ON(network_thread());
+  bool ret = true;
+  if (ConfiguredForMedia()) {
+    for (const auto& transceiver :
+         rtp_manager()->transceivers()->UnsafeList()) {
+      cricket::ChannelInterface* channel = transceiver->internal()->channel();
+      if (channel && channel->mid() == mid) {
+        ret = channel->SetRtpTransport(rtp_transport);
+      }
+    }
+  }
+
+  if (mid == sctp_mid_n_) {
+    data_channel_controller_.OnTransportChanged(data_channel_transport);
+    if (dtls_transport) {
+      signaling_thread()->PostTask(SafeTask(
+          signaling_thread_safety_.flag(),
+          [this,
+           name = std::string(dtls_transport->internal()->transport_name())] {
+            RTC_DCHECK_RUN_ON(signaling_thread());
+            SetSctpTransportName(std::move(name));
+          }));
+    }
+  }
+
+  return ret;
+}
+
+// Returns the application-supplied observer; never null while the connection
+// is alive. Signaling thread only.
+PeerConnectionObserver* PeerConnection::Observer() const {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  RTC_DCHECK(observer_);
+  return observer_;
+}
+
+// Starts the SCTP association for the negotiated data mid with the given
+// ports and maximum message size. Called on the signaling thread; the actual
+// start is posted to the network thread. No-op if no data mid is negotiated
+// or the SCTP transport cannot be found.
+void PeerConnection::StartSctpTransport(int local_port,
+                                        int remote_port,
+                                        int max_message_size) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  if (!sctp_mid_s_)
+    return;
+
+  network_thread()->PostTask(SafeTask(
+      network_thread_safety_,
+      [this, mid = *sctp_mid_s_, local_port, remote_port, max_message_size] {
+        rtc::scoped_refptr<SctpTransport> sctp_transport =
+            transport_controller_n()->GetSctpTransport(mid);
+        if (sctp_transport)
+          sctp_transport->Start(local_port, remote_port, max_message_size);
+      }));
+}
+
+// Returns the effective crypto options: the per-connection configuration
+// value if set, otherwise the factory-level options.
+CryptoOptions PeerConnection::GetCryptoOptions() {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  // TODO(bugs.webrtc.org/9891) - Remove PeerConnectionFactory::CryptoOptions
+  // after it has been removed.
+  return configuration_.crypto_options.has_value()
+             ? *configuration_.crypto_options
+             : options_.crypto_options;
+}
+
+// Invalidates both the legacy and the spec-compliant stats caches so the
+// next GetStats() call produces fresh data. Signaling thread only.
+void PeerConnection::ClearStatsCache() {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  if (legacy_stats_) {
+    legacy_stats_->InvalidateCache();
+  }
+  if (stats_collector_) {
+    stats_collector_->ClearCachedStatsReport();
+  }
+}
+
+// Asks the SDP handler whether the negotiationneeded event identified by
+// `event_id` is still valid to fire. Signaling thread only.
+bool PeerConnection::ShouldFireNegotiationNeededEvent(uint32_t event_id) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  return sdp_handler_->ShouldFireNegotiationNeededEvent(event_id);
+}
+
+// Test-only hook: schedules an immediate (zero-delay) usage pattern report
+// through the message handler. Signaling thread only.
+void PeerConnection::RequestUsagePatternReportForTesting() {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  message_handler_.RequestUsagePatternReport(
+      [this]() {
+        RTC_DCHECK_RUN_ON(signaling_thread());
+        ReportUsagePattern();
+      },
+      /* delay_ms= */ 0);
+}
+
+// Builds the callback used to deliver incoming RTCP packets: each packet is
+// copied and posted from the network thread to the worker thread, where the
+// Call receiver consumes it. Created on the network thread.
+std::function<void(const rtc::CopyOnWriteBuffer& packet,
+                   int64_t packet_time_us)>
+PeerConnection::InitializeRtcpCallback() {
+  RTC_DCHECK_RUN_ON(network_thread());
+  return [this](const rtc::CopyOnWriteBuffer& packet,
+                int64_t /*packet_time_us*/) {
+    worker_thread()->PostTask(SafeTask(worker_thread_safety_, [this, packet]() {
+      call_ptr_->Receiver()->DeliverRtcpPacket(packet);
+    }));
+  };
+}
+
+// Builds the handler invoked for RTP packets that could not be demuxed to a
+// receiver. The packet is still forwarded to Call (on the worker thread) so
+// bandwidth estimation can account for it. Created on the network thread.
+std::function<void(const RtpPacketReceived& parsed_packet)>
+PeerConnection::InitializeUnDemuxablePacketHandler() {
+  RTC_DCHECK_RUN_ON(network_thread());
+  return [this](const RtpPacketReceived& parsed_packet) {
+    worker_thread()->PostTask(
+        SafeTask(worker_thread_safety_, [this, parsed_packet]() {
+          // Deliver the packet anyway to Call to allow Call to do BWE.
+          // Even if there is no media receiver, the packet has still
+          // been received on the network and has been correctly parsed.
+          call_ptr_->Receiver()->DeliverRtpPacket(
+              MediaType::ANY, parsed_packet,
+              /*undemuxable_packet_handler=*/
+              [](const RtpPacketReceived& packet) { return false; });
+        }));
+  };
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/peer_connection.h b/third_party/libwebrtc/pc/peer_connection.h
new file mode 100644
index 0000000000..ea1a9d9d90
--- /dev/null
+++ b/third_party/libwebrtc/pc/peer_connection.h
@@ -0,0 +1,718 @@
+/*
+ * Copyright 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_PEER_CONNECTION_H_
+#define PC_PEER_CONNECTION_H_
+
+#include <stdint.h>
+
+#include <functional>
+#include <map>
+#include <memory>
+#include <set>
+#include <string>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/adaptation/resource.h"
+#include "api/async_dns_resolver.h"
+#include "api/candidate.h"
+#include "api/crypto/crypto_options.h"
+#include "api/data_channel_interface.h"
+#include "api/dtls_transport_interface.h"
+#include "api/field_trials_view.h"
+#include "api/ice_transport_interface.h"
+#include "api/jsep.h"
+#include "api/media_stream_interface.h"
+#include "api/media_types.h"
+#include "api/peer_connection_interface.h"
+#include "api/rtc_error.h"
+#include "api/rtc_event_log/rtc_event_log.h"
+#include "api/rtc_event_log_output.h"
+#include "api/rtp_receiver_interface.h"
+#include "api/rtp_sender_interface.h"
+#include "api/rtp_transceiver_interface.h"
+#include "api/scoped_refptr.h"
+#include "api/sctp_transport_interface.h"
+#include "api/sequence_checker.h"
+#include "api/set_local_description_observer_interface.h"
+#include "api/set_remote_description_observer_interface.h"
+#include "api/stats/rtc_stats_collector_callback.h"
+#include "api/task_queue/pending_task_safety_flag.h"
+#include "api/transport/bitrate_settings.h"
+#include "api/transport/data_channel_transport_interface.h"
+#include "api/transport/enums.h"
+#include "api/turn_customizer.h"
+#include "call/call.h"
+#include "modules/rtp_rtcp/source/rtp_packet_received.h"
+#include "p2p/base/ice_transport_internal.h"
+#include "p2p/base/port.h"
+#include "p2p/base/port_allocator.h"
+#include "p2p/base/transport_description.h"
+#include "pc/channel_interface.h"
+#include "pc/connection_context.h"
+#include "pc/data_channel_controller.h"
+#include "pc/data_channel_utils.h"
+#include "pc/dtls_transport.h"
+#include "pc/jsep_transport_controller.h"
+#include "pc/legacy_stats_collector.h"
+#include "pc/peer_connection_internal.h"
+#include "pc/peer_connection_message_handler.h"
+#include "pc/rtc_stats_collector.h"
+#include "pc/rtp_transceiver.h"
+#include "pc/rtp_transmission_manager.h"
+#include "pc/rtp_transport_internal.h"
+#include "pc/sctp_data_channel.h"
+#include "pc/sdp_offer_answer.h"
+#include "pc/session_description.h"
+#include "pc/transceiver_list.h"
+#include "pc/transport_stats.h"
+#include "pc/usage_pattern.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/copy_on_write_buffer.h"
+#include "rtc_base/rtc_certificate.h"
+#include "rtc_base/ssl_certificate.h"
+#include "rtc_base/ssl_stream_adapter.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/thread_annotations.h"
+#include "rtc_base/weak_ptr.h"
+
+namespace webrtc {
+
+// PeerConnection is the implementation of the PeerConnection object as defined
+// by the PeerConnectionInterface API surface.
+// The class currently is solely responsible for the following:
+// - Managing the session state machine (signaling state).
+// - Creating and initializing lower-level objects, like PortAllocator and
+// BaseChannels.
+// - Owning and managing the life cycle of the RtpSender/RtpReceiver and track
+// objects.
+// - Tracking the current and pending local/remote session descriptions.
+// The class currently is jointly responsible for the following:
+// - Parsing and interpreting SDP.
+// - Generating offers and answers based on the current state.
+// - The ICE state machine.
+// - Generating stats.
+class PeerConnection : public PeerConnectionInternal,
+ public JsepTransportController::Observer {
+ public:
+ // Creates a PeerConnection and initializes it with the given values.
+ // If the initialization fails, the function releases the PeerConnection
+ // and returns nullptr.
+ //
+ // Note that the function takes ownership of dependencies, and will
+ // either use them or release them, whether it succeeds or fails.
+ static RTCErrorOr<rtc::scoped_refptr<PeerConnection>> Create(
+ rtc::scoped_refptr<ConnectionContext> context,
+ const PeerConnectionFactoryInterface::Options& options,
+ std::unique_ptr<RtcEventLog> event_log,
+ std::unique_ptr<Call> call,
+ const PeerConnectionInterface::RTCConfiguration& configuration,
+ PeerConnectionDependencies dependencies);
+
+ rtc::scoped_refptr<StreamCollectionInterface> local_streams() override;
+ rtc::scoped_refptr<StreamCollectionInterface> remote_streams() override;
+ bool AddStream(MediaStreamInterface* local_stream) override;
+ void RemoveStream(MediaStreamInterface* local_stream) override;
+
+ RTCErrorOr<rtc::scoped_refptr<RtpSenderInterface>> AddTrack(
+ rtc::scoped_refptr<MediaStreamTrackInterface> track,
+ const std::vector<std::string>& stream_ids) override;
+ RTCErrorOr<rtc::scoped_refptr<RtpSenderInterface>> AddTrack(
+ rtc::scoped_refptr<MediaStreamTrackInterface> track,
+ const std::vector<std::string>& stream_ids,
+ const std::vector<RtpEncodingParameters>& init_send_encodings) override;
+ RTCErrorOr<rtc::scoped_refptr<RtpSenderInterface>> AddTrack(
+ rtc::scoped_refptr<MediaStreamTrackInterface> track,
+ const std::vector<std::string>& stream_ids,
+ const std::vector<RtpEncodingParameters>* init_send_encodings);
+ RTCError RemoveTrackOrError(
+ rtc::scoped_refptr<RtpSenderInterface> sender) override;
+
+ RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>> AddTransceiver(
+ rtc::scoped_refptr<MediaStreamTrackInterface> track) override;
+ RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>> AddTransceiver(
+ rtc::scoped_refptr<MediaStreamTrackInterface> track,
+ const RtpTransceiverInit& init) override;
+ RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>> AddTransceiver(
+ cricket::MediaType media_type) override;
+ RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>> AddTransceiver(
+ cricket::MediaType media_type,
+ const RtpTransceiverInit& init) override;
+
+ rtc::scoped_refptr<RtpSenderInterface> CreateSender(
+ const std::string& kind,
+ const std::string& stream_id) override;
+
+ std::vector<rtc::scoped_refptr<RtpSenderInterface>> GetSenders()
+ const override;
+ std::vector<rtc::scoped_refptr<RtpReceiverInterface>> GetReceivers()
+ const override;
+ std::vector<rtc::scoped_refptr<RtpTransceiverInterface>> GetTransceivers()
+ const override;
+
+ RTCErrorOr<rtc::scoped_refptr<DataChannelInterface>> CreateDataChannelOrError(
+ const std::string& label,
+ const DataChannelInit* config) override;
+ // WARNING: LEGACY. See peerconnectioninterface.h
+ bool GetStats(StatsObserver* observer,
+ webrtc::MediaStreamTrackInterface* track,
+ StatsOutputLevel level) override;
+  // Spec-compliant GetStats(). See peerconnectioninterface.h
+ void GetStats(RTCStatsCollectorCallback* callback) override;
+ void GetStats(
+ rtc::scoped_refptr<RtpSenderInterface> selector,
+ rtc::scoped_refptr<RTCStatsCollectorCallback> callback) override;
+ void GetStats(
+ rtc::scoped_refptr<RtpReceiverInterface> selector,
+ rtc::scoped_refptr<RTCStatsCollectorCallback> callback) override;
+ void ClearStatsCache() override;
+
+ SignalingState signaling_state() override;
+
+ IceConnectionState ice_connection_state() override;
+ IceConnectionState ice_connection_state_internal() override {
+ return ice_connection_state();
+ }
+ IceConnectionState standardized_ice_connection_state() override;
+ PeerConnectionState peer_connection_state() override;
+ IceGatheringState ice_gathering_state() override;
+ absl::optional<bool> can_trickle_ice_candidates() override;
+
+ const SessionDescriptionInterface* local_description() const override;
+ const SessionDescriptionInterface* remote_description() const override;
+ const SessionDescriptionInterface* current_local_description() const override;
+ const SessionDescriptionInterface* current_remote_description()
+ const override;
+ const SessionDescriptionInterface* pending_local_description() const override;
+ const SessionDescriptionInterface* pending_remote_description()
+ const override;
+
+ void RestartIce() override;
+
+ // JSEP01
+ void CreateOffer(CreateSessionDescriptionObserver* observer,
+ const RTCOfferAnswerOptions& options) override;
+ void CreateAnswer(CreateSessionDescriptionObserver* observer,
+ const RTCOfferAnswerOptions& options) override;
+
+ void SetLocalDescription(
+ std::unique_ptr<SessionDescriptionInterface> desc,
+ rtc::scoped_refptr<SetLocalDescriptionObserverInterface> observer)
+ override;
+ void SetLocalDescription(
+ rtc::scoped_refptr<SetLocalDescriptionObserverInterface> observer)
+ override;
+ // TODO(https://crbug.com/webrtc/11798): Delete these methods in favor of the
+ // ones taking SetLocalDescriptionObserverInterface as argument.
+ void SetLocalDescription(SetSessionDescriptionObserver* observer,
+ SessionDescriptionInterface* desc) override;
+ void SetLocalDescription(SetSessionDescriptionObserver* observer) override;
+
+ void SetRemoteDescription(
+ std::unique_ptr<SessionDescriptionInterface> desc,
+ rtc::scoped_refptr<SetRemoteDescriptionObserverInterface> observer)
+ override;
+  // TODO(https://crbug.com/webrtc/11798): Delete this method in favor of the
+ // ones taking SetRemoteDescriptionObserverInterface as argument.
+ void SetRemoteDescription(SetSessionDescriptionObserver* observer,
+ SessionDescriptionInterface* desc) override;
+
+ PeerConnectionInterface::RTCConfiguration GetConfiguration() override;
+ RTCError SetConfiguration(
+ const PeerConnectionInterface::RTCConfiguration& configuration) override;
+ bool AddIceCandidate(const IceCandidateInterface* candidate) override;
+ void AddIceCandidate(std::unique_ptr<IceCandidateInterface> candidate,
+ std::function<void(RTCError)> callback) override;
+ bool RemoveIceCandidates(
+ const std::vector<cricket::Candidate>& candidates) override;
+
+ RTCError SetBitrate(const BitrateSettings& bitrate) override;
+
+ void SetAudioPlayout(bool playout) override;
+ void SetAudioRecording(bool recording) override;
+
+ rtc::scoped_refptr<DtlsTransportInterface> LookupDtlsTransportByMid(
+ const std::string& mid) override;
+ rtc::scoped_refptr<DtlsTransport> LookupDtlsTransportByMidInternal(
+ const std::string& mid);
+
+ rtc::scoped_refptr<SctpTransportInterface> GetSctpTransport() const override;
+
+ void AddAdaptationResource(rtc::scoped_refptr<Resource> resource) override;
+
+ bool StartRtcEventLog(std::unique_ptr<RtcEventLogOutput> output,
+ int64_t output_period_ms) override;
+ bool StartRtcEventLog(std::unique_ptr<RtcEventLogOutput> output) override;
+ void StopRtcEventLog() override;
+
+ void Close() override;
+
+ rtc::Thread* signaling_thread() const final {
+ return context_->signaling_thread();
+ }
+
+ rtc::Thread* network_thread() const final {
+ return context_->network_thread();
+ }
+ rtc::Thread* worker_thread() const final { return context_->worker_thread(); }
+
+ std::string session_id() const override { return session_id_; }
+
+ bool initial_offerer() const override {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ return sdp_handler_->initial_offerer();
+ }
+
+ std::vector<
+ rtc::scoped_refptr<RtpTransceiverProxyWithInternal<RtpTransceiver>>>
+ GetTransceiversInternal() const override {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ if (!ConfiguredForMedia()) {
+ return {};
+ }
+ return rtp_manager()->transceivers()->List();
+ }
+
+ std::vector<DataChannelStats> GetDataChannelStats() const override;
+
+ absl::optional<std::string> sctp_transport_name() const override;
+ absl::optional<std::string> sctp_mid() const override;
+
+ cricket::CandidateStatsList GetPooledCandidateStats() const override;
+ std::map<std::string, cricket::TransportStats> GetTransportStatsByNames(
+ const std::set<std::string>& transport_names) override;
+ Call::Stats GetCallStats() override;
+
+ absl::optional<AudioDeviceModule::Stats> GetAudioDeviceStats() override;
+
+ bool GetLocalCertificate(
+ const std::string& transport_name,
+ rtc::scoped_refptr<rtc::RTCCertificate>* certificate) override;
+ std::unique_ptr<rtc::SSLCertChain> GetRemoteSSLCertChain(
+ const std::string& transport_name) override;
+ bool IceRestartPending(const std::string& content_name) const override;
+ bool NeedsIceRestart(const std::string& content_name) const override;
+ bool GetSslRole(const std::string& content_name, rtc::SSLRole* role) override;
+
+ // Functions needed by DataChannelController
+ void NoteDataAddedEvent() override { NoteUsageEvent(UsageEvent::DATA_ADDED); }
+ // Returns the observer. Will crash on CHECK if the observer is removed.
+ PeerConnectionObserver* Observer() const override;
+ bool IsClosed() const override {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ return !sdp_handler_ ||
+ sdp_handler_->signaling_state() == PeerConnectionInterface::kClosed;
+ }
+ // Get current SSL role used by SCTP's underlying transport.
+ absl::optional<rtc::SSLRole> GetSctpSslRole_n() override;
+
+ void OnSctpDataChannelStateChanged(
+ int channel_id,
+ DataChannelInterface::DataState state) override;
+
+ bool ShouldFireNegotiationNeededEvent(uint32_t event_id) override;
+
+ // Functions needed by SdpOfferAnswerHandler
+ LegacyStatsCollector* legacy_stats() override {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ return legacy_stats_.get();
+ }
+ DataChannelController* data_channel_controller() override {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ return &data_channel_controller_;
+ }
+ bool dtls_enabled() const override {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ return dtls_enabled_;
+ }
+ const PeerConnectionInterface::RTCConfiguration* configuration()
+ const override {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ return &configuration_;
+ }
+ PeerConnectionMessageHandler* message_handler() override {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ return &message_handler_;
+ }
+
+ RtpTransmissionManager* rtp_manager() override { return rtp_manager_.get(); }
+ const RtpTransmissionManager* rtp_manager() const override {
+ return rtp_manager_.get();
+ }
+
+ JsepTransportController* transport_controller_s() override {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ return transport_controller_copy_;
+ }
+ JsepTransportController* transport_controller_n() override {
+ RTC_DCHECK_RUN_ON(network_thread());
+ return transport_controller_.get();
+ }
+ cricket::PortAllocator* port_allocator() override {
+ return port_allocator_.get();
+ }
+ Call* call_ptr() override { return call_ptr_; }
+
+ ConnectionContext* context() { return context_.get(); }
+ const PeerConnectionFactoryInterface::Options* options() const override {
+ return &options_;
+ }
+ void SetIceConnectionState(IceConnectionState new_state) override;
+ void NoteUsageEvent(UsageEvent event) override;
+
+ // Asynchronously adds a remote candidate on the network thread.
+ void AddRemoteCandidate(const std::string& mid,
+ const cricket::Candidate& candidate) override;
+
+ // Report the UMA metric BundleUsage for the given remote description.
+ void ReportSdpBundleUsage(
+ const SessionDescriptionInterface& remote_description) override;
+
+ // Report several UMA metrics on establishing the connection.
+ void ReportFirstConnectUsageMetrics() RTC_RUN_ON(signaling_thread());
+
+ // Returns true if the PeerConnection is configured to use Unified Plan
+ // semantics for creating offers/answers and setting local/remote
+ // descriptions. If this is true the RtpTransceiver API will also be available
+ // to the user. If this is false, Plan B semantics are assumed.
+ // TODO(bugs.webrtc.org/8530): Flip the default to be Unified Plan once
+ // sufficient time has passed.
+ bool IsUnifiedPlan() const override {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ return is_unified_plan_;
+ }
+ bool ValidateBundleSettings(
+ const cricket::SessionDescription* desc,
+ const std::map<std::string, const cricket::ContentGroup*>&
+ bundle_groups_by_mid) override;
+
+ bool CreateDataChannelTransport(absl::string_view mid) override;
+ void DestroyDataChannelTransport(RTCError error) override;
+
+ // Asynchronously calls SctpTransport::Start() on the network thread for
+ // `sctp_mid()` if set. Called as part of setting the local description.
+ void StartSctpTransport(int local_port,
+ int remote_port,
+ int max_message_size) override;
+
+ // Returns the CryptoOptions for this PeerConnection. This will always
+ // return the RTCConfiguration.crypto_options if set and will only default
+ // back to the PeerConnectionFactory settings if nothing was set.
+ CryptoOptions GetCryptoOptions() override;
+
+ // Internal implementation for AddTransceiver family of methods. If
+ // `fire_callback` is set, fires OnRenegotiationNeeded callback if successful.
+ RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>> AddTransceiver(
+ cricket::MediaType media_type,
+ rtc::scoped_refptr<MediaStreamTrackInterface> track,
+ const RtpTransceiverInit& init,
+ bool fire_callback = true) override;
+
+  // Returns the rtp transport; the result cannot be nullptr.
+ RtpTransportInternal* GetRtpTransport(const std::string& mid);
+
+ // Returns true if SRTP (either using DTLS-SRTP or SDES) is required by
+ // this session.
+ bool SrtpRequired() const override;
+
+ absl::optional<std::string> SetupDataChannelTransport_n(absl::string_view mid)
+ RTC_RUN_ON(network_thread());
+ void TeardownDataChannelTransport_n(RTCError error)
+ RTC_RUN_ON(network_thread());
+
+ const FieldTrialsView& trials() const override { return *trials_; }
+
+ bool ConfiguredForMedia() const;
+
+ // Functions made public for testing.
+ void ReturnHistogramVeryQuicklyForTesting() {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ return_histogram_very_quickly_ = true;
+ }
+ void RequestUsagePatternReportForTesting();
+
+ protected:
+ // Available for rtc::scoped_refptr creation
+ PeerConnection(rtc::scoped_refptr<ConnectionContext> context,
+ const PeerConnectionFactoryInterface::Options& options,
+ bool is_unified_plan,
+ std::unique_ptr<RtcEventLog> event_log,
+ std::unique_ptr<Call> call,
+ PeerConnectionDependencies& dependencies,
+ bool dtls_enabled);
+
+ ~PeerConnection() override;
+
+ private:
+ RTCError Initialize(
+ const PeerConnectionInterface::RTCConfiguration& configuration,
+ PeerConnectionDependencies dependencies);
+ JsepTransportController* InitializeTransportController_n(
+ const RTCConfiguration& configuration,
+ const PeerConnectionDependencies& dependencies)
+ RTC_RUN_ON(network_thread());
+
+ rtc::scoped_refptr<RtpTransceiverProxyWithInternal<RtpTransceiver>>
+ FindTransceiverBySender(rtc::scoped_refptr<RtpSenderInterface> sender)
+ RTC_RUN_ON(signaling_thread());
+
+ void SetStandardizedIceConnectionState(
+ PeerConnectionInterface::IceConnectionState new_state)
+ RTC_RUN_ON(signaling_thread());
+ void SetConnectionState(
+ PeerConnectionInterface::PeerConnectionState new_state)
+ RTC_RUN_ON(signaling_thread());
+
+ // Called any time the IceGatheringState changes.
+ void OnIceGatheringChange(IceGatheringState new_state)
+ RTC_RUN_ON(signaling_thread());
+ // New ICE candidate has been gathered.
+ void OnIceCandidate(std::unique_ptr<IceCandidateInterface> candidate)
+ RTC_RUN_ON(signaling_thread());
+ // Gathering of an ICE candidate failed.
+ void OnIceCandidateError(const std::string& address,
+ int port,
+ const std::string& url,
+ int error_code,
+ const std::string& error_text)
+ RTC_RUN_ON(signaling_thread());
+ // Some local ICE candidates have been removed.
+ void OnIceCandidatesRemoved(const std::vector<cricket::Candidate>& candidates)
+ RTC_RUN_ON(signaling_thread());
+
+ void OnSelectedCandidatePairChanged(
+ const cricket::CandidatePairChangeEvent& event)
+ RTC_RUN_ON(signaling_thread());
+
+ void OnNegotiationNeeded();
+
+ // Called when first configuring the port allocator.
+ struct InitializePortAllocatorResult {
+ bool enable_ipv6;
+ };
+ InitializePortAllocatorResult InitializePortAllocator_n(
+ const cricket::ServerAddresses& stun_servers,
+ const std::vector<cricket::RelayServerConfig>& turn_servers,
+ const RTCConfiguration& configuration);
+ // Called when SetConfiguration is called to apply the supported subset
+ // of the configuration on the network thread.
+ bool ReconfigurePortAllocator_n(
+ const cricket::ServerAddresses& stun_servers,
+ const std::vector<cricket::RelayServerConfig>& turn_servers,
+ IceTransportsType type,
+ int candidate_pool_size,
+ PortPrunePolicy turn_port_prune_policy,
+ webrtc::TurnCustomizer* turn_customizer,
+ absl::optional<int> stun_candidate_keepalive_interval,
+ bool have_local_description);
+
+ // Starts output of an RTC event log to the given output object.
+ // This function should only be called from the worker thread.
+ bool StartRtcEventLog_w(std::unique_ptr<RtcEventLogOutput> output,
+ int64_t output_period_ms);
+
+ // Stops recording an RTC event log.
+ // This function should only be called from the worker thread.
+ void StopRtcEventLog_w();
+
+ // Returns true and the TransportInfo of the given `content_name`
+ // from `description`. Returns false if it's not available.
+ static bool GetTransportDescription(
+ const cricket::SessionDescription* description,
+ const std::string& content_name,
+ cricket::TransportDescription* info);
+
+ // Returns the media index for a local ice candidate given the content name.
+ // Returns false if the local session description does not have a media
+ // content called `content_name`.
+ bool GetLocalCandidateMediaIndex(const std::string& content_name,
+ int* sdp_mline_index)
+ RTC_RUN_ON(signaling_thread());
+
+ // JsepTransportController signal handlers.
+ void OnTransportControllerConnectionState(cricket::IceConnectionState state)
+ RTC_RUN_ON(signaling_thread());
+ void OnTransportControllerGatheringState(cricket::IceGatheringState state)
+ RTC_RUN_ON(signaling_thread());
+ void OnTransportControllerCandidatesGathered(
+ const std::string& transport_name,
+ const std::vector<cricket::Candidate>& candidates)
+ RTC_RUN_ON(signaling_thread());
+ void OnTransportControllerCandidateError(
+ const cricket::IceCandidateErrorEvent& event)
+ RTC_RUN_ON(signaling_thread());
+ void OnTransportControllerCandidatesRemoved(
+ const std::vector<cricket::Candidate>& candidates)
+ RTC_RUN_ON(signaling_thread());
+ void OnTransportControllerCandidateChanged(
+ const cricket::CandidatePairChangeEvent& event)
+ RTC_RUN_ON(signaling_thread());
+ void OnTransportControllerDtlsHandshakeError(rtc::SSLHandshakeError error);
+
+ // Invoked when TransportController connection completion is signaled.
+ // Reports stats for all transports in use.
+ void ReportTransportStats(std::vector<RtpTransceiverProxyRefPtr> transceivers)
+ RTC_RUN_ON(network_thread());
+
+ // Gather the usage of IPv4/IPv6 as best connection.
+ static void ReportBestConnectionState(const cricket::TransportStats& stats);
+
+ static void ReportNegotiatedCiphers(
+ bool dtls_enabled,
+ const cricket::TransportStats& stats,
+ const std::set<cricket::MediaType>& media_types);
+ void ReportIceCandidateCollected(const cricket::Candidate& candidate)
+ RTC_RUN_ON(signaling_thread());
+
+ void ReportUsagePattern() const RTC_RUN_ON(signaling_thread());
+
+ void ReportRemoteIceCandidateAdded(const cricket::Candidate& candidate);
+
+ // JsepTransportController::Observer override.
+ //
+ // Called by `transport_controller_` when processing transport information
+ // from a session description, and the mapping from m= sections to transports
+ // changed (as a result of BUNDLE negotiation, or m= sections being
+ // rejected).
+ bool OnTransportChanged(
+ const std::string& mid,
+ RtpTransportInternal* rtp_transport,
+ rtc::scoped_refptr<DtlsTransport> dtls_transport,
+ DataChannelTransportInterface* data_channel_transport) override;
+
+ void SetSctpTransportName(std::string sctp_transport_name);
+
+ std::function<void(const rtc::CopyOnWriteBuffer& packet,
+ int64_t packet_time_us)>
+ InitializeRtcpCallback();
+
+ std::function<void(const RtpPacketReceived& parsed_packet)>
+ InitializeUnDemuxablePacketHandler();
+
+ const rtc::scoped_refptr<ConnectionContext> context_;
+ // Field trials active for this PeerConnection is the first of:
+ // a) Specified in PeerConnectionDependencies (owned).
+  // b) Accessed via ConnectionContext (e.g. PeerConnectionFactoryDependencies)
+ // c) Created as Default (FieldTrialBasedConfig).
+ const webrtc::AlwaysValidPointer<const FieldTrialsView, FieldTrialBasedConfig>
+ trials_;
+ const PeerConnectionFactoryInterface::Options options_;
+ PeerConnectionObserver* observer_ RTC_GUARDED_BY(signaling_thread()) =
+ nullptr;
+
+ const bool is_unified_plan_;
+
+ // The EventLog needs to outlive `call_` (and any other object that uses it).
+ std::unique_ptr<RtcEventLog> event_log_ RTC_GUARDED_BY(worker_thread());
+
+ // Points to the same thing as `event_log_`. Since it's const, we may read the
+ // pointer (but not touch the object) from any thread.
+ RtcEventLog* const event_log_ptr_ RTC_PT_GUARDED_BY(worker_thread());
+
+ IceConnectionState ice_connection_state_ RTC_GUARDED_BY(signaling_thread()) =
+ kIceConnectionNew;
+ PeerConnectionInterface::IceConnectionState standardized_ice_connection_state_
+ RTC_GUARDED_BY(signaling_thread()) = kIceConnectionNew;
+ PeerConnectionInterface::PeerConnectionState connection_state_
+ RTC_GUARDED_BY(signaling_thread()) = PeerConnectionState::kNew;
+
+ IceGatheringState ice_gathering_state_ RTC_GUARDED_BY(signaling_thread()) =
+ kIceGatheringNew;
+ PeerConnectionInterface::RTCConfiguration configuration_
+ RTC_GUARDED_BY(signaling_thread());
+
+ const std::unique_ptr<AsyncDnsResolverFactoryInterface>
+ async_dns_resolver_factory_;
+ std::unique_ptr<cricket::PortAllocator>
+ port_allocator_; // TODO(bugs.webrtc.org/9987): Accessed on both
+ // signaling and network thread.
+ const std::unique_ptr<webrtc::IceTransportFactory>
+ ice_transport_factory_; // TODO(bugs.webrtc.org/9987): Accessed on the
+ // signaling thread but the underlying raw
+ // pointer is given to
+ // `jsep_transport_controller_` and used on the
+ // network thread.
+ const std::unique_ptr<rtc::SSLCertificateVerifier> tls_cert_verifier_
+ RTC_GUARDED_BY(network_thread());
+
+ // The unique_ptr belongs to the worker thread, but the Call object manages
+ // its own thread safety.
+ std::unique_ptr<Call> call_ RTC_GUARDED_BY(worker_thread());
+ ScopedTaskSafety signaling_thread_safety_;
+ rtc::scoped_refptr<PendingTaskSafetyFlag> network_thread_safety_;
+ rtc::scoped_refptr<PendingTaskSafetyFlag> worker_thread_safety_;
+
+ // Points to the same thing as `call_`. Since it's const, we may read the
+ // pointer from any thread.
+ // TODO(bugs.webrtc.org/11992): Remove this workaround (and potential dangling
+ // pointer).
+ Call* const call_ptr_;
+
+ std::unique_ptr<LegacyStatsCollector> legacy_stats_
+ RTC_GUARDED_BY(signaling_thread()); // A pointer is passed to senders_
+ rtc::scoped_refptr<RTCStatsCollector> stats_collector_
+ RTC_GUARDED_BY(signaling_thread());
+
+ const std::string session_id_;
+
+ // The transport controller is set and used on the network thread.
+ // Some functions pass the value of the transport_controller_ pointer
+ // around as arguments while running on the signaling thread; these
+ // use the transport_controller_copy.
+ std::unique_ptr<JsepTransportController> transport_controller_
+ RTC_GUARDED_BY(network_thread());
+ JsepTransportController* transport_controller_copy_
+ RTC_GUARDED_BY(signaling_thread()) = nullptr;
+
+ // `sctp_mid_` is the content name (MID) in SDP.
+ // Note: this is used as the data channel MID by both SCTP and data channel
+ // transports. It is set when either transport is initialized and unset when
+ // both transports are deleted.
+ // There is one copy on the signaling thread and another copy on the
+ // networking thread. Changes are always initiated from the signaling
+ // thread, but applied first on the networking thread via an invoke().
+ absl::optional<std::string> sctp_mid_s_ RTC_GUARDED_BY(signaling_thread());
+ absl::optional<std::string> sctp_mid_n_ RTC_GUARDED_BY(network_thread());
+ std::string sctp_transport_name_s_ RTC_GUARDED_BY(signaling_thread());
+
+ // The machinery for handling offers and answers. Const after initialization.
+ std::unique_ptr<SdpOfferAnswerHandler> sdp_handler_
+ RTC_GUARDED_BY(signaling_thread()) RTC_PT_GUARDED_BY(signaling_thread());
+
+ const bool dtls_enabled_;
+
+ UsagePattern usage_pattern_ RTC_GUARDED_BY(signaling_thread());
+ bool return_histogram_very_quickly_ RTC_GUARDED_BY(signaling_thread()) =
+ false;
+
+ // The DataChannelController is accessed from both the signaling thread
+ // and networking thread. It is a thread-aware object.
+ DataChannelController data_channel_controller_;
+
+ // Machinery for handling messages posted to oneself
+ PeerConnectionMessageHandler message_handler_
+ RTC_GUARDED_BY(signaling_thread());
+
+ // Administration of senders, receivers and transceivers
+ // Accessed on both signaling and network thread. Const after Initialize().
+ std::unique_ptr<RtpTransmissionManager> rtp_manager_;
+
+ // Did the connectionState ever change to `connected`?
+  // Used to gather metrics only on the first such state change.
+ bool was_ever_connected_ RTC_GUARDED_BY(signaling_thread()) = false;
+
+ // This variable needs to be the last one in the class.
+ rtc::WeakPtrFactory<PeerConnection> weak_factory_;
+};
+
+} // namespace webrtc
+
+#endif // PC_PEER_CONNECTION_H_
diff --git a/third_party/libwebrtc/pc/peer_connection_adaptation_integrationtest.cc b/third_party/libwebrtc/pc/peer_connection_adaptation_integrationtest.cc
new file mode 100644
index 0000000000..882fa36a57
--- /dev/null
+++ b/third_party/libwebrtc/pc/peer_connection_adaptation_integrationtest.cc
@@ -0,0 +1,172 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdint.h>
+
+#include <memory>
+#include <string>
+
+#include "absl/types/optional.h"
+#include "api/adaptation/resource.h"
+#include "api/audio_codecs/builtin_audio_decoder_factory.h"
+#include "api/audio_codecs/builtin_audio_encoder_factory.h"
+#include "api/media_stream_interface.h"
+#include "api/peer_connection_interface.h"
+#include "api/rtc_error.h"
+#include "api/rtp_parameters.h"
+#include "api/rtp_sender_interface.h"
+#include "api/scoped_refptr.h"
+#include "api/video/video_source_interface.h"
+#include "call/adaptation/test/fake_resource.h"
+#include "pc/test/fake_periodic_video_source.h"
+#include "pc/test/fake_periodic_video_track_source.h"
+#include "pc/test/peer_connection_test_wrapper.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/time_utils.h"
+#include "rtc_base/virtual_socket_server.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+
+const int64_t kDefaultTimeoutMs = 5000;
+
+struct TrackWithPeriodicSource {
+ rtc::scoped_refptr<VideoTrackInterface> track;
+ rtc::scoped_refptr<FakePeriodicVideoTrackSource> periodic_track_source;
+};
+
+// Performs an O/A exchange and waits until the signaling state is stable again.
+void Negotiate(rtc::scoped_refptr<PeerConnectionTestWrapper> caller,
+ rtc::scoped_refptr<PeerConnectionTestWrapper> callee) {
+ // Wire up callbacks and listeners such that a full O/A is performed in
+ // response to CreateOffer().
+ PeerConnectionTestWrapper::Connect(caller.get(), callee.get());
+ caller->CreateOffer(PeerConnectionInterface::RTCOfferAnswerOptions());
+ caller->WaitForNegotiation();
+}
+
+TrackWithPeriodicSource CreateTrackWithPeriodicSource(
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> factory) {
+ FakePeriodicVideoSource::Config periodic_track_source_config;
+ periodic_track_source_config.frame_interval_ms = 100;
+ periodic_track_source_config.timestamp_offset_ms = rtc::TimeMillis();
+ rtc::scoped_refptr<FakePeriodicVideoTrackSource> periodic_track_source =
+ rtc::make_ref_counted<FakePeriodicVideoTrackSource>(
+ periodic_track_source_config, /* remote */ false);
+ TrackWithPeriodicSource track_with_source;
+ track_with_source.track =
+ factory->CreateVideoTrack(periodic_track_source, "PeriodicTrack");
+ track_with_source.periodic_track_source = periodic_track_source;
+ return track_with_source;
+}
+
+// Triggers overuse and obtains VideoSinkWants. Adaptation processing happens in
+// parallel and this function makes no guarantee that the returnd VideoSinkWants
+// have yet to reflect the overuse signal. Used together with EXPECT_TRUE_WAIT
+// to "spam overuse until a change is observed".
+rtc::VideoSinkWants TriggerOveruseAndGetSinkWants(
+ rtc::scoped_refptr<FakeResource> fake_resource,
+ const FakePeriodicVideoSource& source) {
+ fake_resource->SetUsageState(ResourceUsageState::kOveruse);
+ return source.wants();
+}
+
+class PeerConnectionAdaptationIntegrationTest : public ::testing::Test {
+ public:
+ PeerConnectionAdaptationIntegrationTest()
+ : virtual_socket_server_(),
+ network_thread_(new rtc::Thread(&virtual_socket_server_)),
+ worker_thread_(rtc::Thread::Create()) {
+ RTC_CHECK(network_thread_->Start());
+ RTC_CHECK(worker_thread_->Start());
+ }
+
+ rtc::scoped_refptr<PeerConnectionTestWrapper> CreatePcWrapper(
+ const char* name) {
+ rtc::scoped_refptr<PeerConnectionTestWrapper> pc_wrapper =
+ rtc::make_ref_counted<PeerConnectionTestWrapper>(
+ name, &virtual_socket_server_, network_thread_.get(),
+ worker_thread_.get());
+ PeerConnectionInterface::RTCConfiguration config;
+ config.sdp_semantics = SdpSemantics::kUnifiedPlan;
+ EXPECT_TRUE(pc_wrapper->CreatePc(config, CreateBuiltinAudioEncoderFactory(),
+ CreateBuiltinAudioDecoderFactory()));
+ return pc_wrapper;
+ }
+
+ protected:
+ rtc::VirtualSocketServer virtual_socket_server_;
+ std::unique_ptr<rtc::Thread> network_thread_;
+ std::unique_ptr<rtc::Thread> worker_thread_;
+};
+
+TEST_F(PeerConnectionAdaptationIntegrationTest,
+ ResouceInjectedAfterNegotiationCausesReductionInResolution) {
+ auto caller_wrapper = CreatePcWrapper("caller");
+ auto caller = caller_wrapper->pc();
+ auto callee_wrapper = CreatePcWrapper("callee");
+
+ // Adding a track and negotiating ensures that a VideoSendStream exists.
+ TrackWithPeriodicSource track_with_source =
+ CreateTrackWithPeriodicSource(caller_wrapper->pc_factory());
+ auto sender = caller->AddTrack(track_with_source.track, {}).value();
+ Negotiate(caller_wrapper, callee_wrapper);
+ // Prefer degrading resolution.
+ auto parameters = sender->GetParameters();
+ parameters.degradation_preference = DegradationPreference::MAINTAIN_FRAMERATE;
+ sender->SetParameters(parameters);
+
+ const auto& source =
+ track_with_source.periodic_track_source->fake_periodic_source();
+ int pixel_count_before_overuse = source.wants().max_pixel_count;
+
+ // Inject a fake resource and spam kOveruse until resolution becomes limited.
+ auto fake_resource = FakeResource::Create("FakeResource");
+ caller->AddAdaptationResource(fake_resource);
+ EXPECT_TRUE_WAIT(
+ TriggerOveruseAndGetSinkWants(fake_resource, source).max_pixel_count <
+ pixel_count_before_overuse,
+ kDefaultTimeoutMs);
+}
+
+TEST_F(PeerConnectionAdaptationIntegrationTest,
+ ResouceInjectedBeforeNegotiationCausesReductionInResolution) {
+ auto caller_wrapper = CreatePcWrapper("caller");
+ auto caller = caller_wrapper->pc();
+ auto callee_wrapper = CreatePcWrapper("callee");
+
+ // Inject a fake resource before adding any tracks or negotiating.
+ auto fake_resource = FakeResource::Create("FakeResource");
+ caller->AddAdaptationResource(fake_resource);
+
+ // Adding a track and negotiating ensures that a VideoSendStream exists.
+ TrackWithPeriodicSource track_with_source =
+ CreateTrackWithPeriodicSource(caller_wrapper->pc_factory());
+ auto sender = caller->AddTrack(track_with_source.track, {}).value();
+ Negotiate(caller_wrapper, callee_wrapper);
+ // Prefer degrading resolution.
+ auto parameters = sender->GetParameters();
+ parameters.degradation_preference = DegradationPreference::MAINTAIN_FRAMERATE;
+ sender->SetParameters(parameters);
+
+ const auto& source =
+ track_with_source.periodic_track_source->fake_periodic_source();
+ int pixel_count_before_overuse = source.wants().max_pixel_count;
+
+ // Spam kOveruse until resolution becomes limited.
+ EXPECT_TRUE_WAIT(
+ TriggerOveruseAndGetSinkWants(fake_resource, source).max_pixel_count <
+ pixel_count_before_overuse,
+ kDefaultTimeoutMs);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/peer_connection_bundle_unittest.cc b/third_party/libwebrtc/pc/peer_connection_bundle_unittest.cc
new file mode 100644
index 0000000000..0db401276a
--- /dev/null
+++ b/third_party/libwebrtc/pc/peer_connection_bundle_unittest.cc
@@ -0,0 +1,1074 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stddef.h>
+
+#include <cstdint>
+#include <memory>
+#include <ostream>
+#include <string>
+#include <tuple>
+#include <type_traits>
+#include <utility>
+#include <vector>
+
+#include "api/audio/audio_mixer.h"
+#include "api/audio_codecs/builtin_audio_decoder_factory.h"
+#include "api/audio_codecs/builtin_audio_encoder_factory.h"
+#include "api/candidate.h"
+#include "api/create_peerconnection_factory.h"
+#include "api/jsep.h"
+#include "api/media_types.h"
+#include "api/peer_connection_interface.h"
+#include "api/rtp_receiver_interface.h"
+#include "api/rtp_sender_interface.h"
+#include "api/rtp_transceiver_interface.h"
+#include "api/scoped_refptr.h"
+#include "api/stats/rtc_stats.h"
+#include "api/stats/rtc_stats_report.h"
+#include "api/stats/rtcstats_objects.h"
+#include "api/video_codecs/video_decoder_factory_template.h"
+#include "api/video_codecs/video_decoder_factory_template_dav1d_adapter.h"
+#include "api/video_codecs/video_decoder_factory_template_libvpx_vp8_adapter.h"
+#include "api/video_codecs/video_decoder_factory_template_libvpx_vp9_adapter.h"
+#include "api/video_codecs/video_decoder_factory_template_open_h264_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template.h"
+#include "api/video_codecs/video_encoder_factory_template_libaom_av1_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template_libvpx_vp8_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template_libvpx_vp9_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template_open_h264_adapter.h"
+#include "media/base/stream_params.h"
+#include "modules/audio_device/include/audio_device.h"
+#include "modules/audio_processing/include/audio_processing.h"
+#include "p2p/base/p2p_constants.h"
+#include "p2p/base/port.h"
+#include "p2p/base/port_allocator.h"
+#include "p2p/base/transport_info.h"
+#include "p2p/client/basic_port_allocator.h"
+#include "pc/channel.h"
+#include "pc/peer_connection.h"
+#include "pc/peer_connection_proxy.h"
+#include "pc/peer_connection_wrapper.h"
+#include "pc/rtp_transceiver.h"
+#include "pc/rtp_transport_internal.h"
+#include "pc/sdp_utils.h"
+#include "pc/session_description.h"
+#include "pc/test/integration_test_helpers.h"
+#include "pc/test/mock_peer_connection_observers.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/net_helper.h"
+#include "rtc_base/network.h"
+#include "rtc_base/rtc_certificate_generator.h"
+#include "rtc_base/socket_address.h"
+#include "rtc_base/thread.h"
+#include "test/gtest.h"
+#ifdef WEBRTC_ANDROID
+#include "pc/test/android_test_initializer.h"
+#endif
+#include "pc/test/fake_audio_capture_module.h"
+#include "rtc_base/fake_network.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/virtual_socket_server.h"
+#include "test/gmock.h"
+
+namespace webrtc {
+
+using BundlePolicy = PeerConnectionInterface::BundlePolicy;
+using RTCConfiguration = PeerConnectionInterface::RTCConfiguration;
+using RTCOfferAnswerOptions = PeerConnectionInterface::RTCOfferAnswerOptions;
+using RtcpMuxPolicy = PeerConnectionInterface::RtcpMuxPolicy;
+using rtc::SocketAddress;
+using ::testing::Combine;
+using ::testing::ElementsAre;
+using ::testing::UnorderedElementsAre;
+using ::testing::Values;
+
+// TODO(steveanton): These tests should be rewritten to use the standard
+// RtpSenderInterface/DtlsTransportInterface objects once they're available in
+// the API. The RtpSender can be used to determine which transport a given media
+// will use: https://www.w3.org/TR/webrtc/#dom-rtcrtpsender-transport
+// Should also be able to remove GetTransceiversForTesting at that point.
+
+class FakeNetworkManagerWithNoAnyNetwork : public rtc::FakeNetworkManager {
+ public:
+ std::vector<const rtc::Network*> GetAnyAddressNetworks() override {
+ // This function allocates networks that are owned by the
+ // NetworkManager. But some tests assume that they can release
+ // all networks independent of the network manager.
+ // In order to prevent use-after-free issues, don't allow this
+ // function to have any effect when run in tests.
+ RTC_LOG(LS_INFO) << "FakeNetworkManager::GetAnyAddressNetworks ignored";
+ return {};
+ }
+};
+
+class PeerConnectionWrapperForBundleTest : public PeerConnectionWrapper {
+ public:
+  using PeerConnectionWrapper::PeerConnectionWrapper;
+
+  bool AddIceCandidateToMedia(cricket::Candidate* candidate,
+                              cricket::MediaType media_type) {
+    auto* desc = pc()->remote_description()->description();
+    for (size_t i = 0; i < desc->contents().size(); i++) {
+      const auto& content = desc->contents()[i];
+      if (content.media_description()->type() == media_type) {
+        candidate->set_transport_name(content.name);
+        std::unique_ptr<IceCandidateInterface> jsep_candidate =
+            CreateIceCandidate(content.name, i, *candidate);
+        return pc()->AddIceCandidate(jsep_candidate.get());
+      }
+    }
+    RTC_DCHECK_NOTREACHED();
+    return false;
+  }
+
+  RtpTransportInternal* voice_rtp_transport() {
+    return (voice_channel() ? voice_channel()->rtp_transport() : nullptr);
+  }
+
+  cricket::VoiceChannel* voice_channel() {
+    auto transceivers = GetInternalPeerConnection()->GetTransceiversInternal();
+    for (const auto& transceiver : transceivers) {
+      if (transceiver->media_type() == cricket::MEDIA_TYPE_AUDIO) {
+        return static_cast<cricket::VoiceChannel*>(
+            transceiver->internal()->channel());
+      }
+    }
+    return nullptr;
+  }
+
+  RtpTransportInternal* video_rtp_transport() {
+    return (video_channel() ? video_channel()->rtp_transport() : nullptr);
+  }
+
+  cricket::VideoChannel* video_channel() {
+    auto transceivers = GetInternalPeerConnection()->GetTransceiversInternal();
+    for (const auto& transceiver : transceivers) {
+      if (transceiver->media_type() == cricket::MEDIA_TYPE_VIDEO) {
+        return static_cast<cricket::VideoChannel*>(
+            transceiver->internal()->channel());
+      }
+    }
+    return nullptr;
+  }
+
+  PeerConnection* GetInternalPeerConnection() {
+    auto* pci =
+        static_cast<PeerConnectionProxyWithInternal<PeerConnectionInterface>*>(
+            pc());
+    return static_cast<PeerConnection*>(pci->internal());
+  }
+
+  // Returns true if the stats indicate that an ICE connection is either in
+  // progress or established with the given remote address.
+  bool HasConnectionWithRemoteAddress(const SocketAddress& address) {
+    auto report = GetStats();
+    if (!report) {
+      return false;
+    }
+    std::string matching_candidate_id;
+    for (auto* ice_candidate_stats :
+         report->GetStatsOfType<RTCRemoteIceCandidateStats>()) {
+      if (*ice_candidate_stats->ip == address.HostAsURIString() &&
+          *ice_candidate_stats->port == address.port()) {
+        matching_candidate_id = ice_candidate_stats->id();
+        break;
+      }
+    }
+    if (matching_candidate_id.empty()) {
+      return false;
+    }
+    for (auto* pair_stats :
+         report->GetStatsOfType<RTCIceCandidatePairStats>()) {
+      if (*pair_stats->remote_candidate_id == matching_candidate_id) {
+        if (*pair_stats->state == "in-progress" ||
+            *pair_stats->state == "succeeded") {
+          return true;
+        }
+      }
+    }
+    return false;
+  }
+
+  rtc::FakeNetworkManager* network() { return network_; }
+
+  void set_network(rtc::FakeNetworkManager* network) { network_ = network; }
+
+ private:
+  rtc::FakeNetworkManager* network_ = nullptr;
+};
+
+class PeerConnectionBundleBaseTest : public ::testing::Test {
+ protected:
+ typedef std::unique_ptr<PeerConnectionWrapperForBundleTest> WrapperPtr;
+
+ explicit PeerConnectionBundleBaseTest(SdpSemantics sdp_semantics)
+ : vss_(new rtc::VirtualSocketServer()),
+ socket_factory_(new rtc::BasicPacketSocketFactory(vss_.get())),
+ main_(vss_.get()),
+ sdp_semantics_(sdp_semantics) {
+#ifdef WEBRTC_ANDROID
+ InitializeAndroidObjects();
+#endif
+ pc_factory_ = CreatePeerConnectionFactory(
+ rtc::Thread::Current(), rtc::Thread::Current(), rtc::Thread::Current(),
+ rtc::scoped_refptr<AudioDeviceModule>(FakeAudioCaptureModule::Create()),
+ CreateBuiltinAudioEncoderFactory(), CreateBuiltinAudioDecoderFactory(),
+ std::make_unique<VideoEncoderFactoryTemplate<
+ LibvpxVp8EncoderTemplateAdapter, LibvpxVp9EncoderTemplateAdapter,
+ OpenH264EncoderTemplateAdapter, LibaomAv1EncoderTemplateAdapter>>(),
+ std::make_unique<VideoDecoderFactoryTemplate<
+ LibvpxVp8DecoderTemplateAdapter, LibvpxVp9DecoderTemplateAdapter,
+ OpenH264DecoderTemplateAdapter, Dav1dDecoderTemplateAdapter>>(),
+ nullptr /* audio_mixer */, nullptr /* audio_processing */);
+ }
+
+ WrapperPtr CreatePeerConnection() {
+ return CreatePeerConnection(RTCConfiguration());
+ }
+
+ WrapperPtr CreatePeerConnection(const RTCConfiguration& config) {
+ auto* fake_network = NewFakeNetwork();
+ auto port_allocator = std::make_unique<cricket::BasicPortAllocator>(
+ fake_network, socket_factory_.get());
+ port_allocator->set_flags(cricket::PORTALLOCATOR_DISABLE_TCP |
+ cricket::PORTALLOCATOR_DISABLE_RELAY);
+ port_allocator->set_step_delay(cricket::kMinimumStepDelay);
+ auto observer = std::make_unique<MockPeerConnectionObserver>();
+ RTCConfiguration modified_config = config;
+ modified_config.sdp_semantics = sdp_semantics_;
+ PeerConnectionDependencies pc_dependencies(observer.get());
+ pc_dependencies.allocator = std::move(port_allocator);
+ auto result = pc_factory_->CreatePeerConnectionOrError(
+ modified_config, std::move(pc_dependencies));
+ if (!result.ok()) {
+ return nullptr;
+ }
+
+ auto wrapper = std::make_unique<PeerConnectionWrapperForBundleTest>(
+ pc_factory_, result.MoveValue(), std::move(observer));
+ wrapper->set_network(fake_network);
+ return wrapper;
+ }
+
+ // Accepts the same arguments as CreatePeerConnection and adds default audio
+ // and video tracks.
+ template <typename... Args>
+ WrapperPtr CreatePeerConnectionWithAudioVideo(Args&&... args) {
+ auto wrapper = CreatePeerConnection(std::forward<Args>(args)...);
+ if (!wrapper) {
+ return nullptr;
+ }
+ wrapper->AddAudioTrack("a");
+ wrapper->AddVideoTrack("v");
+ return wrapper;
+ }
+
+ cricket::Candidate CreateLocalUdpCandidate(
+ const rtc::SocketAddress& address) {
+ cricket::Candidate candidate;
+ candidate.set_component(cricket::ICE_CANDIDATE_COMPONENT_DEFAULT);
+ candidate.set_protocol(cricket::UDP_PROTOCOL_NAME);
+ candidate.set_address(address);
+ candidate.set_type(cricket::LOCAL_PORT_TYPE);
+ return candidate;
+ }
+
+ rtc::FakeNetworkManager* NewFakeNetwork() {
+ // The PeerConnection's port allocator is tied to the PeerConnection's
+ // lifetime and expects the underlying NetworkManager to outlive it. If
+ // PeerConnectionWrapper owned the NetworkManager, it would be destroyed
+ // before the PeerConnection (since subclass members are destroyed before
+ // base class members). Therefore, the test fixture will own all the fake
+ // networks even though tests should access the fake network through the
+ // PeerConnectionWrapper.
+ auto* fake_network = new FakeNetworkManagerWithNoAnyNetwork();
+ fake_networks_.emplace_back(fake_network);
+ return fake_network;
+ }
+
+ std::unique_ptr<rtc::VirtualSocketServer> vss_;
+ std::unique_ptr<rtc::BasicPacketSocketFactory> socket_factory_;
+ rtc::AutoSocketServerThread main_;
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> pc_factory_;
+ std::vector<std::unique_ptr<rtc::FakeNetworkManager>> fake_networks_;
+ const SdpSemantics sdp_semantics_;
+};
+
+class PeerConnectionBundleTest
+ : public PeerConnectionBundleBaseTest,
+ public ::testing::WithParamInterface<SdpSemantics> {
+ protected:
+ PeerConnectionBundleTest() : PeerConnectionBundleBaseTest(GetParam()) {}
+};
+
+class PeerConnectionBundleTestUnifiedPlan
+ : public PeerConnectionBundleBaseTest {
+ protected:
+ PeerConnectionBundleTestUnifiedPlan()
+ : PeerConnectionBundleBaseTest(SdpSemantics::kUnifiedPlan) {}
+};
+
+SdpContentMutator RemoveRtcpMux() {
+ return [](cricket::ContentInfo* content, cricket::TransportInfo* transport) {
+ content->media_description()->set_rtcp_mux(false);
+ };
+}
+
+std::vector<int> GetCandidateComponents(
+    const std::vector<IceCandidateInterface*>& candidates) {
+  std::vector<int> components;
+  components.reserve(candidates.size());
+  for (auto* candidate : candidates) {
+    components.push_back(candidate->candidate().component());
+  }
+  return components;
+}
+
+// Test that there are 2 local UDP candidates (1 RTP and 1 RTCP candidate) for
+// each media section when disabling bundling and disabling RTCP multiplexing.
+TEST_P(PeerConnectionBundleTest,
+ TwoCandidatesForEachTransportWhenNoBundleNoRtcpMux) {
+ const SocketAddress kCallerAddress("1.1.1.1", 0);
+ const SocketAddress kCalleeAddress("2.2.2.2", 0);
+
+ RTCConfiguration config;
+ config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyNegotiate;
+ auto caller = CreatePeerConnectionWithAudioVideo(config);
+ caller->network()->AddInterface(kCallerAddress);
+ auto callee = CreatePeerConnectionWithAudioVideo(config);
+ callee->network()->AddInterface(kCalleeAddress);
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ RTCOfferAnswerOptions options_no_bundle;
+ options_no_bundle.use_rtp_mux = false;
+ auto answer = callee->CreateAnswer(options_no_bundle);
+ SdpContentsForEach(RemoveRtcpMux(), answer->description());
+ ASSERT_TRUE(
+ callee->SetLocalDescription(CloneSessionDescription(answer.get())));
+ ASSERT_TRUE(caller->SetRemoteDescription(std::move(answer)));
+
+ // Check that caller has separate RTP and RTCP candidates for each media.
+ EXPECT_TRUE_WAIT(caller->IsIceGatheringDone(), kDefaultTimeout);
+ EXPECT_THAT(
+ GetCandidateComponents(caller->observer()->GetCandidatesByMline(0)),
+ UnorderedElementsAre(cricket::ICE_CANDIDATE_COMPONENT_RTP,
+ cricket::ICE_CANDIDATE_COMPONENT_RTCP));
+ EXPECT_THAT(
+ GetCandidateComponents(caller->observer()->GetCandidatesByMline(1)),
+ UnorderedElementsAre(cricket::ICE_CANDIDATE_COMPONENT_RTP,
+ cricket::ICE_CANDIDATE_COMPONENT_RTCP));
+
+ // Check that callee has separate RTP and RTCP candidates for each media.
+ EXPECT_TRUE_WAIT(callee->IsIceGatheringDone(), kDefaultTimeout);
+ EXPECT_THAT(
+ GetCandidateComponents(callee->observer()->GetCandidatesByMline(0)),
+ UnorderedElementsAre(cricket::ICE_CANDIDATE_COMPONENT_RTP,
+ cricket::ICE_CANDIDATE_COMPONENT_RTCP));
+ EXPECT_THAT(
+ GetCandidateComponents(callee->observer()->GetCandidatesByMline(1)),
+ UnorderedElementsAre(cricket::ICE_CANDIDATE_COMPONENT_RTP,
+ cricket::ICE_CANDIDATE_COMPONENT_RTCP));
+}
+
+// Test that there is 1 local UDP candidate for both RTP and RTCP for each media
+// section when disabling bundle but enabling RTCP multiplexing.
+TEST_P(PeerConnectionBundleTest,
+ OneCandidateForEachTransportWhenNoBundleButRtcpMux) {
+ const SocketAddress kCallerAddress("1.1.1.1", 0);
+
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ caller->network()->AddInterface(kCallerAddress);
+ auto callee = CreatePeerConnectionWithAudioVideo();
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ RTCOfferAnswerOptions options_no_bundle;
+ options_no_bundle.use_rtp_mux = false;
+ ASSERT_TRUE(
+ caller->SetRemoteDescription(callee->CreateAnswer(options_no_bundle)));
+
+ EXPECT_TRUE_WAIT(caller->IsIceGatheringDone(), kDefaultTimeout);
+
+ EXPECT_EQ(1u, caller->observer()->GetCandidatesByMline(0).size());
+ EXPECT_EQ(1u, caller->observer()->GetCandidatesByMline(1).size());
+}
+
+// Test that there is 1 local UDP candidate in only the first media section when
+// bundling and enabling RTCP multiplexing.
+TEST_P(PeerConnectionBundleTest,
+ OneCandidateOnlyOnFirstTransportWhenBundleAndRtcpMux) {
+ const SocketAddress kCallerAddress("1.1.1.1", 0);
+
+ RTCConfiguration config;
+ config.bundle_policy = BundlePolicy::kBundlePolicyMaxBundle;
+ auto caller = CreatePeerConnectionWithAudioVideo(config);
+ caller->network()->AddInterface(kCallerAddress);
+ auto callee = CreatePeerConnectionWithAudioVideo(config);
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ ASSERT_TRUE(caller->SetRemoteDescription(callee->CreateAnswer()));
+
+ EXPECT_TRUE_WAIT(caller->IsIceGatheringDone(), kDefaultTimeout);
+
+ EXPECT_EQ(1u, caller->observer()->GetCandidatesByMline(0).size());
+ EXPECT_EQ(0u, caller->observer()->GetCandidatesByMline(1).size());
+}
+
+// It will fail if the offerer uses the max-BUNDLE policy but the answerer
+// doesn't support BUNDLE.
+TEST_P(PeerConnectionBundleTest, MaxBundleNotSupportedInAnswer) {
+ RTCConfiguration config;
+ config.bundle_policy = BundlePolicy::kBundlePolicyMaxBundle;
+ auto caller = CreatePeerConnectionWithAudioVideo(config);
+ auto callee = CreatePeerConnectionWithAudioVideo();
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ bool equal_before =
+ (caller->voice_rtp_transport() == caller->video_rtp_transport());
+ EXPECT_EQ(true, equal_before);
+ RTCOfferAnswerOptions options;
+ options.use_rtp_mux = false;
+ EXPECT_FALSE(
+ caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal(options)));
+}
+
+// The following parameterized test verifies that an offer/answer with varying
+// bundle policies and either bundle in the answer or not will produce the
+// expected RTP transports for audio and video. In particular, for bundling we
+// care about whether they are separate transports or the same.
+
+enum class BundleIncluded { kBundleInAnswer, kBundleNotInAnswer };
+std::ostream& operator<<(std::ostream& out, BundleIncluded value) {
+ switch (value) {
+ case BundleIncluded::kBundleInAnswer:
+ return out << "bundle in answer";
+ case BundleIncluded::kBundleNotInAnswer:
+ return out << "bundle not in answer";
+ }
+ return out << "unknown";
+}
+
+class PeerConnectionBundleMatrixTest
+ : public PeerConnectionBundleBaseTest,
+ public ::testing::WithParamInterface<
+ std::tuple<SdpSemantics,
+ std::tuple<BundlePolicy, BundleIncluded, bool, bool>>> {
+ protected:
+ PeerConnectionBundleMatrixTest()
+ : PeerConnectionBundleBaseTest(std::get<0>(GetParam())) {
+ auto param = std::get<1>(GetParam());
+ bundle_policy_ = std::get<0>(param);
+ bundle_included_ = std::get<1>(param);
+ expected_same_before_ = std::get<2>(param);
+ expected_same_after_ = std::get<3>(param);
+ }
+
+ PeerConnectionInterface::BundlePolicy bundle_policy_;
+ BundleIncluded bundle_included_;
+ bool expected_same_before_;
+ bool expected_same_after_;
+};
+
+TEST_P(PeerConnectionBundleMatrixTest,
+ VerifyTransportsBeforeAndAfterSettingRemoteAnswer) {
+ RTCConfiguration config;
+ config.bundle_policy = bundle_policy_;
+ auto caller = CreatePeerConnectionWithAudioVideo(config);
+ auto callee = CreatePeerConnectionWithAudioVideo();
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ bool equal_before =
+ (caller->voice_rtp_transport() == caller->video_rtp_transport());
+ EXPECT_EQ(expected_same_before_, equal_before);
+
+ RTCOfferAnswerOptions options;
+ options.use_rtp_mux = (bundle_included_ == BundleIncluded::kBundleInAnswer);
+ ASSERT_TRUE(
+ caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal(options)));
+ bool equal_after =
+ (caller->voice_rtp_transport() == caller->video_rtp_transport());
+ EXPECT_EQ(expected_same_after_, equal_after);
+}
+
+// The max-bundle policy means we should anticipate bundling being negotiated,
+// and multiplex audio/video from the start.
+// For all other policies, bundling should only be enabled if negotiated by the
+// answer.
+INSTANTIATE_TEST_SUITE_P(
+ PeerConnectionBundleTest,
+ PeerConnectionBundleMatrixTest,
+ Combine(Values(SdpSemantics::kPlanB_DEPRECATED, SdpSemantics::kUnifiedPlan),
+ Values(std::make_tuple(BundlePolicy::kBundlePolicyBalanced,
+ BundleIncluded::kBundleInAnswer,
+ false,
+ true),
+ std::make_tuple(BundlePolicy::kBundlePolicyBalanced,
+ BundleIncluded::kBundleNotInAnswer,
+ false,
+ false),
+ std::make_tuple(BundlePolicy::kBundlePolicyMaxBundle,
+ BundleIncluded::kBundleInAnswer,
+ true,
+ true),
+ std::make_tuple(BundlePolicy::kBundlePolicyMaxCompat,
+ BundleIncluded::kBundleInAnswer,
+ false,
+ true),
+ std::make_tuple(BundlePolicy::kBundlePolicyMaxCompat,
+ BundleIncluded::kBundleNotInAnswer,
+ false,
+ false))));
+
+// Test that the audio/video transports on the callee side are the same before
+// and after setting a local answer when max BUNDLE is enabled and an offer with
+// BUNDLE is received.
+TEST_P(PeerConnectionBundleTest,
+ TransportsSameForMaxBundleWithBundleInRemoteOffer) {
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ RTCConfiguration config;
+ config.bundle_policy = BundlePolicy::kBundlePolicyMaxBundle;
+ auto callee = CreatePeerConnectionWithAudioVideo(config);
+
+ RTCOfferAnswerOptions options_with_bundle;
+ options_with_bundle.use_rtp_mux = true;
+ ASSERT_TRUE(callee->SetRemoteDescription(
+ caller->CreateOfferAndSetAsLocal(options_with_bundle)));
+
+ EXPECT_EQ(callee->voice_rtp_transport(), callee->video_rtp_transport());
+
+ ASSERT_TRUE(callee->SetLocalDescription(callee->CreateAnswer()));
+
+ EXPECT_EQ(callee->voice_rtp_transport(), callee->video_rtp_transport());
+}
+
+TEST_P(PeerConnectionBundleTest,
+ FailToSetRemoteOfferWithNoBundleWhenBundlePolicyMaxBundle) {
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ RTCConfiguration config;
+ config.bundle_policy = BundlePolicy::kBundlePolicyMaxBundle;
+ auto callee = CreatePeerConnectionWithAudioVideo(config);
+
+ RTCOfferAnswerOptions options_no_bundle;
+ options_no_bundle.use_rtp_mux = false;
+ EXPECT_FALSE(callee->SetRemoteDescription(
+ caller->CreateOfferAndSetAsLocal(options_no_bundle)));
+}
+
+// Test that if the media section which has the bundled transport is rejected,
+// then the peers still connect and the bundled transport switches to the other
+// media section.
+// Note: This is currently failing because of the following bug:
+// https://bugs.chromium.org/p/webrtc/issues/detail?id=6280
+TEST_P(PeerConnectionBundleTest,
+ DISABLED_SuccessfullyNegotiateMaxBundleIfBundleTransportMediaRejected) {
+ RTCConfiguration config;
+ config.bundle_policy = BundlePolicy::kBundlePolicyMaxBundle;
+ auto caller = CreatePeerConnectionWithAudioVideo(config);
+ auto callee = CreatePeerConnection();
+ callee->AddVideoTrack("v");
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+
+ RTCOfferAnswerOptions options;
+ options.offer_to_receive_audio = 0;
+ ASSERT_TRUE(
+ caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal(options)));
+
+ EXPECT_FALSE(caller->voice_rtp_transport());
+ EXPECT_TRUE(caller->video_rtp_transport());
+}
+
+// When requiring RTCP multiplexing, the PeerConnection never makes RTCP
+// transport channels.
+TEST_P(PeerConnectionBundleTest, NeverCreateRtcpTransportWithRtcpMuxRequired) {
+ RTCConfiguration config;
+ config.rtcp_mux_policy = RtcpMuxPolicy::kRtcpMuxPolicyRequire;
+ auto caller = CreatePeerConnectionWithAudioVideo(config);
+ auto callee = CreatePeerConnectionWithAudioVideo();
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+
+ EXPECT_FALSE(caller->voice_rtp_transport()->rtcp_mux_enabled());
+ EXPECT_FALSE(caller->video_rtp_transport()->rtcp_mux_enabled());
+
+ ASSERT_TRUE(
+ caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal()));
+
+ EXPECT_TRUE(caller->voice_rtp_transport()->rtcp_mux_enabled());
+ EXPECT_TRUE(caller->video_rtp_transport()->rtcp_mux_enabled());
+}
+
+// When negotiating RTCP multiplexing, the PeerConnection makes RTCP transports
+// when the offer is sent, but will destroy them once the remote answer is set.
+TEST_P(PeerConnectionBundleTest,
+ CreateRtcpTransportOnlyBeforeAnswerWithRtcpMuxNegotiate) {
+ RTCConfiguration config;
+ config.rtcp_mux_policy = RtcpMuxPolicy::kRtcpMuxPolicyNegotiate;
+ auto caller = CreatePeerConnectionWithAudioVideo(config);
+ auto callee = CreatePeerConnectionWithAudioVideo();
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+
+ EXPECT_FALSE(caller->voice_rtp_transport()->rtcp_mux_enabled());
+ EXPECT_FALSE(caller->video_rtp_transport()->rtcp_mux_enabled());
+
+ ASSERT_TRUE(
+ caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal()));
+
+ EXPECT_TRUE(caller->voice_rtp_transport()->rtcp_mux_enabled());
+ EXPECT_TRUE(caller->video_rtp_transport()->rtcp_mux_enabled());
+}
+
+TEST_P(PeerConnectionBundleTest, FailToSetDescriptionWithBundleAndNoRtcpMux) {
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnectionWithAudioVideo();
+
+ RTCOfferAnswerOptions options;
+ options.use_rtp_mux = true;
+
+ auto offer = caller->CreateOffer(options);
+ SdpContentsForEach(RemoveRtcpMux(), offer->description());
+
+ std::string error;
+ EXPECT_FALSE(caller->SetLocalDescription(CloneSessionDescription(offer.get()),
+ &error));
+ EXPECT_EQ(
+ "Failed to set local offer sdp: rtcp-mux must be enabled when BUNDLE is "
+ "enabled.",
+ error);
+
+ EXPECT_FALSE(callee->SetRemoteDescription(std::move(offer), &error));
+ EXPECT_EQ(
+ "Failed to set remote offer sdp: rtcp-mux must be enabled when BUNDLE is "
+ "enabled.",
+ error);
+}
+
+// Test that candidates sent to the "video" transport do not get pushed down to
+// the "audio" transport channel when bundling.
+TEST_P(PeerConnectionBundleTest,
+ IgnoreCandidatesForUnusedTransportWhenBundling) {
+ const SocketAddress kAudioAddress1("1.1.1.1", 1111);
+ const SocketAddress kAudioAddress2("2.2.2.2", 2222);
+ const SocketAddress kVideoAddress("3.3.3.3", 3333);
+ const SocketAddress kCallerAddress("4.4.4.4", 0);
+ const SocketAddress kCalleeAddress("5.5.5.5", 0);
+
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnectionWithAudioVideo();
+
+ caller->network()->AddInterface(kCallerAddress);
+ callee->network()->AddInterface(kCalleeAddress);
+
+ RTCOfferAnswerOptions options;
+ options.use_rtp_mux = true;
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ ASSERT_TRUE(
+ caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal(options)));
+
+ // The way the *_WAIT checks work is they only wait if the condition fails,
+ // which does not help in the case where state is not changing. This is
+ // problematic in this test since we want to verify that adding a video
+ // candidate does _not_ change state. So we interleave candidates and assume
+ // that messages are executed in the order they were posted.
+
+ cricket::Candidate audio_candidate1 = CreateLocalUdpCandidate(kAudioAddress1);
+ ASSERT_TRUE(caller->AddIceCandidateToMedia(&audio_candidate1,
+ cricket::MEDIA_TYPE_AUDIO));
+
+ cricket::Candidate video_candidate = CreateLocalUdpCandidate(kVideoAddress);
+ ASSERT_TRUE(caller->AddIceCandidateToMedia(&video_candidate,
+ cricket::MEDIA_TYPE_VIDEO));
+
+ cricket::Candidate audio_candidate2 = CreateLocalUdpCandidate(kAudioAddress2);
+ ASSERT_TRUE(caller->AddIceCandidateToMedia(&audio_candidate2,
+ cricket::MEDIA_TYPE_AUDIO));
+
+ EXPECT_TRUE_WAIT(caller->HasConnectionWithRemoteAddress(kAudioAddress1),
+ kDefaultTimeout);
+ EXPECT_TRUE_WAIT(caller->HasConnectionWithRemoteAddress(kAudioAddress2),
+ kDefaultTimeout);
+ EXPECT_FALSE(caller->HasConnectionWithRemoteAddress(kVideoAddress));
+}
+
+// Test that the transport used by both audio and video is the transport
+// associated with the first MID in the answer BUNDLE group, even if it's in a
+// different order from the offer.
+TEST_P(PeerConnectionBundleTest, BundleOnFirstMidInAnswer) {
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnectionWithAudioVideo();
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+
+ auto* old_video_transport = caller->video_rtp_transport();
+
+ auto answer = callee->CreateAnswer();
+ auto* old_bundle_group =
+ answer->description()->GetGroupByName(cricket::GROUP_TYPE_BUNDLE);
+ std::string first_mid = old_bundle_group->content_names()[0];
+ std::string second_mid = old_bundle_group->content_names()[1];
+ answer->description()->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE);
+
+ cricket::ContentGroup new_bundle_group(cricket::GROUP_TYPE_BUNDLE);
+ new_bundle_group.AddContentName(second_mid);
+ new_bundle_group.AddContentName(first_mid);
+ answer->description()->AddGroup(new_bundle_group);
+
+ ASSERT_TRUE(caller->SetRemoteDescription(std::move(answer)));
+
+ EXPECT_EQ(old_video_transport, caller->video_rtp_transport());
+ EXPECT_EQ(caller->voice_rtp_transport(), caller->video_rtp_transport());
+}
+
+// This tests that applying description with conflicted RTP demuxing criteria
+// will fail when using BUNDLE.
+TEST_P(PeerConnectionBundleTest, ApplyDescriptionWithSameSsrcsBundledFails) {
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnectionWithAudioVideo();
+
+ RTCOfferAnswerOptions options;
+ options.use_rtp_mux = true;
+ auto offer = caller->CreateOffer(options);
+ EXPECT_TRUE(
+ caller->SetLocalDescription(CloneSessionDescription(offer.get())));
+ // Modify the remote SDP to make two m= sections have the same SSRC.
+ ASSERT_GE(offer->description()->contents().size(), 2U);
+ ReplaceFirstSsrc(offer->description()
+ ->contents()[0]
+ .media_description()
+ ->mutable_streams()[0],
+ 1111222);
+ ReplaceFirstSsrc(offer->description()
+ ->contents()[1]
+ .media_description()
+ ->mutable_streams()[0],
+ 1111222);
+
+ EXPECT_TRUE(callee->SetRemoteDescription(std::move(offer)));
+ // When BUNDLE is enabled, applying the description is expected to fail
+ // because the demuxing criteria can not be satisfied.
+ auto answer = callee->CreateAnswer(options);
+ EXPECT_FALSE(callee->SetLocalDescription(std::move(answer)));
+}
+
+// A variant of the above, without BUNDLE duplicate SSRCs are allowed.
+TEST_P(PeerConnectionBundleTest,
+ ApplyDescriptionWithSameSsrcsUnbundledSucceeds) {
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnectionWithAudioVideo();
+
+ RTCOfferAnswerOptions options;
+ options.use_rtp_mux = false;
+ auto offer = caller->CreateOffer(options);
+ EXPECT_TRUE(
+ caller->SetLocalDescription(CloneSessionDescription(offer.get())));
+ // Modify the remote SDP to make two m= sections have the same SSRC.
+ ASSERT_GE(offer->description()->contents().size(), 2U);
+ ReplaceFirstSsrc(offer->description()
+ ->contents()[0]
+ .media_description()
+ ->mutable_streams()[0],
+ 1111222);
+ ReplaceFirstSsrc(offer->description()
+ ->contents()[1]
+ .media_description()
+ ->mutable_streams()[0],
+ 1111222);
+ EXPECT_TRUE(callee->SetRemoteDescription(std::move(offer)));
+
+ // Without BUNDLE, demuxing is done per-transport.
+ auto answer = callee->CreateAnswer(options);
+ EXPECT_TRUE(callee->SetLocalDescription(std::move(answer)));
+}
+
+// This tests that changing the pre-negotiated BUNDLE tag is not supported:
+// once max-bundle has been negotiated, neither an answer nor a re-offer may
+// move the tag (the first MID of the group) to a different m= section.
+TEST_P(PeerConnectionBundleTest, RejectDescriptionChangingBundleTag) {
+  RTCConfiguration config;
+  config.bundle_policy = BundlePolicy::kBundlePolicyMaxBundle;
+  auto caller = CreatePeerConnectionWithAudioVideo(config);
+  auto callee = CreatePeerConnectionWithAudioVideo(config);
+
+  RTCOfferAnswerOptions options;
+  options.use_rtp_mux = true;
+  auto offer = caller->CreateOfferAndSetAsLocal(options);
+
+  // Create a new bundle-group whose first (and only) MID is the second MID
+  // of the negotiated group, i.e. a different BUNDLE tag.
+  auto* old_bundle_group =
+      offer->description()->GetGroupByName(cricket::GROUP_TYPE_BUNDLE);
+  std::string second_mid = old_bundle_group->content_names()[1];
+  cricket::ContentGroup new_bundle_group(cricket::GROUP_TYPE_BUNDLE);
+  new_bundle_group.AddContentName(second_mid);
+
+  auto re_offer = CloneSessionDescription(offer.get());
+  callee->SetRemoteDescription(std::move(offer));
+  auto answer = callee->CreateAnswer(options);
+  // Reject the first MID.
+  answer->description()->contents()[0].rejected = true;
+  // Remove the first MID from the bundle group.
+  answer->description()->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE);
+  answer->description()->AddGroup(new_bundle_group);
+  // The answer is expected to be rejected.
+  EXPECT_FALSE(caller->SetRemoteDescription(std::move(answer)));
+
+  // Do the same thing for re-offer.
+  re_offer->description()->contents()[0].rejected = true;
+  re_offer->description()->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE);
+  re_offer->description()->AddGroup(new_bundle_group);
+  // The re-offer is expected to be rejected.
+  EXPECT_FALSE(caller->SetLocalDescription(std::move(re_offer)));
+}
+
+// This tests that removing contents from BUNDLE group and reject the whole
+// BUNDLE group could work. This is a regression test for
+// (https://bugs.chromium.org/p/chromium/issues/detail?id=827917)
+#ifdef HAVE_SCTP
+TEST_P(PeerConnectionBundleTest, RemovingContentAndRejectBundleGroup) {
+  RTCConfiguration config;
+  config.bundle_policy = BundlePolicy::kBundlePolicyMaxBundle;
+  auto caller = CreatePeerConnectionWithAudioVideo(config);
+  // The data channel adds a third m= section alongside audio and video.
+  caller->CreateDataChannel("dc");
+
+  auto offer = caller->CreateOfferAndSetAsLocal();
+  auto re_offer = CloneSessionDescription(offer.get());
+
+  // Build a replacement group that keeps only the first and third MIDs,
+  // i.e. removes the second MID from the BUNDLE group.
+  auto* old_bundle_group =
+      offer->description()->GetGroupByName(cricket::GROUP_TYPE_BUNDLE);
+  std::string first_mid = old_bundle_group->content_names()[0];
+  std::string third_mid = old_bundle_group->content_names()[2];
+  cricket::ContentGroup new_bundle_group(cricket::GROUP_TYPE_BUNDLE);
+  new_bundle_group.AddContentName(first_mid);
+  new_bundle_group.AddContentName(third_mid);
+
+  // Reject every content that remains in the new bundle group.
+  re_offer->description()->contents()[0].rejected = true;
+  re_offer->description()->contents()[2].rejected = true;
+  re_offer->description()->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE);
+  re_offer->description()->AddGroup(new_bundle_group);
+
+  // Applying the munged re-offer must succeed (regression: crbug 827917).
+  EXPECT_TRUE(caller->SetLocalDescription(std::move(re_offer)));
+}
+#endif
+
+// The BUNDLE group in an answer must be a subset of the offered group; an
+// answer whose group claims an extra MID must not be applicable.
+TEST_P(PeerConnectionBundleTest, AddContentToBundleGroupInAnswerNotSupported) {
+  auto caller = CreatePeerConnectionWithAudioVideo();
+  auto callee = CreatePeerConnectionWithAudioVideo();
+
+  auto offer = caller->CreateOffer();
+  const std::string first_mid = offer->description()->contents()[0].name;
+  const std::string second_mid = offer->description()->contents()[1].name;
+
+  // Munge the offer so that only the first MID is bundled.
+  cricket::ContentGroup munged_group(cricket::GROUP_TYPE_BUNDLE);
+  munged_group.AddContentName(first_mid);
+  offer->description()->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE);
+  offer->description()->AddGroup(munged_group);
+  EXPECT_TRUE(
+      caller->SetLocalDescription(CloneSessionDescription(offer.get())));
+  EXPECT_TRUE(callee->SetRemoteDescription(std::move(offer)));
+
+  // Munge the answer so that its group additionally claims the second MID,
+  // which the offered group never contained.
+  auto answer = callee->CreateAnswer();
+  munged_group.AddContentName(second_mid);
+  answer->description()->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE);
+  answer->description()->AddGroup(munged_group);
+
+  // Applying such an answer is expected to fail.
+  EXPECT_FALSE(callee->SetLocalDescription(std::move(answer)));
+}
+
+// A BUNDLE group that references a MID which no m= section declares must be
+// rejected by both SetLocalDescription and SetRemoteDescription.
+TEST_P(PeerConnectionBundleTest, RejectBundleGroupWithNonExistingMid) {
+  auto caller = CreatePeerConnectionWithAudioVideo();
+  auto callee = CreatePeerConnectionWithAudioVideo();
+
+  auto offer = caller->CreateOffer();
+  // Copy the generated group and append a MID that does not exist.
+  auto bogus_group =
+      *offer->description()->GetGroupByName(cricket::GROUP_TYPE_BUNDLE);
+  bogus_group.AddContentName("non-existing-MID");
+  offer->description()->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE);
+  offer->description()->AddGroup(bogus_group);
+
+  EXPECT_FALSE(
+      caller->SetLocalDescription(CloneSessionDescription(offer.get())));
+  EXPECT_FALSE(callee->SetRemoteDescription(std::move(offer)));
+}
+
+// This tests that an answer shouldn't be able to remove an m= section from an
+// established group without rejecting it.
+TEST_P(PeerConnectionBundleTest, RemoveContentFromBundleGroup) {
+  auto caller = CreatePeerConnectionWithAudioVideo();
+  auto callee = CreatePeerConnectionWithAudioVideo();
+
+  // Complete an initial offer/answer exchange so the BUNDLE group becomes
+  // established state on both sides.
+  EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+  EXPECT_TRUE(
+      caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal()));
+
+  // Renegotiate, then munge the answer: drop the second MID from the group
+  // while leaving its m= section active (not rejected).
+  EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+  auto answer = callee->CreateAnswer();
+  std::string second_mid = answer->description()->contents()[1].name;
+
+  auto invalid_bundle_group =
+      *answer->description()->GetGroupByName(cricket::GROUP_TYPE_BUNDLE);
+  invalid_bundle_group.RemoveContentName(second_mid);
+  answer->description()->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE);
+  answer->description()->AddGroup(invalid_bundle_group);
+
+  // Applying such an answer must fail.
+  EXPECT_FALSE(
+      callee->SetLocalDescription(CloneSessionDescription(answer.get())));
+}
+
+INSTANTIATE_TEST_SUITE_P(PeerConnectionBundleTest,
+ PeerConnectionBundleTest,
+ Values(SdpSemantics::kPlanB_DEPRECATED,
+ SdpSemantics::kUnifiedPlan));
+
+// According to RFC5888, if an endpoint understands the semantics of an
+// "a=group", it MUST return an answer with that group. So, an empty BUNDLE
+// group is valid when the answerer rejects all m= sections (by stopping all
+// transceivers), meaning there's nothing to bundle.
+//
+// Only writing this test for Unified Plan mode, since there's no way to reject
+// m= sections in answers for Plan B without SDP munging.
+TEST_F(PeerConnectionBundleTestUnifiedPlan,
+       EmptyBundleGroupCreatedInAnswerWhenAppropriate) {
+  auto caller = CreatePeerConnectionWithAudioVideo();
+  auto callee = CreatePeerConnection();
+
+  EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+
+  // Stop all transceivers, causing all m= sections to be rejected.
+  for (const auto& transceiver : callee->pc()->GetTransceivers()) {
+    transceiver->StopInternal();
+  }
+  EXPECT_TRUE(
+      caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal()));
+
+  // Verify that the answer actually contained an empty bundle group.
+  const SessionDescriptionInterface* desc = callee->pc()->local_description();
+  ASSERT_NE(nullptr, desc);
+  const cricket::ContentGroup* bundle_group =
+      desc->description()->GetGroupByName(cricket::GROUP_TYPE_BUNDLE);
+  ASSERT_NE(nullptr, bundle_group);
+  // The group attribute is present (per RFC 5888) but lists no MIDs.
+  EXPECT_TRUE(bundle_group->content_names().empty());
+}
+
+// Verifies that an offer munged to carry two separate BUNDLE groups
+// negotiates two shared transports: one per group.
+TEST_F(PeerConnectionBundleTestUnifiedPlan, MultipleBundleGroups) {
+  auto caller = CreatePeerConnection();
+  caller->AddAudioTrack("0_audio");
+  caller->AddAudioTrack("1_audio");
+  // Fix: these are video tracks; they were previously mislabeled
+  // "2_audio"/"3_audio". The labels are arbitrary but should not mislead.
+  caller->AddVideoTrack("2_video");
+  caller->AddVideoTrack("3_video");
+  auto callee = CreatePeerConnection();
+
+  auto offer = caller->CreateOffer(RTCOfferAnswerOptions());
+  // Modify the GROUP to have two BUNDLEs. We know that the MIDs will be
+  // 0, 1, 2 and 3 because our implementation has predictable MIDs.
+  offer->description()->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE);
+  cricket::ContentGroup bundle_group1(cricket::GROUP_TYPE_BUNDLE);
+  bundle_group1.AddContentName("0");
+  bundle_group1.AddContentName("1");
+  cricket::ContentGroup bundle_group2(cricket::GROUP_TYPE_BUNDLE);
+  bundle_group2.AddContentName("2");
+  bundle_group2.AddContentName("3");
+  offer->description()->AddGroup(bundle_group1);
+  offer->description()->AddGroup(bundle_group2);
+
+  EXPECT_TRUE(
+      caller->SetLocalDescription(CloneSessionDescription(offer.get())));
+  EXPECT_TRUE(callee->SetRemoteDescription(std::move(offer)));
+  auto answer = callee->CreateAnswer();
+  EXPECT_TRUE(
+      callee->SetLocalDescription(CloneSessionDescription(answer.get())));
+  EXPECT_TRUE(caller->SetRemoteDescription(std::move(answer)));
+
+  // Verify bundling on sender side: senders in the same group share a DTLS
+  // transport; senders in different groups do not.
+  auto senders = caller->pc()->GetSenders();
+  ASSERT_EQ(senders.size(), 4u);
+  auto sender0_transport = senders[0]->dtls_transport();
+  auto sender1_transport = senders[1]->dtls_transport();
+  auto sender2_transport = senders[2]->dtls_transport();
+  auto sender3_transport = senders[3]->dtls_transport();
+  EXPECT_EQ(sender0_transport, sender1_transport);
+  EXPECT_EQ(sender2_transport, sender3_transport);
+  EXPECT_NE(sender0_transport, sender2_transport);
+
+  // Verify bundling on receiver side.
+  auto receivers = callee->pc()->GetReceivers();
+  ASSERT_EQ(receivers.size(), 4u);
+  auto receiver0_transport = receivers[0]->dtls_transport();
+  auto receiver1_transport = receivers[1]->dtls_transport();
+  auto receiver2_transport = receivers[2]->dtls_transport();
+  auto receiver3_transport = receivers[3]->dtls_transport();
+  EXPECT_EQ(receiver0_transport, receiver1_transport);
+  EXPECT_EQ(receiver2_transport, receiver3_transport);
+  EXPECT_NE(receiver0_transport, receiver2_transport);
+}
+
+// Test that, with the "max-compat" bundle policy, it's possible to add an m=
+// section that's not part of an existing bundle group.
+TEST_F(PeerConnectionBundleTestUnifiedPlan, AddNonBundledSection) {
+  RTCConfiguration config;
+  config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxCompat;
+  auto caller = CreatePeerConnection(config);
+  caller->AddAudioTrack("0_audio");
+  caller->AddAudioTrack("1_audio");
+  auto callee = CreatePeerConnection(config);
+
+  // Establish an existing BUNDLE group.
+  auto offer = caller->CreateOffer(RTCOfferAnswerOptions());
+  EXPECT_TRUE(
+      caller->SetLocalDescription(CloneSessionDescription(offer.get())));
+  EXPECT_TRUE(callee->SetRemoteDescription(std::move(offer)));
+  auto answer = callee->CreateAnswer();
+  EXPECT_TRUE(
+      callee->SetLocalDescription(CloneSessionDescription(answer.get())));
+  EXPECT_TRUE(caller->SetRemoteDescription(std::move(answer)));
+
+  // Add a track but munge SDP so it's not part of the bundle group: the new
+  // group only names MIDs "0" and "1" (MIDs are predictable, see above).
+  caller->AddAudioTrack("3_audio");
+  offer = caller->CreateOffer(RTCOfferAnswerOptions());
+  offer->description()->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE);
+  cricket::ContentGroup bundle_group(cricket::GROUP_TYPE_BUNDLE);
+  bundle_group.AddContentName("0");
+  bundle_group.AddContentName("1");
+  offer->description()->AddGroup(bundle_group);
+  EXPECT_TRUE(
+      caller->SetLocalDescription(CloneSessionDescription(offer.get())));
+  EXPECT_TRUE(callee->SetRemoteDescription(std::move(offer)));
+  answer = callee->CreateAnswer();
+  EXPECT_TRUE(
+      callee->SetLocalDescription(CloneSessionDescription(answer.get())));
+  EXPECT_TRUE(caller->SetRemoteDescription(std::move(answer)));
+
+  // Verify bundling on the sender side: the two bundled senders share a DTLS
+  // transport while the non-bundled one gets its own.
+  auto senders = caller->pc()->GetSenders();
+  ASSERT_EQ(senders.size(), 3u);
+  auto sender0_transport = senders[0]->dtls_transport();
+  auto sender1_transport = senders[1]->dtls_transport();
+  auto sender2_transport = senders[2]->dtls_transport();
+  EXPECT_EQ(sender0_transport, sender1_transport);
+  EXPECT_NE(sender0_transport, sender2_transport);
+
+  // Verify bundling on receiver side.
+  auto receivers = callee->pc()->GetReceivers();
+  ASSERT_EQ(receivers.size(), 3u);
+  auto receiver0_transport = receivers[0]->dtls_transport();
+  auto receiver1_transport = receivers[1]->dtls_transport();
+  auto receiver2_transport = receivers[2]->dtls_transport();
+  EXPECT_EQ(receiver0_transport, receiver1_transport);
+  EXPECT_NE(receiver0_transport, receiver2_transport);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/peer_connection_crypto_unittest.cc b/third_party/libwebrtc/pc/peer_connection_crypto_unittest.cc
new file mode 100644
index 0000000000..dc350b2be0
--- /dev/null
+++ b/third_party/libwebrtc/pc/peer_connection_crypto_unittest.cc
@@ -0,0 +1,809 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stddef.h>
+
+#include <memory>
+#include <ostream>
+#include <string>
+#include <tuple>
+#include <type_traits>
+#include <utility>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/audio/audio_mixer.h"
+#include "api/audio_codecs/builtin_audio_decoder_factory.h"
+#include "api/audio_codecs/builtin_audio_encoder_factory.h"
+#include "api/create_peerconnection_factory.h"
+#include "api/crypto/crypto_options.h"
+#include "api/crypto_params.h"
+#include "api/jsep.h"
+#include "api/peer_connection_interface.h"
+#include "api/scoped_refptr.h"
+#include "api/video_codecs/video_decoder_factory_template.h"
+#include "api/video_codecs/video_decoder_factory_template_dav1d_adapter.h"
+#include "api/video_codecs/video_decoder_factory_template_libvpx_vp8_adapter.h"
+#include "api/video_codecs/video_decoder_factory_template_libvpx_vp9_adapter.h"
+#include "api/video_codecs/video_decoder_factory_template_open_h264_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template.h"
+#include "api/video_codecs/video_encoder_factory_template_libaom_av1_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template_libvpx_vp8_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template_libvpx_vp9_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template_open_h264_adapter.h"
+#include "modules/audio_device/include/audio_device.h"
+#include "modules/audio_processing/include/audio_processing.h"
+#include "p2p/base/fake_port_allocator.h"
+#include "p2p/base/port_allocator.h"
+#include "p2p/base/transport_description.h"
+#include "p2p/base/transport_info.h"
+#include "pc/media_protocol_names.h"
+#include "pc/media_session.h"
+#include "pc/peer_connection_wrapper.h"
+#include "pc/sdp_utils.h"
+#include "pc/session_description.h"
+#include "pc/test/mock_peer_connection_observers.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/rtc_certificate.h"
+#include "rtc_base/rtc_certificate_generator.h"
+#include "rtc_base/ssl_fingerprint.h"
+#include "rtc_base/thread.h"
+#include "test/gtest.h"
+#include "test/scoped_key_value_config.h"
+#ifdef WEBRTC_ANDROID
+#include "pc/test/android_test_initializer.h"
+#endif
+#include "pc/test/fake_audio_capture_module.h"
+#include "pc/test/fake_rtc_certificate_generator.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/virtual_socket_server.h"
+
+namespace webrtc {
+
+using RTCConfiguration = PeerConnectionInterface::RTCConfiguration;
+using RTCOfferAnswerOptions = PeerConnectionInterface::RTCOfferAnswerOptions;
+using ::testing::Combine;
+using ::testing::Values;
+
+constexpr int kGenerateCertTimeout = 1000;
+
+// Base fixture for the crypto tests. Owns a virtual socket server and builds
+// a PeerConnectionFactory that uses the current thread for all factory
+// threads, a fake audio capture module, the built-in audio codec factories
+// and the template-based video codec factories.
+class PeerConnectionCryptoBaseTest : public ::testing::Test {
+ protected:
+  typedef std::unique_ptr<PeerConnectionWrapper> WrapperPtr;
+
+  explicit PeerConnectionCryptoBaseTest(SdpSemantics sdp_semantics)
+      : vss_(new rtc::VirtualSocketServer()),
+        main_(vss_.get()),
+        sdp_semantics_(sdp_semantics) {
+#ifdef WEBRTC_ANDROID
+    InitializeAndroidObjects();
+#endif
+    pc_factory_ = CreatePeerConnectionFactory(
+        rtc::Thread::Current(), rtc::Thread::Current(), rtc::Thread::Current(),
+        FakeAudioCaptureModule::Create(), CreateBuiltinAudioEncoderFactory(),
+        CreateBuiltinAudioDecoderFactory(),
+        std::make_unique<VideoEncoderFactoryTemplate<
+            LibvpxVp8EncoderTemplateAdapter, LibvpxVp9EncoderTemplateAdapter,
+            OpenH264EncoderTemplateAdapter, LibaomAv1EncoderTemplateAdapter>>(),
+        std::make_unique<VideoDecoderFactoryTemplate<
+            LibvpxVp8DecoderTemplateAdapter, LibvpxVp9DecoderTemplateAdapter,
+            OpenH264DecoderTemplateAdapter, Dav1dDecoderTemplateAdapter>>(),
+        nullptr /* audio_mixer */, nullptr /* audio_processing */);
+  }
+
+  WrapperPtr CreatePeerConnection() {
+    return CreatePeerConnection(RTCConfiguration());
+  }
+
+  WrapperPtr CreatePeerConnection(const RTCConfiguration& config) {
+    return CreatePeerConnection(config, nullptr);
+  }
+
+  // Creates a wrapped PeerConnection using a fake port allocator and, when
+  // given, `cert_gen` as the certificate generator. The fixture's SDP
+  // semantics override whatever `config` specifies. Returns nullptr if the
+  // PeerConnection could not be created.
+  WrapperPtr CreatePeerConnection(
+      const RTCConfiguration& config,
+      std::unique_ptr<rtc::RTCCertificateGeneratorInterface> cert_gen) {
+    auto fake_port_allocator = std::make_unique<cricket::FakePortAllocator>(
+        rtc::Thread::Current(),
+        std::make_unique<rtc::BasicPacketSocketFactory>(vss_.get()),
+        &field_trials_);
+    auto observer = std::make_unique<MockPeerConnectionObserver>();
+    RTCConfiguration modified_config = config;
+    modified_config.sdp_semantics = sdp_semantics_;
+    PeerConnectionDependencies pc_dependencies(observer.get());
+    pc_dependencies.allocator = std::move(fake_port_allocator);
+    pc_dependencies.cert_generator = std::move(cert_gen);
+    auto result = pc_factory_->CreatePeerConnectionOrError(
+        modified_config, std::move(pc_dependencies));
+    if (!result.ok()) {
+      return nullptr;
+    }
+
+    observer->SetPeerConnectionInterface(result.value().get());
+    return std::make_unique<PeerConnectionWrapper>(
+        pc_factory_, result.MoveValue(), std::move(observer));
+  }
+
+  // Accepts the same arguments as CreatePeerConnection and adds default audio
+  // and video tracks.
+  template <typename... Args>
+  WrapperPtr CreatePeerConnectionWithAudioVideo(Args&&... args) {
+    auto wrapper = CreatePeerConnection(std::forward<Args>(args)...);
+    if (!wrapper) {
+      return nullptr;
+    }
+    wrapper->AddAudioTrack("a");
+    wrapper->AddVideoTrack("v");
+    return wrapper;
+  }
+
+  // Mutable reference to the connection role of the transport belonging to
+  // the first audio content in `desc`.
+  cricket::ConnectionRole& AudioConnectionRole(
+      cricket::SessionDescription* desc) {
+    return ConnectionRoleFromContent(desc, cricket::GetFirstAudioContent(desc));
+  }
+
+  // Same as above, for the first video content.
+  cricket::ConnectionRole& VideoConnectionRole(
+      cricket::SessionDescription* desc) {
+    return ConnectionRoleFromContent(desc, cricket::GetFirstVideoContent(desc));
+  }
+
+  // Looks up `content`'s transport info in `desc` and returns a mutable
+  // reference to its connection role. DCHECKs that both exist.
+  cricket::ConnectionRole& ConnectionRoleFromContent(
+      cricket::SessionDescription* desc,
+      cricket::ContentInfo* content) {
+    RTC_DCHECK(content);
+    auto* transport_info = desc->GetTransportInfoByName(content->name);
+    RTC_DCHECK(transport_info);
+    return transport_info->description.connection_role;
+  }
+
+  webrtc::test::ScopedKeyValueConfig field_trials_;
+  std::unique_ptr<rtc::VirtualSocketServer> vss_;
+  rtc::AutoSocketServerThread main_;
+  rtc::scoped_refptr<PeerConnectionFactoryInterface> pc_factory_;
+  const SdpSemantics sdp_semantics_;
+};
+
+// Predicate: the content's transport carries a DTLS identity fingerprint.
+SdpContentPredicate HaveDtlsFingerprint() {
+  return [](const cricket::ContentInfo* content,
+            const cricket::TransportInfo* transport) {
+    const bool has_fingerprint =
+        transport->description.identity_fingerprint != nullptr;
+    return has_fingerprint;
+  };
+}
+
+// Predicate: the media description lists at least one SDES crypto.
+SdpContentPredicate HaveSdesCryptos() {
+  return [](const cricket::ContentInfo* content,
+            const cricket::TransportInfo* transport) {
+    const auto& cryptos = content->media_description()->cryptos();
+    return !cryptos.empty();
+  };
+}
+
+// Predicate: the media description uses exactly `protocol`.
+SdpContentPredicate HaveProtocol(const std::string& protocol) {
+  return [protocol](const cricket::ContentInfo* content,
+                    const cricket::TransportInfo* transport) {
+    const auto& actual_protocol = content->media_description()->protocol();
+    return actual_protocol == protocol;
+  };
+}
+
+// Predicate: the media description carries exactly `num_crypto_suites`
+// cryptos, at least one of which is AEAD_AES_256_GCM with a 67-character
+// key parameter.
+SdpContentPredicate HaveSdesGcmCryptos(size_t num_crypto_suites) {
+  return [num_crypto_suites](const cricket::ContentInfo* content,
+                             const cricket::TransportInfo* transport) {
+    const auto& cryptos = content->media_description()->cryptos();
+    if (cryptos.size() != num_crypto_suites) {
+      return false;
+    }
+    for (const auto& crypto : cryptos) {
+      if (crypto.key_params.size() == 67U &&
+          crypto.crypto_suite == "AEAD_AES_256_GCM") {
+        return true;
+      }
+    }
+    return false;
+  };
+}
+
+// Parameterized fixture: runs each test under the SdpSemantics supplied by
+// INSTANTIATE_TEST_SUITE_P (Plan B and Unified Plan).
+class PeerConnectionCryptoTest
+    : public PeerConnectionCryptoBaseTest,
+      public ::testing::WithParamInterface<SdpSemantics> {
+ protected:
+  PeerConnectionCryptoTest() : PeerConnectionCryptoBaseTest(GetParam()) {}
+};
+
+// Mutator: strips every SDES crypto from the media description.
+SdpContentMutator RemoveSdesCryptos() {
+  return [](cricket::ContentInfo* content, cricket::TransportInfo* transport) {
+    content->media_description()->set_cryptos({});
+  };
+}
+
+// Mutator: removes the DTLS identity fingerprint from the transport.
+SdpContentMutator RemoveDtlsFingerprint() {
+  return [](cricket::ContentInfo* content, cricket::TransportInfo* transport) {
+    transport->description.identity_fingerprint.reset();
+  };
+}
+
+// When DTLS is enabled, the SDP offer/answer should have a DTLS fingerprint and
+// no SDES cryptos.
+TEST_P(PeerConnectionCryptoTest, CorrectCryptoInOfferWhenDtlsEnabled) {
+ RTCConfiguration config;
+ auto caller = CreatePeerConnectionWithAudioVideo(config);
+
+ auto offer = caller->CreateOffer();
+ ASSERT_TRUE(offer);
+
+ ASSERT_FALSE(offer->description()->contents().empty());
+ EXPECT_TRUE(SdpContentsAll(HaveDtlsFingerprint(), offer->description()));
+ EXPECT_TRUE(SdpContentsNone(HaveSdesCryptos(), offer->description()));
+ EXPECT_TRUE(SdpContentsAll(HaveProtocol(cricket::kMediaProtocolDtlsSavpf),
+ offer->description()));
+}
+TEST_P(PeerConnectionCryptoTest, CorrectCryptoInAnswerWhenDtlsEnabled) {
+ RTCConfiguration config;
+ auto caller = CreatePeerConnectionWithAudioVideo(config);
+ auto callee = CreatePeerConnectionWithAudioVideo(config);
+
+ callee->SetRemoteDescription(caller->CreateOffer());
+ auto answer = callee->CreateAnswer();
+ ASSERT_TRUE(answer);
+
+ ASSERT_FALSE(answer->description()->contents().empty());
+ EXPECT_TRUE(SdpContentsAll(HaveDtlsFingerprint(), answer->description()));
+ EXPECT_TRUE(SdpContentsNone(HaveSdesCryptos(), answer->description()));
+ EXPECT_TRUE(SdpContentsAll(HaveProtocol(cricket::kMediaProtocolDtlsSavpf),
+ answer->description()));
+}
+
+#if defined(WEBRTC_FUCHSIA)
+// When DTLS is disabled, the SDP offer/answer should include SDES cryptos and
+// should not have a DTLS fingerprint.
+TEST_P(PeerConnectionCryptoTest, CorrectCryptoInOfferWhenDtlsDisabled) {
+ RTCConfiguration config;
+ config.enable_dtls_srtp.emplace(false);
+ auto caller = CreatePeerConnectionWithAudioVideo(config);
+
+ auto offer = caller->CreateOffer();
+ ASSERT_TRUE(offer);
+
+ ASSERT_FALSE(offer->description()->contents().empty());
+ EXPECT_TRUE(SdpContentsAll(HaveSdesCryptos(), offer->description()));
+ EXPECT_TRUE(SdpContentsNone(HaveDtlsFingerprint(), offer->description()));
+ EXPECT_TRUE(SdpContentsAll(HaveProtocol(cricket::kMediaProtocolSavpf),
+ offer->description()));
+}
+
+TEST_P(PeerConnectionCryptoTest, CorrectCryptoInAnswerWhenDtlsDisabled) {
+ RTCConfiguration config;
+ config.enable_dtls_srtp.emplace(false);
+ auto caller = CreatePeerConnectionWithAudioVideo(config);
+ auto callee = CreatePeerConnectionWithAudioVideo(config);
+
+ callee->SetRemoteDescription(caller->CreateOffer());
+ auto answer = callee->CreateAnswer();
+ ASSERT_TRUE(answer);
+
+ ASSERT_FALSE(answer->description()->contents().empty());
+ EXPECT_TRUE(SdpContentsAll(HaveSdesCryptos(), answer->description()));
+ EXPECT_TRUE(SdpContentsNone(HaveDtlsFingerprint(), answer->description()));
+ EXPECT_TRUE(SdpContentsAll(HaveProtocol(cricket::kMediaProtocolSavpf),
+ answer->description()));
+}
+
+// When encryption is disabled, the SDP offer/answer should have neither a DTLS
+// fingerprint nor any SDES crypto options.
+TEST_P(PeerConnectionCryptoTest, CorrectCryptoInOfferWhenEncryptionDisabled) {
+ PeerConnectionFactoryInterface::Options options;
+ options.disable_encryption = true;
+ pc_factory_->SetOptions(options);
+
+ RTCConfiguration config;
+ config.enable_dtls_srtp.emplace(false);
+ auto caller = CreatePeerConnectionWithAudioVideo(config);
+
+ auto offer = caller->CreateOffer();
+ ASSERT_TRUE(offer);
+
+ ASSERT_FALSE(offer->description()->contents().empty());
+ EXPECT_TRUE(SdpContentsNone(HaveSdesCryptos(), offer->description()));
+ EXPECT_TRUE(SdpContentsNone(HaveDtlsFingerprint(), offer->description()));
+ EXPECT_TRUE(SdpContentsAll(HaveProtocol(cricket::kMediaProtocolAvpf),
+ offer->description()));
+}
+
+TEST_P(PeerConnectionCryptoTest, CorrectCryptoInAnswerWhenEncryptionDisabled) {
+ PeerConnectionFactoryInterface::Options options;
+ options.disable_encryption = true;
+ pc_factory_->SetOptions(options);
+
+ RTCConfiguration config;
+ config.enable_dtls_srtp.emplace(false);
+ auto caller = CreatePeerConnectionWithAudioVideo(config);
+ auto callee = CreatePeerConnectionWithAudioVideo(config);
+
+ callee->SetRemoteDescription(caller->CreateOffer());
+ auto answer = callee->CreateAnswer();
+ ASSERT_TRUE(answer);
+
+ ASSERT_FALSE(answer->description()->contents().empty());
+ EXPECT_TRUE(SdpContentsNone(HaveSdesCryptos(), answer->description()));
+ EXPECT_TRUE(SdpContentsNone(HaveDtlsFingerprint(), answer->description()));
+ EXPECT_TRUE(SdpContentsAll(HaveProtocol(cricket::kMediaProtocolAvpf),
+ answer->description()));
+}
+
+// CryptoOptions has been promoted to RTCConfiguration. As such, if it is ever
+// set in the configuration it should override the settings set in the factory.
+TEST_P(PeerConnectionCryptoTest, RTCConfigurationCryptoOptionOverridesFactory) {
+ PeerConnectionFactoryInterface::Options options;
+ options.crypto_options.srtp.enable_gcm_crypto_suites = true;
+ pc_factory_->SetOptions(options);
+
+ RTCConfiguration config;
+ config.enable_dtls_srtp.emplace(false);
+ CryptoOptions crypto_options;
+ crypto_options.srtp.enable_gcm_crypto_suites = false;
+ config.crypto_options = crypto_options;
+ auto caller = CreatePeerConnectionWithAudioVideo(config);
+
+ auto offer = caller->CreateOffer();
+ ASSERT_TRUE(offer);
+
+ ASSERT_FALSE(offer->description()->contents().empty());
+ // This should exist if GCM is enabled see CorrectCryptoInOfferWithSdesAndGcm
+ EXPECT_FALSE(SdpContentsAll(HaveSdesGcmCryptos(3), offer->description()));
+}
+
+// When DTLS is disabled and GCM cipher suites are enabled, the SDP offer/answer
+// should have the correct ciphers in the SDES crypto options.
+// With GCM cipher suites enabled, there will be 3 cryptos in the offer and 1
+// in the answer.
+TEST_P(PeerConnectionCryptoTest, CorrectCryptoInOfferWithSdesAndGcm) {
+ PeerConnectionFactoryInterface::Options options;
+ options.crypto_options.srtp.enable_gcm_crypto_suites = true;
+ pc_factory_->SetOptions(options);
+
+ RTCConfiguration config;
+ config.enable_dtls_srtp.emplace(false);
+ auto caller = CreatePeerConnectionWithAudioVideo(config);
+
+ auto offer = caller->CreateOffer();
+ ASSERT_TRUE(offer);
+
+ ASSERT_FALSE(offer->description()->contents().empty());
+ EXPECT_TRUE(SdpContentsAll(HaveSdesGcmCryptos(3), offer->description()));
+}
+
+TEST_P(PeerConnectionCryptoTest, CorrectCryptoInAnswerWithSdesAndGcm) {
+ PeerConnectionFactoryInterface::Options options;
+ options.crypto_options.srtp.enable_gcm_crypto_suites = true;
+ pc_factory_->SetOptions(options);
+
+ RTCConfiguration config;
+ config.enable_dtls_srtp.emplace(false);
+ auto caller = CreatePeerConnectionWithAudioVideo(config);
+ auto callee = CreatePeerConnectionWithAudioVideo(config);
+
+ auto offer = caller->CreateOffer();
+ for (cricket::ContentInfo& content : offer->description()->contents()) {
+ auto cryptos = content.media_description()->cryptos();
+ cryptos.erase(cryptos.begin()); // Assumes that non-GCM is the default.
+ content.media_description()->set_cryptos(cryptos);
+ }
+
+ callee->SetRemoteDescription(std::move(offer));
+ auto answer = callee->CreateAnswer();
+ ASSERT_TRUE(answer);
+
+ ASSERT_FALSE(answer->description()->contents().empty());
+ EXPECT_TRUE(SdpContentsAll(HaveSdesGcmCryptos(1), answer->description()));
+}
+
+TEST_P(PeerConnectionCryptoTest, CanSetSdesGcmRemoteOfferAndLocalAnswer) {
+ PeerConnectionFactoryInterface::Options options;
+ options.crypto_options.srtp.enable_gcm_crypto_suites = true;
+ pc_factory_->SetOptions(options);
+
+ RTCConfiguration config;
+ config.enable_dtls_srtp.emplace(false);
+ auto caller = CreatePeerConnectionWithAudioVideo(config);
+ auto callee = CreatePeerConnectionWithAudioVideo(config);
+
+ auto offer = caller->CreateOffer();
+ ASSERT_TRUE(offer);
+ ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer)));
+
+ auto answer = callee->CreateAnswer();
+ ASSERT_TRUE(answer);
+ ASSERT_TRUE(callee->SetLocalDescription(std::move(answer)));
+}
+
+// The following group tests that two PeerConnections can successfully exchange
+// an offer/answer when DTLS is off and that they will refuse any offer/answer
+// applied locally/remotely if it does not include SDES cryptos.
+TEST_P(PeerConnectionCryptoTest, ExchangeOfferAnswerWhenSdesOn) {
+ RTCConfiguration config;
+ config.enable_dtls_srtp.emplace(false);
+ auto caller = CreatePeerConnectionWithAudioVideo(config);
+ auto callee = CreatePeerConnectionWithAudioVideo(config);
+
+ auto offer = caller->CreateOfferAndSetAsLocal();
+ ASSERT_TRUE(offer);
+ ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer)));
+
+ auto answer = callee->CreateAnswerAndSetAsLocal();
+ ASSERT_TRUE(answer);
+ ASSERT_TRUE(caller->SetRemoteDescription(std::move(answer)));
+}
+TEST_P(PeerConnectionCryptoTest, FailToSetLocalOfferWithNoCryptosWhenSdesOn) {
+ RTCConfiguration config;
+ config.enable_dtls_srtp.emplace(false);
+ auto caller = CreatePeerConnectionWithAudioVideo(config);
+
+ auto offer = caller->CreateOffer();
+ SdpContentsForEach(RemoveSdesCryptos(), offer->description());
+
+ EXPECT_FALSE(caller->SetLocalDescription(std::move(offer)));
+}
+TEST_P(PeerConnectionCryptoTest, FailToSetRemoteOfferWithNoCryptosWhenSdesOn) {
+ RTCConfiguration config;
+ config.enable_dtls_srtp.emplace(false);
+ auto caller = CreatePeerConnectionWithAudioVideo(config);
+ auto callee = CreatePeerConnectionWithAudioVideo(config);
+
+ auto offer = caller->CreateOffer();
+ SdpContentsForEach(RemoveSdesCryptos(), offer->description());
+
+ EXPECT_FALSE(callee->SetRemoteDescription(std::move(offer)));
+}
+TEST_P(PeerConnectionCryptoTest, FailToSetLocalAnswerWithNoCryptosWhenSdesOn) {
+ RTCConfiguration config;
+ config.enable_dtls_srtp.emplace(false);
+ auto caller = CreatePeerConnectionWithAudioVideo(config);
+ auto callee = CreatePeerConnectionWithAudioVideo(config);
+
+ callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal());
+ auto answer = callee->CreateAnswer();
+ SdpContentsForEach(RemoveSdesCryptos(), answer->description());
+
+ EXPECT_FALSE(callee->SetLocalDescription(std::move(answer)));
+}
+TEST_P(PeerConnectionCryptoTest, FailToSetRemoteAnswerWithNoCryptosWhenSdesOn) {
+ RTCConfiguration config;
+ config.enable_dtls_srtp.emplace(false);
+ auto caller = CreatePeerConnectionWithAudioVideo(config);
+ auto callee = CreatePeerConnectionWithAudioVideo(config);
+
+ callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal());
+ auto answer = callee->CreateAnswerAndSetAsLocal();
+ SdpContentsForEach(RemoveSdesCryptos(), answer->description());
+
+ EXPECT_FALSE(caller->SetRemoteDescription(std::move(answer)));
+}
+#endif
+
+// The following group tests that two PeerConnections can successfully exchange
+// an offer/answer when DTLS is on and that they will refuse any offer/answer
+// applied locally/remotely if it does not include a DTLS fingerprint.
+TEST_P(PeerConnectionCryptoTest, ExchangeOfferAnswerWhenDtlsOn) {
+ RTCConfiguration config;
+ auto caller = CreatePeerConnectionWithAudioVideo(config);
+ auto callee = CreatePeerConnectionWithAudioVideo(config);
+
+ auto offer = caller->CreateOfferAndSetAsLocal();
+ ASSERT_TRUE(offer);
+ ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer)));
+
+ auto answer = callee->CreateAnswerAndSetAsLocal();
+ ASSERT_TRUE(answer);
+ ASSERT_TRUE(caller->SetRemoteDescription(std::move(answer)));
+}
+TEST_P(PeerConnectionCryptoTest,
+ FailToSetLocalOfferWithNoFingerprintWhenDtlsOn) {
+ RTCConfiguration config;
+ auto caller = CreatePeerConnectionWithAudioVideo(config);
+
+ auto offer = caller->CreateOffer();
+ SdpContentsForEach(RemoveDtlsFingerprint(), offer->description());
+
+ EXPECT_FALSE(caller->SetLocalDescription(std::move(offer)));
+}
+TEST_P(PeerConnectionCryptoTest,
+ FailToSetRemoteOfferWithNoFingerprintWhenDtlsOn) {
+ RTCConfiguration config;
+ auto caller = CreatePeerConnectionWithAudioVideo(config);
+ auto callee = CreatePeerConnectionWithAudioVideo(config);
+
+ auto offer = caller->CreateOffer();
+ SdpContentsForEach(RemoveDtlsFingerprint(), offer->description());
+
+ EXPECT_FALSE(callee->SetRemoteDescription(std::move(offer)));
+}
+TEST_P(PeerConnectionCryptoTest,
+       FailToSetLocalAnswerWithNoFingerprintWhenDtlsOn) {
+  RTCConfiguration config;
+  auto caller = CreatePeerConnectionWithAudioVideo(config);
+  auto callee = CreatePeerConnectionWithAudioVideo(config);
+
+  callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal());
+  auto answer = callee->CreateAnswer();
+  SdpContentsForEach(RemoveDtlsFingerprint(), answer->description());
+
+  // The munged answer lacks a DTLS fingerprint and must be rejected. This
+  // assertion was previously missing, leaving the test vacuous (compare the
+  // sibling FailToSet* tests above).
+  EXPECT_FALSE(callee->SetLocalDescription(std::move(answer)));
+}
+TEST_P(PeerConnectionCryptoTest,
+ FailToSetRemoteAnswerWithNoFingerprintWhenDtlsOn) {
+ RTCConfiguration config;
+ auto caller = CreatePeerConnectionWithAudioVideo(config);
+ auto callee = CreatePeerConnectionWithAudioVideo(config);
+
+ callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal());
+ auto answer = callee->CreateAnswerAndSetAsLocal();
+ SdpContentsForEach(RemoveDtlsFingerprint(), answer->description());
+
+ EXPECT_FALSE(caller->SetRemoteDescription(std::move(answer)));
+}
+
#if defined(WEBRTC_FUCHSIA)
// Test that an offer/answer can be exchanged when encryption is disabled.
TEST_P(PeerConnectionCryptoTest, ExchangeOfferAnswerWhenNoEncryption) {
  // Turn off encryption at the factory level before creating the peers.
  PeerConnectionFactoryInterface::Options factory_options;
  factory_options.disable_encryption = true;
  pc_factory_->SetOptions(factory_options);

  RTCConfiguration config;
  config.enable_dtls_srtp.emplace(false);
  auto offerer = CreatePeerConnectionWithAudioVideo(config);
  auto answerer = CreatePeerConnectionWithAudioVideo(config);

  // Offer leg.
  auto offer = offerer->CreateOfferAndSetAsLocal();
  ASSERT_TRUE(offer);
  ASSERT_TRUE(answerer->SetRemoteDescription(std::move(offer)));

  // Answer leg.
  auto answer = answerer->CreateAnswerAndSetAsLocal();
  ASSERT_TRUE(answer);
  ASSERT_TRUE(offerer->SetRemoteDescription(std::move(answer)));
}
#endif
+
+// Tests that a DTLS call can be established when the certificate is specified
+// in the PeerConnection config and no certificate generator is specified.
+TEST_P(PeerConnectionCryptoTest,
+ ExchangeOfferAnswerWhenDtlsCertificateInConfig) {
+ RTCConfiguration caller_config;
+ caller_config.certificates.push_back(
+ FakeRTCCertificateGenerator::GenerateCertificate());
+ auto caller = CreatePeerConnectionWithAudioVideo(caller_config);
+
+ RTCConfiguration callee_config;
+ callee_config.certificates.push_back(
+ FakeRTCCertificateGenerator::GenerateCertificate());
+ auto callee = CreatePeerConnectionWithAudioVideo(callee_config);
+
+ auto offer = caller->CreateOfferAndSetAsLocal();
+ ASSERT_TRUE(offer);
+ ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer)));
+
+ auto answer = callee->CreateAnswerAndSetAsLocal();
+ ASSERT_TRUE(answer);
+ ASSERT_TRUE(caller->SetRemoteDescription(std::move(answer)));
+}
+
+// The following parameterized test verifies that CreateOffer/CreateAnswer
+// returns successfully (or with failure if the underlying certificate generator
+// fails) no matter when the DTLS certificate is generated. If multiple
+// CreateOffer/CreateAnswer calls are made while waiting for the certificate,
+// they all finish after the certificate is generated.
+
// Whether the certificate will be generated before calling CreateOffer or
// while CreateOffer is executing.
enum class CertGenTime { kBefore, kDuring };

// Pretty-printer so parameterized test names read "before"/"during".
std::ostream& operator<<(std::ostream& out, CertGenTime value) {
  if (value == CertGenTime::kBefore) {
    return out << "before";
  }
  if (value == CertGenTime::kDuring) {
    return out << "during";
  }
  return out << "unknown";
}
+
// Whether the fake certificate generator will produce a certificate or fail.
enum class CertGenResult { kSucceed, kFail };

// Pretty-printer so parameterized test names read "succeed"/"fail".
std::ostream& operator<<(std::ostream& out, CertGenResult value) {
  if (value == CertGenResult::kSucceed) {
    return out << "succeed";
  }
  if (value == CertGenResult::kFail) {
    return out << "fail";
  }
  return out << "unknown";
}
+
+class PeerConnectionCryptoDtlsCertGenTest
+ : public PeerConnectionCryptoBaseTest,
+ public ::testing::WithParamInterface<std::tuple<SdpSemantics,
+ SdpType,
+ CertGenTime,
+ CertGenResult,
+ size_t>> {
+ protected:
+ PeerConnectionCryptoDtlsCertGenTest()
+ : PeerConnectionCryptoBaseTest(std::get<0>(GetParam())) {
+ sdp_type_ = std::get<1>(GetParam());
+ cert_gen_time_ = std::get<2>(GetParam());
+ cert_gen_result_ = std::get<3>(GetParam());
+ concurrent_calls_ = std::get<4>(GetParam());
+ }
+
+ SdpType sdp_type_;
+ CertGenTime cert_gen_time_;
+ CertGenResult cert_gen_result_;
+ size_t concurrent_calls_;
+};
+
// Drives CreateOffer/CreateAnswer through a fake certificate generator and
// verifies that all `concurrent_calls_` requests complete with the expected
// success/failure, regardless of when the certificate becomes available.
TEST_P(PeerConnectionCryptoDtlsCertGenTest, TestCertificateGeneration) {
  RTCConfiguration config;
  auto owned_fake_certificate_generator =
      std::make_unique<FakeRTCCertificateGenerator>();
  // Non-owning handle kept for inspection; ownership moves into the PC below.
  auto* fake_certificate_generator = owned_fake_certificate_generator.get();
  fake_certificate_generator->set_should_fail(cert_gen_result_ ==
                                              CertGenResult::kFail);
  fake_certificate_generator->set_should_wait(cert_gen_time_ ==
                                              CertGenTime::kDuring);
  WrapperPtr pc;
  if (sdp_type_ == SdpType::kOffer) {
    pc = CreatePeerConnectionWithAudioVideo(
        config, std::move(owned_fake_certificate_generator));
  } else {
    // For the answer case the PC under test first needs a remote offer.
    auto caller = CreatePeerConnectionWithAudioVideo(config);
    pc = CreatePeerConnectionWithAudioVideo(
        config, std::move(owned_fake_certificate_generator));
    pc->SetRemoteDescription(caller->CreateOfferAndSetAsLocal());
  }
  if (cert_gen_time_ == CertGenTime::kBefore) {
    // Wait until generation finished (either outcome) before creating SDP.
    ASSERT_TRUE_WAIT(fake_certificate_generator->generated_certificates() +
                             fake_certificate_generator->generated_failures() >
                         0,
                     kGenerateCertTimeout);
  } else {
    // kDuring: generation must not have completed yet; release the hold so it
    // completes while CreateOffer/CreateAnswer is pending.
    ASSERT_EQ(fake_certificate_generator->generated_certificates(), 0);
    fake_certificate_generator->set_should_wait(false);
  }
  std::vector<rtc::scoped_refptr<MockCreateSessionDescriptionObserver>>
      observers;
  // Fire all concurrent SDP-creation requests before waiting on any of them.
  for (size_t i = 0; i < concurrent_calls_; i++) {
    rtc::scoped_refptr<MockCreateSessionDescriptionObserver> observer =
        rtc::make_ref_counted<MockCreateSessionDescriptionObserver>();
    observers.push_back(observer);
    if (sdp_type_ == SdpType::kOffer) {
      pc->pc()->CreateOffer(observer.get(),
                            PeerConnectionInterface::RTCOfferAnswerOptions());
    } else {
      pc->pc()->CreateAnswer(observer.get(),
                             PeerConnectionInterface::RTCOfferAnswerOptions());
    }
  }
  // Every pending request must complete with the outcome implied by the
  // generator's configured success/failure.
  for (auto& observer : observers) {
    EXPECT_TRUE_WAIT(observer->called(), 1000);
    if (cert_gen_result_ == CertGenResult::kSucceed) {
      EXPECT_TRUE(observer->result());
    } else {
      EXPECT_FALSE(observer->result());
    }
  }
}
+
// Full cross-product: both SDP semantics, offer and answer, certificate ready
// before vs. during the call, success vs. failure, and 1 vs. 3 concurrent
// CreateOffer/CreateAnswer calls.
INSTANTIATE_TEST_SUITE_P(
    PeerConnectionCryptoTest,
    PeerConnectionCryptoDtlsCertGenTest,
    Combine(Values(SdpSemantics::kPlanB_DEPRECATED, SdpSemantics::kUnifiedPlan),
            Values(SdpType::kOffer, SdpType::kAnswer),
            Values(CertGenTime::kBefore, CertGenTime::kDuring),
            Values(CertGenResult::kSucceed, CertGenResult::kFail),
            Values(1, 3)));
+
// Test that we can create and set an answer correctly when different
// SSL roles have been negotiated for different transports.
// See: https://bugs.chromium.org/p/webrtc/issues/detail?id=4525
TEST_P(PeerConnectionCryptoTest, CreateAnswerWithDifferentSslRoles) {
  auto caller = CreatePeerConnectionWithAudioVideo();
  auto callee = CreatePeerConnectionWithAudioVideo();

  // BUNDLE is kept off initially so audio and video use separate transports
  // and can negotiate different roles.
  RTCOfferAnswerOptions options_no_bundle;
  options_no_bundle.use_rtp_mux = false;

  // First, negotiate different SSL roles for audio and video.
  ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
  auto answer = callee->CreateAnswer(options_no_bundle);

  // Callee takes the active role on audio and the passive role on video.
  AudioConnectionRole(answer->description()) = cricket::CONNECTIONROLE_ACTIVE;
  VideoConnectionRole(answer->description()) = cricket::CONNECTIONROLE_PASSIVE;

  ASSERT_TRUE(
      callee->SetLocalDescription(CloneSessionDescription(answer.get())));
  ASSERT_TRUE(caller->SetRemoteDescription(std::move(answer)));

  // Now create an offer in the reverse direction, and ensure the initial
  // offerer responds with an answer with the correct SSL roles.
  ASSERT_TRUE(caller->SetRemoteDescription(callee->CreateOfferAndSetAsLocal()));
  answer = caller->CreateAnswer(options_no_bundle);

  // The caller keeps the complementary roles negotiated above: passive on
  // audio (callee was active) and active on video (callee was passive).
  EXPECT_EQ(cricket::CONNECTIONROLE_PASSIVE,
            AudioConnectionRole(answer->description()));
  EXPECT_EQ(cricket::CONNECTIONROLE_ACTIVE,
            VideoConnectionRole(answer->description()));

  ASSERT_TRUE(
      caller->SetLocalDescription(CloneSessionDescription(answer.get())));
  ASSERT_TRUE(callee->SetRemoteDescription(std::move(answer)));

  // Lastly, start BUNDLE-ing on "audio", expecting that the "passive" role of
  // audio is transferred over to video in the answer that completes the BUNDLE
  // negotiation.
  RTCOfferAnswerOptions options_bundle;
  options_bundle.use_rtp_mux = true;

  ASSERT_TRUE(caller->SetRemoteDescription(callee->CreateOfferAndSetAsLocal()));
  answer = caller->CreateAnswer(options_bundle);

  EXPECT_EQ(cricket::CONNECTIONROLE_PASSIVE,
            AudioConnectionRole(answer->description()));
  EXPECT_EQ(cricket::CONNECTIONROLE_PASSIVE,
            VideoConnectionRole(answer->description()));

  ASSERT_TRUE(
      caller->SetLocalDescription(CloneSessionDescription(answer.get())));
  ASSERT_TRUE(callee->SetRemoteDescription(std::move(answer)));
}
+
// Tests that if the DTLS fingerprint is invalid then all future calls to
// SetLocalDescription and SetRemoteDescription will fail due to a session
// error.
// This is a regression test for crbug.com/800775
TEST_P(PeerConnectionCryptoTest, SessionErrorIfFingerprintInvalid) {
  auto callee_certificate = rtc::RTCCertificate::FromPEM(kRsaPems[0]);
  auto other_certificate = rtc::RTCCertificate::FromPEM(kRsaPems[1]);

  auto caller = CreatePeerConnectionWithAudioVideo();
  RTCConfiguration callee_config;
  callee_config.certificates.push_back(callee_certificate);
  auto callee = CreatePeerConnectionWithAudioVideo(callee_config);

  ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));

  // Create an invalid answer with the other certificate's fingerprint.
  auto valid_answer = callee->CreateAnswer();
  auto invalid_answer = CloneSessionDescription(valid_answer.get());
  auto* audio_content =
      cricket::GetFirstAudioContent(invalid_answer->description());
  ASSERT_TRUE(audio_content);
  auto* audio_transport_info =
      invalid_answer->description()->GetTransportInfoByName(
          audio_content->name);
  ASSERT_TRUE(audio_transport_info);
  // Overwrite the audio transport's fingerprint with one that does not match
  // the identity configured on the callee.
  audio_transport_info->description.identity_fingerprint =
      rtc::SSLFingerprint::CreateFromCertificate(*other_certificate);

  // Set the invalid answer and expect a fingerprint error.
  std::string error;
  ASSERT_FALSE(callee->SetLocalDescription(std::move(invalid_answer), &error));
  EXPECT_PRED_FORMAT2(AssertStringContains, error,
                      "Local fingerprint does not match identity.");

  // Make sure that setting a valid remote offer or local answer also fails now.
  ASSERT_FALSE(callee->SetRemoteDescription(caller->CreateOffer(), &error));
  EXPECT_PRED_FORMAT2(AssertStringContains, error,
                      "Session error code: ERROR_CONTENT.");
  ASSERT_FALSE(callee->SetLocalDescription(std::move(valid_answer), &error));
  EXPECT_PRED_FORMAT2(AssertStringContains, error,
                      "Session error code: ERROR_CONTENT.");
}
+
// Run the whole PeerConnectionCryptoTest suite under both SDP semantics.
INSTANTIATE_TEST_SUITE_P(PeerConnectionCryptoTest,
                         PeerConnectionCryptoTest,
                         Values(SdpSemantics::kPlanB_DEPRECATED,
                                SdpSemantics::kUnifiedPlan));
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/peer_connection_data_channel_unittest.cc b/third_party/libwebrtc/pc/peer_connection_data_channel_unittest.cc
new file mode 100644
index 0000000000..3bb2088866
--- /dev/null
+++ b/third_party/libwebrtc/pc/peer_connection_data_channel_unittest.cc
@@ -0,0 +1,337 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+#include <string>
+#include <type_traits>
+#include <utility>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/call/call_factory_interface.h"
+#include "api/jsep.h"
+#include "api/media_types.h"
+#include "api/peer_connection_interface.h"
+#include "api/scoped_refptr.h"
+#include "api/sctp_transport_interface.h"
+#include "api/task_queue/default_task_queue_factory.h"
+#include "api/task_queue/task_queue_factory.h"
+#include "api/transport/sctp_transport_factory_interface.h"
+#include "media/base/fake_media_engine.h"
+#include "media/base/media_engine.h"
+#include "p2p/base/p2p_constants.h"
+#include "p2p/base/port_allocator.h"
+#include "pc/media_session.h"
+#include "pc/peer_connection.h"
+#include "pc/peer_connection_proxy.h"
+#include "pc/peer_connection_wrapper.h"
+#include "pc/sctp_transport.h"
+#include "pc/sdp_utils.h"
+#include "pc/session_description.h"
+#include "pc/test/mock_peer_connection_observers.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/rtc_certificate_generator.h"
+#include "rtc_base/thread.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+#ifdef WEBRTC_ANDROID
+#include "pc/test/android_test_initializer.h"
+#endif
+#include "rtc_base/virtual_socket_server.h"
+#include "test/pc/sctp/fake_sctp_transport.h"
+
+namespace webrtc {
+
+using RTCConfiguration = PeerConnectionInterface::RTCConfiguration;
+using RTCOfferAnswerOptions = PeerConnectionInterface::RTCOfferAnswerOptions;
+using ::testing::HasSubstr;
+using ::testing::Not;
+using ::testing::Values;
+
+namespace {
+
+PeerConnectionFactoryDependencies CreatePeerConnectionFactoryDependencies() {
+ PeerConnectionFactoryDependencies deps;
+ deps.network_thread = rtc::Thread::Current();
+ deps.worker_thread = rtc::Thread::Current();
+ deps.signaling_thread = rtc::Thread::Current();
+ deps.task_queue_factory = CreateDefaultTaskQueueFactory();
+ deps.media_engine = std::make_unique<cricket::FakeMediaEngine>();
+ deps.call_factory = CreateCallFactory();
+ deps.sctp_factory = std::make_unique<FakeSctpTransportFactory>();
+ return deps;
+}
+
+} // namespace
+
// PeerConnectionWrapper that additionally exposes the fake SCTP transport
// factory backing its PeerConnection, plus internal SCTP accessors used by
// the tests below.
class PeerConnectionWrapperForDataChannelTest : public PeerConnectionWrapper {
 public:
  using PeerConnectionWrapper::PeerConnectionWrapper;

  // Non-owning view of the fake factory (owned by the PC factory); lets tests
  // inspect the SCTP transports it created.
  FakeSctpTransportFactory* sctp_transport_factory() {
    return sctp_transport_factory_;
  }

  void set_sctp_transport_factory(
      FakeSctpTransportFactory* sctp_transport_factory) {
    sctp_transport_factory_ = sctp_transport_factory;
  }

  // MID of the SCTP data section, once one has been negotiated.
  absl::optional<std::string> sctp_mid() {
    return GetInternalPeerConnection()->sctp_mid();
  }

  // Name of the transport currently carrying SCTP (may differ from sctp_mid()
  // after BUNDLE negotiation).
  absl::optional<std::string> sctp_transport_name() {
    return GetInternalPeerConnection()->sctp_transport_name();
  }

  // Unwraps the proxy to reach the PeerConnection implementation directly.
  PeerConnection* GetInternalPeerConnection() {
    auto* pci =
        static_cast<PeerConnectionProxyWithInternal<PeerConnectionInterface>*>(
            pc());
    return static_cast<PeerConnection*>(pci->internal());
  }

 private:
  FakeSctpTransportFactory* sctp_transport_factory_ = nullptr;
};
+
// Base fixture: runs everything on a single virtual-socket-backed thread and
// creates PeerConnections whose factory uses a fake media engine and a fake
// SCTP transport factory.
class PeerConnectionDataChannelBaseTest : public ::testing::Test {
 protected:
  typedef std::unique_ptr<PeerConnectionWrapperForDataChannelTest> WrapperPtr;

  explicit PeerConnectionDataChannelBaseTest(SdpSemantics sdp_semantics)
      : vss_(new rtc::VirtualSocketServer()),
        main_(vss_.get()),
        sdp_semantics_(sdp_semantics) {
#ifdef WEBRTC_ANDROID
    InitializeAndroidObjects();
#endif
  }

  WrapperPtr CreatePeerConnection() {
    return CreatePeerConnection(RTCConfiguration());
  }

  WrapperPtr CreatePeerConnection(const RTCConfiguration& config) {
    return CreatePeerConnection(config,
                                PeerConnectionFactoryInterface::Options());
  }

  // Creates a wrapped PeerConnection using the fixture's SDP semantics.
  // Returns nullptr if creation fails.
  WrapperPtr CreatePeerConnection(
      const RTCConfiguration& config,
      const PeerConnectionFactoryInterface::Options factory_options) {
    auto factory_deps = CreatePeerConnectionFactoryDependencies();
    // Keep a raw handle before ownership of the factory deps moves into the
    // PC factory, so the wrapper can expose the fake SCTP factory.
    FakeSctpTransportFactory* fake_sctp_transport_factory =
        static_cast<FakeSctpTransportFactory*>(factory_deps.sctp_factory.get());
    rtc::scoped_refptr<PeerConnectionFactoryInterface> pc_factory =
        CreateModularPeerConnectionFactory(std::move(factory_deps));
    pc_factory->SetOptions(factory_options);
    auto observer = std::make_unique<MockPeerConnectionObserver>();
    // Force the fixture's SDP semantics regardless of the caller's config.
    RTCConfiguration modified_config = config;
    modified_config.sdp_semantics = sdp_semantics_;
    auto result = pc_factory->CreatePeerConnectionOrError(
        modified_config, PeerConnectionDependencies(observer.get()));
    if (!result.ok()) {
      return nullptr;
    }

    observer->SetPeerConnectionInterface(result.value().get());
    auto wrapper = std::make_unique<PeerConnectionWrapperForDataChannelTest>(
        pc_factory, result.MoveValue(), std::move(observer));
    wrapper->set_sctp_transport_factory(fake_sctp_transport_factory);
    return wrapper;
  }

  // Accepts the same arguments as CreatePeerConnection and adds a default data
  // channel.
  template <typename... Args>
  WrapperPtr CreatePeerConnectionWithDataChannel(Args&&... args) {
    auto wrapper = CreatePeerConnection(std::forward<Args>(args)...);
    if (!wrapper) {
      return nullptr;
    }
    EXPECT_TRUE(wrapper->pc()->CreateDataChannelOrError("dc", nullptr).ok());
    return wrapper;
  }

  // Changes the SCTP data channel port on the given session description.
  void ChangeSctpPortOnDescription(cricket::SessionDescription* desc,
                                   int port) {
    auto* data_content = cricket::GetFirstDataContent(desc);
    RTC_DCHECK(data_content);
    auto* data_desc = data_content->media_description()->as_sctp();
    RTC_DCHECK(data_desc);
    data_desc->set_port(port);
  }

  // Virtual socket server and the thread that drives it; `main_` must outlive
  // the PeerConnections created above.
  std::unique_ptr<rtc::VirtualSocketServer> vss_;
  rtc::AutoSocketServerThread main_;
  const SdpSemantics sdp_semantics_;
};
+
// Parameterized variant: runs each test under the SDP semantics supplied by
// INSTANTIATE_TEST_SUITE_P at the bottom of the file.
class PeerConnectionDataChannelTest
    : public PeerConnectionDataChannelBaseTest,
      public ::testing::WithParamInterface<SdpSemantics> {
 protected:
  PeerConnectionDataChannelTest()
      : PeerConnectionDataChannelBaseTest(GetParam()) {}
};
+
// Non-parameterized variant pinned to Unified Plan semantics.
class PeerConnectionDataChannelUnifiedPlanTest
    : public PeerConnectionDataChannelBaseTest {
 protected:
  PeerConnectionDataChannelUnifiedPlanTest()
      : PeerConnectionDataChannelBaseTest(SdpSemantics::kUnifiedPlan) {}
};
+
+TEST_P(PeerConnectionDataChannelTest, InternalSctpTransportDeletedOnTeardown) {
+ auto caller = CreatePeerConnectionWithDataChannel();
+
+ ASSERT_TRUE(caller->SetLocalDescription(caller->CreateOffer()));
+ EXPECT_TRUE(caller->sctp_transport_factory()->last_fake_sctp_transport());
+
+ rtc::scoped_refptr<SctpTransportInterface> sctp_transport =
+ caller->GetInternalPeerConnection()->GetSctpTransport();
+
+ caller.reset();
+ EXPECT_EQ(static_cast<SctpTransport*>(sctp_transport.get())->internal(),
+ nullptr);
+}
+
+// Test that sctp_mid/sctp_transport_name (used for stats) are correct
+// before and after BUNDLE is negotiated.
+TEST_P(PeerConnectionDataChannelTest, SctpContentAndTransportNameSetCorrectly) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+
+ // Initially these fields should be empty.
+ EXPECT_FALSE(caller->sctp_mid());
+ EXPECT_FALSE(caller->sctp_transport_name());
+
+ // Create offer with audio/video/data.
+ // Default bundle policy is "balanced", so data should be using its own
+ // transport.
+ caller->AddAudioTrack("a");
+ caller->AddVideoTrack("v");
+ caller->pc()->CreateDataChannelOrError("dc", nullptr);
+
+ auto offer = caller->CreateOffer();
+ const auto& offer_contents = offer->description()->contents();
+ ASSERT_EQ(cricket::MEDIA_TYPE_AUDIO,
+ offer_contents[0].media_description()->type());
+ std::string audio_mid = offer_contents[0].name;
+ ASSERT_EQ(cricket::MEDIA_TYPE_DATA,
+ offer_contents[2].media_description()->type());
+ std::string data_mid = offer_contents[2].name;
+
+ ASSERT_TRUE(
+ caller->SetLocalDescription(CloneSessionDescription(offer.get())));
+ ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer)));
+
+ ASSERT_TRUE(caller->sctp_mid());
+ EXPECT_EQ(data_mid, *caller->sctp_mid());
+ ASSERT_TRUE(caller->sctp_transport_name());
+ EXPECT_EQ(data_mid, *caller->sctp_transport_name());
+
+ // Create answer that finishes BUNDLE negotiation, which means everything
+ // should be bundled on the first transport (audio).
+ RTCOfferAnswerOptions options;
+ options.use_rtp_mux = true;
+ ASSERT_TRUE(
+ caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal()));
+
+ ASSERT_TRUE(caller->sctp_mid());
+ EXPECT_EQ(data_mid, *caller->sctp_mid());
+ ASSERT_TRUE(caller->sctp_transport_name());
+ EXPECT_EQ(audio_mid, *caller->sctp_transport_name());
+}
+
+TEST_P(PeerConnectionDataChannelTest,
+ CreateOfferWithNoDataChannelsGivesNoDataSection) {
+ auto caller = CreatePeerConnection();
+ auto offer = caller->CreateOffer();
+
+ EXPECT_FALSE(offer->description()->GetContentByName(cricket::CN_DATA));
+ EXPECT_FALSE(offer->description()->GetTransportInfoByName(cricket::CN_DATA));
+}
+
+TEST_P(PeerConnectionDataChannelTest,
+ CreateAnswerWithRemoteSctpDataChannelIncludesDataSection) {
+ auto caller = CreatePeerConnectionWithDataChannel();
+ auto callee = CreatePeerConnection();
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+
+ auto answer = callee->CreateAnswer();
+ ASSERT_TRUE(answer);
+ auto* data_content = cricket::GetFirstDataContent(answer->description());
+ ASSERT_TRUE(data_content);
+ EXPECT_FALSE(data_content->rejected);
+ EXPECT_TRUE(
+ answer->description()->GetTransportInfoByName(data_content->name));
+}
+
+TEST_P(PeerConnectionDataChannelTest, SctpPortPropagatedFromSdpToTransport) {
+ constexpr int kNewSendPort = 9998;
+ constexpr int kNewRecvPort = 7775;
+
+ auto caller = CreatePeerConnectionWithDataChannel();
+ auto callee = CreatePeerConnectionWithDataChannel();
+
+ auto offer = caller->CreateOffer();
+ ChangeSctpPortOnDescription(offer->description(), kNewSendPort);
+ ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer)));
+
+ auto answer = callee->CreateAnswer();
+ ChangeSctpPortOnDescription(answer->description(), kNewRecvPort);
+ std::string sdp;
+ answer->ToString(&sdp);
+ ASSERT_TRUE(callee->SetLocalDescription(std::move(answer)));
+ auto* callee_transport =
+ callee->sctp_transport_factory()->last_fake_sctp_transport();
+ ASSERT_TRUE(callee_transport);
+ EXPECT_EQ(kNewSendPort, callee_transport->remote_port());
+ EXPECT_EQ(kNewRecvPort, callee_transport->local_port());
+}
+
+TEST_P(PeerConnectionDataChannelTest, ModernSdpSyntaxByDefault) {
+ PeerConnectionInterface::RTCOfferAnswerOptions options;
+ auto caller = CreatePeerConnectionWithDataChannel();
+ auto offer = caller->CreateOffer(options);
+ EXPECT_FALSE(cricket::GetFirstSctpDataContentDescription(offer->description())
+ ->use_sctpmap());
+ std::string sdp;
+ offer->ToString(&sdp);
+ RTC_LOG(LS_ERROR) << sdp;
+ EXPECT_THAT(sdp, HasSubstr(" UDP/DTLS/SCTP webrtc-datachannel"));
+ EXPECT_THAT(sdp, Not(HasSubstr("a=sctpmap:")));
+}
+
+TEST_P(PeerConnectionDataChannelTest, ObsoleteSdpSyntaxIfSet) {
+ PeerConnectionInterface::RTCOfferAnswerOptions options;
+ options.use_obsolete_sctp_sdp = true;
+ auto caller = CreatePeerConnectionWithDataChannel();
+ auto offer = caller->CreateOffer(options);
+ EXPECT_TRUE(cricket::GetFirstSctpDataContentDescription(offer->description())
+ ->use_sctpmap());
+ std::string sdp;
+ offer->ToString(&sdp);
+ EXPECT_THAT(sdp, Not(HasSubstr(" UDP/DTLS/SCTP webrtc-datachannel")));
+ EXPECT_THAT(sdp, HasSubstr("a=sctpmap:"));
+}
+
// Run the data channel tests under both SDP semantics.
INSTANTIATE_TEST_SUITE_P(PeerConnectionDataChannelTest,
                         PeerConnectionDataChannelTest,
                         Values(SdpSemantics::kPlanB_DEPRECATED,
                                SdpSemantics::kUnifiedPlan));
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/peer_connection_encodings_integrationtest.cc b/third_party/libwebrtc/pc/peer_connection_encodings_integrationtest.cc
new file mode 100644
index 0000000000..c7181c53ae
--- /dev/null
+++ b/third_party/libwebrtc/pc/peer_connection_encodings_integrationtest.cc
@@ -0,0 +1,2008 @@
+/*
+ * Copyright 2023 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <string>
+#include <vector>
+
+#include "absl/strings/match.h"
+#include "absl/types/optional.h"
+#include "api/audio_codecs/builtin_audio_decoder_factory.h"
+#include "api/audio_codecs/builtin_audio_encoder_factory.h"
+#include "api/audio_codecs/opus_audio_decoder_factory.h"
+#include "api/audio_codecs/opus_audio_encoder_factory.h"
+#include "api/media_types.h"
+#include "api/rtc_error.h"
+#include "api/rtp_parameters.h"
+#include "api/rtp_transceiver_direction.h"
+#include "api/rtp_transceiver_interface.h"
+#include "api/stats/rtcstats_objects.h"
+#include "api/units/data_rate.h"
+#include "api/video_codecs/video_decoder_factory_template.h"
+#include "api/video_codecs/video_decoder_factory_template_dav1d_adapter.h"
+#include "api/video_codecs/video_decoder_factory_template_libvpx_vp8_adapter.h"
+#include "api/video_codecs/video_decoder_factory_template_libvpx_vp9_adapter.h"
+#include "api/video_codecs/video_decoder_factory_template_open_h264_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template.h"
+#include "api/video_codecs/video_encoder_factory_template_libaom_av1_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template_libvpx_vp8_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template_libvpx_vp9_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template_open_h264_adapter.h"
+#include "pc/sdp_utils.h"
+#include "pc/simulcast_description.h"
+#include "pc/test/mock_peer_connection_observers.h"
+#include "pc/test/peer_connection_test_wrapper.h"
+#include "pc/test/simulcast_layer_util.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/physical_socket_server.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+using ::testing::Eq;
+using ::testing::Optional;
+using ::testing::SizeIs;
+using ::testing::StrCaseEq;
+using ::testing::StrEq;
+
+namespace webrtc {
+
+namespace {
+
// Wall-clock limit for the EXPECT_TRUE_WAIT-style polling done in these tests.
constexpr TimeDelta kDefaultTimeout = TimeDelta::Seconds(5);
// Most tests pass in 20-30 seconds, but some tests take longer such as AV1
// requiring additional ramp-up time (https://crbug.com/webrtc/15006) or SVC
// (LxTx_KEY) being slower than simulcast to send top spatial layer.
// TODO(https://crbug.com/webrtc/15076): Remove need for long rampup timeouts by
// using simulated time.
constexpr TimeDelta kLongTimeoutForRampingUp = TimeDelta::Minutes(1);

// The max bitrate 1500 kbps may be subject to change in the future. What we're
// interested in here is that all code paths that result in L1T3 result in the
// same target bitrate which does not exceed this limit.
constexpr DataRate kVp9ExpectedMaxBitrateForL1T3 =
    DataRate::KilobitsPerSec(1500);
+
// Functor handed to INSTANTIATE_TEST_SUITE_P so string-parameterized test
// names are the parameter string itself.
struct StringParamToString {
  std::string operator()(const ::testing::TestParamInfo<std::string>& info) {
    return info.param;
  }
};
+
+// RTX, RED and FEC are reliability mechanisms used in combinations with other
+// codecs, but are not themselves a specific codec. Typically you don't want to
+// filter these out of the list of codec preferences.
+bool IsReliabilityMechanism(const webrtc::RtpCodecCapability& codec) {
+ return absl::EqualsIgnoreCase(codec.name, cricket::kRtxCodecName) ||
+ absl::EqualsIgnoreCase(codec.name, cricket::kRedCodecName) ||
+ absl::EqualsIgnoreCase(codec.name, cricket::kUlpfecCodecName);
+}
+
+std::string GetCurrentCodecMimeType(
+ rtc::scoped_refptr<const webrtc::RTCStatsReport> report,
+ const webrtc::RTCOutboundRtpStreamStats& outbound_rtp) {
+ return outbound_rtp.codec_id.is_defined()
+ ? *report->GetAs<webrtc::RTCCodecStats>(*outbound_rtp.codec_id)
+ ->mime_type
+ : "";
+}
+
// Expected (rid, width, height) triple for one simulcast layer.
struct RidAndResolution {
  std::string rid;
  uint32_t width;
  uint32_t height;
};
+
+const webrtc::RTCOutboundRtpStreamStats* FindOutboundRtpByRid(
+ const std::vector<const webrtc::RTCOutboundRtpStreamStats*>& outbound_rtps,
+ const absl::string_view& rid) {
+ for (const auto* outbound_rtp : outbound_rtps) {
+ if (outbound_rtp->rid.is_defined() && *outbound_rtp->rid == rid) {
+ return outbound_rtp;
+ }
+ }
+ return nullptr;
+}
+
+} // namespace
+
+class PeerConnectionEncodingsIntegrationTest : public ::testing::Test {
+ public:
  // Starts the shared background thread (backed by the physical socket
  // server) that CreatePc() hands to every test wrapper.
  PeerConnectionEncodingsIntegrationTest()
      : background_thread_(std::make_unique<rtc::Thread>(&pss_)) {
    RTC_CHECK(background_thread_->Start());
  }
+
  // Creates a test wrapper named "pc" running on the shared background
  // thread, using the builtin audio encoder/decoder factories.
  rtc::scoped_refptr<PeerConnectionTestWrapper> CreatePc() {
    auto pc_wrapper = rtc::make_ref_counted<PeerConnectionTestWrapper>(
        "pc", &pss_, background_thread_.get(), background_thread_.get());
    pc_wrapper->CreatePc({}, webrtc::CreateBuiltinAudioEncoderFactory(),
                         webrtc::CreateBuiltinAudioDecoderFactory());
    return pc_wrapper;
  }
+
  // Captures a 1280x720 video track on `local` and adds it as a transceiver
  // configured with the given simulcast layers; EXPECTs success.
  // NOTE(review): `remote` is not used in this helper — confirm whether the
  // parameter can be dropped or is kept for call-site symmetry.
  rtc::scoped_refptr<RtpTransceiverInterface> AddTransceiverWithSimulcastLayers(
      rtc::scoped_refptr<PeerConnectionTestWrapper> local,
      rtc::scoped_refptr<PeerConnectionTestWrapper> remote,
      std::vector<cricket::SimulcastLayer> init_layers) {
    rtc::scoped_refptr<webrtc::MediaStreamInterface> stream =
        local->GetUserMedia(
            /*audio=*/false, cricket::AudioOptions(), /*video=*/true,
            {.width = 1280, .height = 720});
    rtc::scoped_refptr<VideoTrackInterface> track = stream->GetVideoTracks()[0];

    RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>>
        transceiver_or_error = local->pc()->AddTransceiver(
            track, CreateTransceiverInit(init_layers));
    EXPECT_TRUE(transceiver_or_error.ok());
    return transceiver_or_error.value();
  }
+
+ bool HasSenderVideoCodecCapability(
+ rtc::scoped_refptr<PeerConnectionTestWrapper> pc_wrapper,
+ absl::string_view codec_name) {
+ std::vector<RtpCodecCapability> codecs =
+ pc_wrapper->pc_factory()
+ ->GetRtpSenderCapabilities(cricket::MEDIA_TYPE_VIDEO)
+ .codecs;
+ return std::find_if(codecs.begin(), codecs.end(),
+ [&codec_name](const RtpCodecCapability& codec) {
+ return absl::EqualsIgnoreCase(codec.name, codec_name);
+ }) != codecs.end();
+ }
+
  // Returns the sender video capabilities filtered down to `codec_name` plus
  // the reliability mechanisms (RTX/RED/FEC). DCHECKs that the requested
  // codec is actually supported.
  std::vector<RtpCodecCapability> GetCapabilitiesAndRestrictToCodec(
      rtc::scoped_refptr<PeerConnectionTestWrapper> pc_wrapper,
      absl::string_view codec_name) {
    std::vector<RtpCodecCapability> codecs =
        pc_wrapper->pc_factory()
            ->GetRtpSenderCapabilities(cricket::MEDIA_TYPE_VIDEO)
            .codecs;
    // Erase-remove: drop everything that is neither a reliability mechanism
    // nor the requested codec.
    codecs.erase(std::remove_if(codecs.begin(), codecs.end(),
                                [&codec_name](const RtpCodecCapability& codec) {
                                  return !IsReliabilityMechanism(codec) &&
                                         !absl::EqualsIgnoreCase(codec.name,
                                                                 codec_name);
                                }),
                 codecs.end());
    // The requested codec must have survived the filter.
    RTC_DCHECK(std::find_if(codecs.begin(), codecs.end(),
                            [&codec_name](const RtpCodecCapability& codec) {
                              return absl::EqualsIgnoreCase(codec.name,
                                                            codec_name);
                            }) != codecs.end());
    return codecs;
  }
+
  // Wires the two wrappers together so each side's ICE candidates are
  // automatically delivered to the other side as they become ready.
  void ExchangeIceCandidates(
      rtc::scoped_refptr<PeerConnectionTestWrapper> local_pc_wrapper,
      rtc::scoped_refptr<PeerConnectionTestWrapper> remote_pc_wrapper) {
    local_pc_wrapper->SignalOnIceCandidateReady.connect(
        remote_pc_wrapper.get(), &PeerConnectionTestWrapper::AddIceCandidate);
    remote_pc_wrapper->SignalOnIceCandidateReady.connect(
        local_pc_wrapper.get(), &PeerConnectionTestWrapper::AddIceCandidate);
  }
+
  // Runs a full offer/answer exchange where the local side sends simulcast:
  // the offer is stripped of simulcast before reaching the (singlecast-only)
  // remote, and the answer is rewritten to re-request the simulcast layers.
  void NegotiateWithSimulcastTweaks(
      rtc::scoped_refptr<PeerConnectionTestWrapper> local_pc_wrapper,
      rtc::scoped_refptr<PeerConnectionTestWrapper> remote_pc_wrapper) {
    // Create and set offer for `local_pc_wrapper`.
    std::unique_ptr<SessionDescriptionInterface> offer =
        CreateOffer(local_pc_wrapper);
    rtc::scoped_refptr<MockSetSessionDescriptionObserver> p1 =
        SetLocalDescription(local_pc_wrapper, offer.get());
    // Modify the offer before handoff because `remote_pc_wrapper` only supports
    // receiving singlecast.
    cricket::SimulcastDescription simulcast_description =
        RemoveSimulcast(offer.get());
    rtc::scoped_refptr<MockSetSessionDescriptionObserver> p2 =
        SetRemoteDescription(remote_pc_wrapper, offer.get());
    EXPECT_TRUE(Await({p1, p2}));

    // Create and set answer for `remote_pc_wrapper`.
    std::unique_ptr<SessionDescriptionInterface> answer =
        CreateAnswer(remote_pc_wrapper);
    p1 = SetLocalDescription(remote_pc_wrapper, answer.get());
    // Modify the answer before handoff because `local_pc_wrapper` should still
    // send simulcast.
    cricket::MediaContentDescription* mcd_answer =
        answer->description()->contents()[0].media_description();
    mcd_answer->mutable_streams().clear();
    // Re-add the layers saved from the original offer as receive layers.
    std::vector<cricket::SimulcastLayer> simulcast_layers =
        simulcast_description.send_layers().GetAllLayers();
    cricket::SimulcastLayerList& receive_layers =
        mcd_answer->simulcast_description().receive_layers();
    for (const auto& layer : simulcast_layers) {
      receive_layers.AddLayer(layer);
    }
    p2 = SetRemoteDescription(local_pc_wrapper, answer.get());
    EXPECT_TRUE(Await({p1, p2}));
  }
+
+ // Synchronously collects an RTCStatsReport from `pc_wrapper`, blocking the
+ // current thread for up to kDefaultTimeout.
+ rtc::scoped_refptr<const RTCStatsReport> GetStats(
+ rtc::scoped_refptr<PeerConnectionTestWrapper> pc_wrapper) {
+ auto callback = rtc::make_ref_counted<MockRTCStatsCollectorCallback>();
+ pc_wrapper->pc()->GetStats(callback.get());
+ EXPECT_TRUE_WAIT(callback->called(), kDefaultTimeout.ms());
+ return callback->report();
+ }
+
+ // Convenience overload that only checks the codec id, without any
+ // scalability mode expectation.
+ bool IsCodecIdDifferent(
+ rtc::scoped_refptr<PeerConnectionTestWrapper> pc_wrapper,
+ size_t index,
+ const std::string& codec_id) {
+ return IsCodecIdDifferentWithScalabilityMode(pc_wrapper, index, codec_id,
+ absl::nullopt);
+ }
+
+ // Returns true when the outbound-rtp at `index` reports a codec id different
+ // from `codec_id` and, if `wanted_scalability_mode` is set, also reports
+ // exactly that scalability mode. Fetches a fresh stats report on each call so
+ // it can be polled from *_WAIT macros.
+ bool IsCodecIdDifferentWithScalabilityMode(
+ rtc::scoped_refptr<PeerConnectionTestWrapper> pc_wrapper,
+ size_t index,
+ const std::string& codec_id,
+ absl::optional<std::string> wanted_scalability_mode) {
+ rtc::scoped_refptr<const RTCStatsReport> report = GetStats(pc_wrapper);
+ std::vector<const RTCOutboundRtpStreamStats*> outbound_rtps =
+ report->GetStatsOfType<RTCOutboundRtpStreamStats>();
+ return outbound_rtps[index]->codec_id.value() != codec_id &&
+ (!wanted_scalability_mode ||
+ (outbound_rtps[index]->scalability_mode.has_value() &&
+ outbound_rtps[index]->scalability_mode.value() ==
+ wanted_scalability_mode));
+ }
+
+ // Convenience overload: expects every one of the `num_layers` outbound-rtp
+ // streams to be actively sending.
+ bool HasOutboundRtpBytesSent(
+ rtc::scoped_refptr<PeerConnectionTestWrapper> pc_wrapper,
+ size_t num_layers) {
+ return HasOutboundRtpBytesSent(pc_wrapper, num_layers, num_layers);
+ }
+
+ // Returns true when stats report exactly `num_layers` outbound-rtp streams
+ // of which exactly `num_active_layers` have sent at least one byte. Polls a
+ // fresh stats report, so it is suitable for use in *_WAIT macros.
+ bool HasOutboundRtpBytesSent(
+ rtc::scoped_refptr<PeerConnectionTestWrapper> pc_wrapper,
+ size_t num_layers,
+ size_t num_active_layers) {
+ rtc::scoped_refptr<const RTCStatsReport> report = GetStats(pc_wrapper);
+ std::vector<const RTCOutboundRtpStreamStats*> outbound_rtps =
+ report->GetStatsOfType<RTCOutboundRtpStreamStats>();
+ if (outbound_rtps.size() != num_layers) {
+ return false;
+ }
+ size_t num_sending_layers = 0;
+ for (const auto* outbound_rtp : outbound_rtps) {
+ if (outbound_rtp->bytes_sent.is_defined() &&
+ *outbound_rtp->bytes_sent > 0u) {
+ ++num_sending_layers;
+ }
+ }
+ return num_sending_layers == num_active_layers;
+ }
+
+ // Returns true when the outbound-rtp with `rid` reports both the expected
+ // scalability mode and the target `frame_height`. Intended for polling from
+ // ASSERT_TRUE_WAIT while an encoder ramps up; sleeps 1s between misses to
+ // keep log output manageable.
+ bool HasOutboundRtpWithRidAndScalabilityMode(
+ rtc::scoped_refptr<PeerConnectionTestWrapper> pc_wrapper,
+ absl::string_view rid,
+ absl::string_view expected_scalability_mode,
+ uint32_t frame_height) {
+ rtc::scoped_refptr<const RTCStatsReport> report = GetStats(pc_wrapper);
+ std::vector<const RTCOutboundRtpStreamStats*> outbound_rtps =
+ report->GetStatsOfType<RTCOutboundRtpStreamStats>();
+ auto* outbound_rtp = FindOutboundRtpByRid(outbound_rtps, rid);
+ if (!outbound_rtp || !outbound_rtp->scalability_mode.is_defined() ||
+ *outbound_rtp->scalability_mode != expected_scalability_mode) {
+ return false;
+ }
+ if (outbound_rtp->frame_height.is_defined()) {
+ RTC_LOG(LS_INFO) << "Waiting for target resolution (" << frame_height
+ << "p). Currently at " << *outbound_rtp->frame_height
+ << "p...";
+ } else {
+ RTC_LOG(LS_INFO)
+ << "Waiting for target resolution. No frames encoded yet...";
+ }
+ if (!outbound_rtp->frame_height.is_defined() ||
+ *outbound_rtp->frame_height != frame_height) {
+ // Sleep to avoid log spam when this is used in ASSERT_TRUE_WAIT().
+ rtc::Thread::Current()->SleepMs(1000);
+ return false;
+ }
+ return true;
+ }
+
+ // Verifies that every outbound-rtp identified in `resolutions` (by rid, or
+ // the sole stream when rid is empty) has encoded at least one frame and that
+ // its frame size does not exceed the expected width/height. Logs why it
+ // failed so a wrapping EXPECT_TRUE produces actionable output.
+ bool OutboundRtpResolutionsAreLessThanOrEqualToExpectations(
+ rtc::scoped_refptr<PeerConnectionTestWrapper> pc_wrapper,
+ std::vector<RidAndResolution> resolutions) {
+ rtc::scoped_refptr<const RTCStatsReport> report = GetStats(pc_wrapper);
+ std::vector<const RTCOutboundRtpStreamStats*> outbound_rtps =
+ report->GetStatsOfType<RTCOutboundRtpStreamStats>();
+ for (const RidAndResolution& resolution : resolutions) {
+ const RTCOutboundRtpStreamStats* outbound_rtp = nullptr;
+ if (!resolution.rid.empty()) {
+ outbound_rtp = FindOutboundRtpByRid(outbound_rtps, resolution.rid);
+ } else if (outbound_rtps.size() == 1u) {
+ outbound_rtp = outbound_rtps[0];
+ }
+ if (!outbound_rtp || !outbound_rtp->frame_width.is_defined() ||
+ !outbound_rtp->frame_height.is_defined()) {
+ // RTP not found by rid or has not encoded a frame yet.
+ RTC_LOG(LS_ERROR) << "rid=" << resolution.rid << " does not have "
+ << "resolution metrics";
+ return false;
+ }
+ if (*outbound_rtp->frame_width > resolution.width ||
+ *outbound_rtp->frame_height > resolution.height) {
+ RTC_LOG(LS_ERROR) << "rid=" << resolution.rid << " is "
+ << *outbound_rtp->frame_width << "x"
+ << *outbound_rtp->frame_height
+ << ", this is greater than the "
+ << "expected " << resolution.width << "x"
+ << resolution.height;
+ return false;
+ }
+ }
+ return true;
+ }
+
+ protected:
+ // Creates an SDP offer and blocks (up to kDefaultTimeout) until the async
+ // CreateOffer() operation completes.
+ std::unique_ptr<SessionDescriptionInterface> CreateOffer(
+ rtc::scoped_refptr<PeerConnectionTestWrapper> pc_wrapper) {
+ auto observer =
+ rtc::make_ref_counted<MockCreateSessionDescriptionObserver>();
+ pc_wrapper->pc()->CreateOffer(observer.get(), {});
+ EXPECT_EQ_WAIT(true, observer->called(), kDefaultTimeout.ms());
+ return observer->MoveDescription();
+ }
+
+ // Creates an SDP answer and blocks (up to kDefaultTimeout) until the async
+ // CreateAnswer() operation completes.
+ std::unique_ptr<SessionDescriptionInterface> CreateAnswer(
+ rtc::scoped_refptr<PeerConnectionTestWrapper> pc_wrapper) {
+ auto observer =
+ rtc::make_ref_counted<MockCreateSessionDescriptionObserver>();
+ pc_wrapper->pc()->CreateAnswer(observer.get(), {});
+ EXPECT_EQ_WAIT(true, observer->called(), kDefaultTimeout.ms());
+ return observer->MoveDescription();
+ }
+
+ // Kicks off SetLocalDescription() with a clone of `sdp` (the caller keeps
+ // ownership) and returns the observer without waiting; await it via Await().
+ rtc::scoped_refptr<MockSetSessionDescriptionObserver> SetLocalDescription(
+ rtc::scoped_refptr<PeerConnectionTestWrapper> pc_wrapper,
+ SessionDescriptionInterface* sdp) {
+ auto observer = rtc::make_ref_counted<MockSetSessionDescriptionObserver>();
+ pc_wrapper->pc()->SetLocalDescription(
+ observer.get(), CloneSessionDescription(sdp).release());
+ return observer;
+ }
+
+ // Kicks off SetRemoteDescription() with a clone of `sdp` (the caller keeps
+ // ownership) and returns the observer without waiting; await it via Await().
+ rtc::scoped_refptr<MockSetSessionDescriptionObserver> SetRemoteDescription(
+ rtc::scoped_refptr<PeerConnectionTestWrapper> pc_wrapper,
+ SessionDescriptionInterface* sdp) {
+ auto observer = rtc::make_ref_counted<MockSetSessionDescriptionObserver>();
+ pc_wrapper->pc()->SetRemoteDescription(
+ observer.get(), CloneSessionDescription(sdp).release());
+ return observer;
+ }
+
+ // To avoid ICE candidates arriving before the remote endpoint has received
+ // the offer it is important that SetLocalDescription() and
+ // SetRemoteDescription() are kicked off without awaiting in-between. This
+ // helper is used to await multiple observers. Returns false if any observer
+ // reports failure.
+ bool Await(std::vector<rtc::scoped_refptr<MockSetSessionDescriptionObserver>>
+ observers) {
+ for (auto& observer : observers) {
+ EXPECT_EQ_WAIT(true, observer->called(), kDefaultTimeout.ms());
+ if (!observer->result()) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ rtc::PhysicalSocketServer pss_;
+ std::unique_ptr<rtc::Thread> background_thread_;
+};
+
+// A single VP8 encoding with no scalability mode configured should encode as
+// plain L1T1 (one spatial layer, one temporal layer).
+TEST_F(PeerConnectionEncodingsIntegrationTest,
+ VP8_SingleEncodingDefaultsToL1T1) {
+ rtc::scoped_refptr<PeerConnectionTestWrapper> local_pc_wrapper = CreatePc();
+ rtc::scoped_refptr<PeerConnectionTestWrapper> remote_pc_wrapper = CreatePc();
+ ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper);
+
+ std::vector<cricket::SimulcastLayer> layers =
+ CreateLayers({"f"}, /*active=*/true);
+ rtc::scoped_refptr<RtpTransceiverInterface> transceiver =
+ AddTransceiverWithSimulcastLayers(local_pc_wrapper, remote_pc_wrapper,
+ layers);
+ std::vector<RtpCodecCapability> codecs =
+ GetCapabilitiesAndRestrictToCodec(local_pc_wrapper, "VP8");
+ transceiver->SetCodecPreferences(codecs);
+
+ NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper);
+ local_pc_wrapper->WaitForConnection();
+ remote_pc_wrapper->WaitForConnection();
+
+ // Wait until media is flowing.
+ ASSERT_TRUE_WAIT(HasOutboundRtpBytesSent(local_pc_wrapper, 1u),
+ kDefaultTimeout.ms());
+ EXPECT_TRUE(OutboundRtpResolutionsAreLessThanOrEqualToExpectations(
+ local_pc_wrapper, {{"", 1280, 720}}));
+ // Verify codec and scalability mode.
+ rtc::scoped_refptr<const RTCStatsReport> report = GetStats(local_pc_wrapper);
+ std::vector<const RTCOutboundRtpStreamStats*> outbound_rtps =
+ report->GetStatsOfType<RTCOutboundRtpStreamStats>();
+ ASSERT_THAT(outbound_rtps, SizeIs(1u));
+ EXPECT_THAT(GetCurrentCodecMimeType(report, *outbound_rtps[0]),
+ StrCaseEq("video/VP8"));
+ EXPECT_THAT(*outbound_rtps[0]->scalability_mode, StrEq("L1T1"));
+}
+
+// Requesting an SVC mode that VP8 cannot do (L3T3_KEY) when only VP8 is
+// negotiable must make SetParameters() fail and leave the default L1T1 in use.
+TEST_F(PeerConnectionEncodingsIntegrationTest,
+ VP8_RejectsSvcAndDefaultsToL1T1) {
+ rtc::scoped_refptr<PeerConnectionTestWrapper> local_pc_wrapper = CreatePc();
+ rtc::scoped_refptr<PeerConnectionTestWrapper> remote_pc_wrapper = CreatePc();
+ ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper);
+
+ std::vector<cricket::SimulcastLayer> layers =
+ CreateLayers({"f"}, /*active=*/true);
+ rtc::scoped_refptr<RtpTransceiverInterface> transceiver =
+ AddTransceiverWithSimulcastLayers(local_pc_wrapper, remote_pc_wrapper,
+ layers);
+ // Restricting codecs restricts what SetParameters() will accept or reject.
+ std::vector<RtpCodecCapability> codecs =
+ GetCapabilitiesAndRestrictToCodec(local_pc_wrapper, "VP8");
+ transceiver->SetCodecPreferences(codecs);
+ // Attempt SVC (L3T3_KEY). This is not possible because only VP8 is up for
+ // negotiation and VP8 does not support it.
+ rtc::scoped_refptr<RtpSenderInterface> sender = transceiver->sender();
+ RtpParameters parameters = sender->GetParameters();
+ ASSERT_EQ(parameters.encodings.size(), 1u);
+ parameters.encodings[0].scalability_mode = "L3T3_KEY";
+ parameters.encodings[0].scale_resolution_down_by = 1;
+ EXPECT_FALSE(sender->SetParameters(parameters).ok());
+ // `scalability_mode` remains unset because SetParameters() failed.
+ parameters = sender->GetParameters();
+ ASSERT_EQ(parameters.encodings.size(), 1u);
+ EXPECT_THAT(parameters.encodings[0].scalability_mode, Eq(absl::nullopt));
+
+ NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper);
+ local_pc_wrapper->WaitForConnection();
+ remote_pc_wrapper->WaitForConnection();
+
+ // Wait until media is flowing.
+ ASSERT_TRUE_WAIT(HasOutboundRtpBytesSent(local_pc_wrapper, 1u),
+ kDefaultTimeout.ms());
+ // When `scalability_mode` is not set, VP8 defaults to L1T1.
+ rtc::scoped_refptr<const RTCStatsReport> report = GetStats(local_pc_wrapper);
+ std::vector<const RTCOutboundRtpStreamStats*> outbound_rtps =
+ report->GetStatsOfType<RTCOutboundRtpStreamStats>();
+ ASSERT_THAT(outbound_rtps, SizeIs(1u));
+ EXPECT_THAT(GetCurrentCodecMimeType(report, *outbound_rtps[0]),
+ StrCaseEq("video/VP8"));
+ EXPECT_THAT(*outbound_rtps[0]->scalability_mode, StrEq("L1T1"));
+ // GetParameters() confirms `scalability_mode` is still not set.
+ parameters = sender->GetParameters();
+ ASSERT_EQ(parameters.encodings.size(), 1u);
+ EXPECT_THAT(parameters.encodings[0].scalability_mode, Eq(absl::nullopt));
+}
+
+// When SVC is requested before the codec is known, SetParameters() succeeds;
+// once negotiation picks VP8 the sender must fall back to L1T2 rather than
+// keeping the impossible L3T3_KEY.
+TEST_F(PeerConnectionEncodingsIntegrationTest,
+ VP8_FallbackFromSvcResultsInL1T2) {
+ rtc::scoped_refptr<PeerConnectionTestWrapper> local_pc_wrapper = CreatePc();
+ rtc::scoped_refptr<PeerConnectionTestWrapper> remote_pc_wrapper = CreatePc();
+ ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper);
+
+ std::vector<cricket::SimulcastLayer> layers =
+ CreateLayers({"f"}, /*active=*/true);
+ rtc::scoped_refptr<RtpTransceiverInterface> transceiver =
+ AddTransceiverWithSimulcastLayers(local_pc_wrapper, remote_pc_wrapper,
+ layers);
+ // Verify test assumption that VP8 is first in the list, but don't modify the
+ // codec preferences because we want the sender to think SVC is a possibility.
+ std::vector<RtpCodecCapability> codecs =
+ local_pc_wrapper->pc_factory()
+ ->GetRtpSenderCapabilities(cricket::MEDIA_TYPE_VIDEO)
+ .codecs;
+ EXPECT_THAT(codecs[0].name, StrCaseEq("VP8"));
+ // Attempt SVC (L3T3_KEY), which is not possible with VP8, but the sender does
+ // not yet know which codec we'll use so the parameters will be accepted.
+ rtc::scoped_refptr<RtpSenderInterface> sender = transceiver->sender();
+ RtpParameters parameters = sender->GetParameters();
+ ASSERT_EQ(parameters.encodings.size(), 1u);
+ parameters.encodings[0].scalability_mode = "L3T3_KEY";
+ parameters.encodings[0].scale_resolution_down_by = 1;
+ EXPECT_TRUE(sender->SetParameters(parameters).ok());
+ // Verify fallback has not happened yet.
+ parameters = sender->GetParameters();
+ ASSERT_EQ(parameters.encodings.size(), 1u);
+ EXPECT_THAT(parameters.encodings[0].scalability_mode,
+ Optional(std::string("L3T3_KEY")));
+
+ // Negotiate, this results in VP8 being picked and fallback happening.
+ NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper);
+ local_pc_wrapper->WaitForConnection();
+ remote_pc_wrapper->WaitForConnection();
+ // `scalability_mode` is assigned the fallback value "L1T2" which is different
+ // than the default of absl::nullopt.
+ parameters = sender->GetParameters();
+ ASSERT_EQ(parameters.encodings.size(), 1u);
+ EXPECT_THAT(parameters.encodings[0].scalability_mode,
+ Optional(std::string("L1T2")));
+
+ // Wait until media is flowing, no significant time needed because we only
+ // have one layer.
+ ASSERT_TRUE_WAIT(HasOutboundRtpBytesSent(local_pc_wrapper, 1u),
+ kDefaultTimeout.ms());
+ // GetStats() confirms "L1T2" is used which is different than the "L1T1"
+ // default or the "L3T3_KEY" that was attempted.
+ rtc::scoped_refptr<const RTCStatsReport> report = GetStats(local_pc_wrapper);
+ std::vector<const RTCOutboundRtpStreamStats*> outbound_rtps =
+ report->GetStatsOfType<RTCOutboundRtpStreamStats>();
+ ASSERT_THAT(outbound_rtps, SizeIs(1u));
+ EXPECT_THAT(GetCurrentCodecMimeType(report, *outbound_rtps[0]),
+ StrCaseEq("video/VP8"));
+ EXPECT_THAT(*outbound_rtps[0]->scalability_mode, StrEq("L1T2"));
+}
+
+// The legacy SVC path is triggered when VP9 is used, but `scalability_mode`
+// has not been specified.
+// TODO(https://crbug.com/webrtc/14889): When legacy VP9 SVC path has been
+// deprecated and removed, update this test to assert that simulcast is used
+// (i.e. VP9 is not treated differently than VP8).
+TEST_F(PeerConnectionEncodingsIntegrationTest,
+ VP9_LegacySvcWhenScalabilityModeNotSpecified) {
+ rtc::scoped_refptr<PeerConnectionTestWrapper> local_pc_wrapper = CreatePc();
+ rtc::scoped_refptr<PeerConnectionTestWrapper> remote_pc_wrapper = CreatePc();
+ ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper);
+
+ std::vector<cricket::SimulcastLayer> layers =
+ CreateLayers({"f", "h", "q"}, /*active=*/true);
+ rtc::scoped_refptr<RtpTransceiverInterface> transceiver =
+ AddTransceiverWithSimulcastLayers(local_pc_wrapper, remote_pc_wrapper,
+ layers);
+ std::vector<RtpCodecCapability> codecs =
+ GetCapabilitiesAndRestrictToCodec(local_pc_wrapper, "VP9");
+ transceiver->SetCodecPreferences(codecs);
+
+ NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper);
+ local_pc_wrapper->WaitForConnection();
+ remote_pc_wrapper->WaitForConnection();
+
+ // Wait until media is flowing. We only expect a single RTP stream.
+ // We expect to see bytes flowing almost immediately on the lowest layer.
+ ASSERT_TRUE_WAIT(HasOutboundRtpBytesSent(local_pc_wrapper, 1u),
+ kDefaultTimeout.ms());
+ // Wait until scalability mode is reported and expected resolution reached.
+ // Ramp up time may be significant.
+ ASSERT_TRUE_WAIT(HasOutboundRtpWithRidAndScalabilityMode(
+ local_pc_wrapper, "f", "L3T3_KEY", 720),
+ kLongTimeoutForRampingUp.ms());
+
+ // Despite SVC being used on a single RTP stream, GetParameters() returns the
+ // three encodings that we configured earlier (this is not spec-compliant but
+ // it is how legacy SVC behaves).
+ rtc::scoped_refptr<RtpSenderInterface> sender = transceiver->sender();
+ std::vector<RtpEncodingParameters> encodings =
+ sender->GetParameters().encodings;
+ ASSERT_EQ(encodings.size(), 3u);
+ // When legacy SVC is used, `scalability_mode` is not specified.
+ EXPECT_FALSE(encodings[0].scalability_mode.has_value());
+ EXPECT_FALSE(encodings[1].scalability_mode.has_value());
+ EXPECT_FALSE(encodings[2].scalability_mode.has_value());
+}
+
+// The spec-compliant way to configure SVC for a single stream. The expected
+// outcome is the same as for the legacy SVC case except that we only have one
+// encoding in GetParameters().
+TEST_F(PeerConnectionEncodingsIntegrationTest,
+ VP9_StandardSvcWithOnlyOneEncoding) {
+ rtc::scoped_refptr<PeerConnectionTestWrapper> local_pc_wrapper = CreatePc();
+ rtc::scoped_refptr<PeerConnectionTestWrapper> remote_pc_wrapper = CreatePc();
+ ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper);
+
+ std::vector<cricket::SimulcastLayer> layers =
+ CreateLayers({"f"}, /*active=*/true);
+ rtc::scoped_refptr<RtpTransceiverInterface> transceiver =
+ AddTransceiverWithSimulcastLayers(local_pc_wrapper, remote_pc_wrapper,
+ layers);
+ std::vector<RtpCodecCapability> codecs =
+ GetCapabilitiesAndRestrictToCodec(local_pc_wrapper, "VP9");
+ transceiver->SetCodecPreferences(codecs);
+ // Configure SVC, a.k.a. "L3T3_KEY".
+ rtc::scoped_refptr<RtpSenderInterface> sender = transceiver->sender();
+ RtpParameters parameters = sender->GetParameters();
+ ASSERT_EQ(parameters.encodings.size(), 1u);
+ parameters.encodings[0].scalability_mode = "L3T3_KEY";
+ parameters.encodings[0].scale_resolution_down_by = 1;
+ EXPECT_TRUE(sender->SetParameters(parameters).ok());
+
+ NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper);
+ local_pc_wrapper->WaitForConnection();
+ remote_pc_wrapper->WaitForConnection();
+
+ // Wait until media is flowing. We only expect a single RTP stream.
+ // We expect to see bytes flowing almost immediately on the lowest layer.
+ ASSERT_TRUE_WAIT(HasOutboundRtpBytesSent(local_pc_wrapper, 1u),
+ kDefaultTimeout.ms());
+ EXPECT_TRUE(OutboundRtpResolutionsAreLessThanOrEqualToExpectations(
+ local_pc_wrapper, {{"", 1280, 720}}));
+ // Verify codec and scalability mode.
+ rtc::scoped_refptr<const RTCStatsReport> report = GetStats(local_pc_wrapper);
+ std::vector<const RTCOutboundRtpStreamStats*> outbound_rtps =
+ report->GetStatsOfType<RTCOutboundRtpStreamStats>();
+ ASSERT_THAT(outbound_rtps, SizeIs(1u));
+ EXPECT_THAT(GetCurrentCodecMimeType(report, *outbound_rtps[0]),
+ StrCaseEq("video/VP9"));
+ EXPECT_THAT(*outbound_rtps[0]->scalability_mode, StrEq("L3T3_KEY"));
+
+ // GetParameters() is consistent with what we asked for and got.
+ parameters = sender->GetParameters();
+ ASSERT_EQ(parameters.encodings.size(), 1u);
+ EXPECT_THAT(parameters.encodings[0].scalability_mode,
+ Optional(std::string("L3T3_KEY")));
+}
+
+// The {active,inactive,inactive} case is technically simulcast but since we
+// only have one active stream, we're able to do SVC (multiple spatial layers
+// is not supported if multiple encodings are active). The expected outcome is
+// the same as above except we end up with two inactive RTP streams which are
+// observable in GetStats().
+TEST_F(PeerConnectionEncodingsIntegrationTest,
+ VP9_StandardSvcWithSingleActiveEncoding) {
+ rtc::scoped_refptr<PeerConnectionTestWrapper> local_pc_wrapper = CreatePc();
+ rtc::scoped_refptr<PeerConnectionTestWrapper> remote_pc_wrapper = CreatePc();
+ ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper);
+
+ std::vector<cricket::SimulcastLayer> layers =
+ CreateLayers({"f", "h", "q"}, /*active=*/true);
+ rtc::scoped_refptr<RtpTransceiverInterface> transceiver =
+ AddTransceiverWithSimulcastLayers(local_pc_wrapper, remote_pc_wrapper,
+ layers);
+ std::vector<RtpCodecCapability> codecs =
+ GetCapabilitiesAndRestrictToCodec(local_pc_wrapper, "VP9");
+ transceiver->SetCodecPreferences(codecs);
+ // Configure SVC, a.k.a. "L3T3_KEY", on the top layer; deactivate the rest.
+ rtc::scoped_refptr<RtpSenderInterface> sender = transceiver->sender();
+ RtpParameters parameters = sender->GetParameters();
+ ASSERT_THAT(parameters.encodings, SizeIs(3));
+ parameters.encodings[0].scalability_mode = "L3T3_KEY";
+ parameters.encodings[0].scale_resolution_down_by = 1;
+ parameters.encodings[1].active = false;
+ parameters.encodings[2].active = false;
+ EXPECT_TRUE(sender->SetParameters(parameters).ok());
+
+ NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper);
+ local_pc_wrapper->WaitForConnection();
+ remote_pc_wrapper->WaitForConnection();
+
+ // Since the standard API is configuring simulcast we get three outbound-rtps,
+ // but only one is active.
+ ASSERT_TRUE_WAIT(HasOutboundRtpBytesSent(local_pc_wrapper, 3u, 1u),
+ kDefaultTimeout.ms());
+ // Wait until scalability mode is reported and expected resolution reached.
+ // Ramp up time is significant.
+ ASSERT_TRUE_WAIT(HasOutboundRtpWithRidAndScalabilityMode(
+ local_pc_wrapper, "f", "L3T3_KEY", 720),
+ kLongTimeoutForRampingUp.ms());
+
+ // GetParameters() is consistent with what we asked for and got.
+ parameters = sender->GetParameters();
+ ASSERT_THAT(parameters.encodings, SizeIs(3));
+ EXPECT_THAT(parameters.encodings[0].scalability_mode,
+ Optional(std::string("L3T3_KEY")));
+ EXPECT_FALSE(parameters.encodings[1].scalability_mode.has_value());
+ EXPECT_FALSE(parameters.encodings[2].scalability_mode.has_value());
+}
+
+// Exercise common path where `scalability_mode` is not specified until after
+// negotiation, requiring us to recreate the stream when the number of streams
+// changes from 1 (legacy SVC) to 3 (standard simulcast).
+TEST_F(PeerConnectionEncodingsIntegrationTest,
+ VP9_SwitchFromLegacySvcToStandardSingleActiveEncodingSvc) {
+ rtc::scoped_refptr<PeerConnectionTestWrapper> local_pc_wrapper = CreatePc();
+ rtc::scoped_refptr<PeerConnectionTestWrapper> remote_pc_wrapper = CreatePc();
+ ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper);
+
+ std::vector<cricket::SimulcastLayer> layers =
+ CreateLayers({"f", "h", "q"}, /*active=*/true);
+ rtc::scoped_refptr<RtpTransceiverInterface> transceiver =
+ AddTransceiverWithSimulcastLayers(local_pc_wrapper, remote_pc_wrapper,
+ layers);
+ std::vector<RtpCodecCapability> codecs =
+ GetCapabilitiesAndRestrictToCodec(local_pc_wrapper, "VP9");
+ transceiver->SetCodecPreferences(codecs);
+
+ // The original negotiation triggers legacy SVC because we didn't specify
+ // any scalability mode.
+ NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper);
+ local_pc_wrapper->WaitForConnection();
+ remote_pc_wrapper->WaitForConnection();
+
+ // Switch to the standard mode. Despite only having a single active stream in
+ // both cases, this internally reconfigures from 1 stream to 3 streams.
+ // Test coverage for https://crbug.com/webrtc/15016.
+ rtc::scoped_refptr<RtpSenderInterface> sender = transceiver->sender();
+ RtpParameters parameters = sender->GetParameters();
+ ASSERT_THAT(parameters.encodings, SizeIs(3));
+ parameters.encodings[0].active = true;
+ parameters.encodings[0].scalability_mode = "L2T2_KEY";
+ parameters.encodings[0].scale_resolution_down_by = 2.0;
+ parameters.encodings[1].active = false;
+ parameters.encodings[1].scalability_mode = absl::nullopt;
+ parameters.encodings[2].active = false;
+ parameters.encodings[2].scalability_mode = absl::nullopt;
+ sender->SetParameters(parameters);
+
+ // Since the standard API is configuring simulcast we get three outbound-rtps,
+ // but only one is active.
+ ASSERT_TRUE_WAIT(HasOutboundRtpBytesSent(local_pc_wrapper, 3u, 1u),
+ kDefaultTimeout.ms());
+ // Wait until scalability mode is reported and expected resolution reached.
+ // Ramp up time may be significant. Resolution is halved due to
+ // scale_resolution_down_by = 2.
+ ASSERT_TRUE_WAIT(HasOutboundRtpWithRidAndScalabilityMode(
+ local_pc_wrapper, "f", "L2T2_KEY", 720 / 2),
+ kLongTimeoutForRampingUp.ms());
+
+ // GetParameters() does not report any fallback.
+ parameters = sender->GetParameters();
+ ASSERT_THAT(parameters.encodings, SizeIs(3));
+ EXPECT_THAT(parameters.encodings[0].scalability_mode,
+ Optional(std::string("L2T2_KEY")));
+ EXPECT_FALSE(parameters.encodings[1].scalability_mode.has_value());
+ EXPECT_FALSE(parameters.encodings[2].scalability_mode.has_value());
+}
+
+// With legacy SVC (no scalability mode set) and every encoding deactivated,
+// a single outbound-rtp exists but nothing must be sent.
+TEST_F(PeerConnectionEncodingsIntegrationTest,
+ VP9_AllLayersInactive_LegacySvc) {
+ rtc::scoped_refptr<PeerConnectionTestWrapper> local_pc_wrapper = CreatePc();
+ rtc::scoped_refptr<PeerConnectionTestWrapper> remote_pc_wrapper = CreatePc();
+ ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper);
+
+ std::vector<cricket::SimulcastLayer> layers =
+ CreateLayers({"f", "h", "q"}, /*active=*/true);
+ rtc::scoped_refptr<RtpTransceiverInterface> transceiver =
+ AddTransceiverWithSimulcastLayers(local_pc_wrapper, remote_pc_wrapper,
+ layers);
+ std::vector<RtpCodecCapability> codecs =
+ GetCapabilitiesAndRestrictToCodec(local_pc_wrapper, "VP9");
+ transceiver->SetCodecPreferences(codecs);
+
+ // Legacy SVC mode and all layers inactive.
+ rtc::scoped_refptr<RtpSenderInterface> sender = transceiver->sender();
+ RtpParameters parameters = sender->GetParameters();
+ ASSERT_THAT(parameters.encodings, SizeIs(3));
+ parameters.encodings[0].active = false;
+ parameters.encodings[1].active = false;
+ parameters.encodings[2].active = false;
+ sender->SetParameters(parameters);
+
+ NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper);
+ local_pc_wrapper->WaitForConnection();
+ remote_pc_wrapper->WaitForConnection();
+
+ // Ensure no media is flowing (1 second should be enough).
+ rtc::Thread::Current()->SleepMs(1000);
+ rtc::scoped_refptr<const RTCStatsReport> report = GetStats(local_pc_wrapper);
+ std::vector<const RTCOutboundRtpStreamStats*> outbound_rtps =
+ report->GetStatsOfType<RTCOutboundRtpStreamStats>();
+ ASSERT_THAT(outbound_rtps, SizeIs(1u));
+ EXPECT_EQ(*outbound_rtps[0]->bytes_sent, 0u);
+}
+
+// With standard SVC (explicit L3T3_KEY) and every encoding deactivated, three
+// outbound-rtps exist but none of them must send anything.
+TEST_F(PeerConnectionEncodingsIntegrationTest,
+ VP9_AllLayersInactive_StandardSvc) {
+ rtc::scoped_refptr<PeerConnectionTestWrapper> local_pc_wrapper = CreatePc();
+ rtc::scoped_refptr<PeerConnectionTestWrapper> remote_pc_wrapper = CreatePc();
+ ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper);
+
+ std::vector<cricket::SimulcastLayer> layers =
+ CreateLayers({"f", "h", "q"}, /*active=*/true);
+ rtc::scoped_refptr<RtpTransceiverInterface> transceiver =
+ AddTransceiverWithSimulcastLayers(local_pc_wrapper, remote_pc_wrapper,
+ layers);
+ std::vector<RtpCodecCapability> codecs =
+ GetCapabilitiesAndRestrictToCodec(local_pc_wrapper, "VP9");
+ transceiver->SetCodecPreferences(codecs);
+
+ // Standard mode and all layers inactive.
+ rtc::scoped_refptr<RtpSenderInterface> sender = transceiver->sender();
+ RtpParameters parameters = sender->GetParameters();
+ ASSERT_THAT(parameters.encodings, SizeIs(3));
+ parameters.encodings[0].scalability_mode = "L3T3_KEY";
+ parameters.encodings[0].scale_resolution_down_by = 1;
+ parameters.encodings[0].active = false;
+ parameters.encodings[1].active = false;
+ parameters.encodings[2].active = false;
+ sender->SetParameters(parameters);
+
+ NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper);
+ local_pc_wrapper->WaitForConnection();
+ remote_pc_wrapper->WaitForConnection();
+
+ // Ensure no media is flowing (1 second should be enough).
+ rtc::Thread::Current()->SleepMs(1000);
+ rtc::scoped_refptr<const RTCStatsReport> report = GetStats(local_pc_wrapper);
+ std::vector<const RTCOutboundRtpStreamStats*> outbound_rtps =
+ report->GetStatsOfType<RTCOutboundRtpStreamStats>();
+ ASSERT_THAT(outbound_rtps, SizeIs(3u));
+ EXPECT_EQ(*outbound_rtps[0]->bytes_sent, 0u);
+ EXPECT_EQ(*outbound_rtps[1]->bytes_sent, 0u);
+ EXPECT_EQ(*outbound_rtps[2]->bytes_sent, 0u);
+}
+
+// Legacy SVC interpretation of encodings: deactivating the bottom two
+// encodings drops the bottom two spatial layers, yielding L1T3, and the
+// target bitrate must stay within the L1T3 budget.
+TEST_F(PeerConnectionEncodingsIntegrationTest, VP9_TargetBitrate_LegacyL1T3) {
+ rtc::scoped_refptr<PeerConnectionTestWrapper> local_pc_wrapper = CreatePc();
+ rtc::scoped_refptr<PeerConnectionTestWrapper> remote_pc_wrapper = CreatePc();
+ ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper);
+
+ std::vector<cricket::SimulcastLayer> layers =
+ CreateLayers({"f", "h", "q"}, /*active=*/true);
+ rtc::scoped_refptr<RtpTransceiverInterface> transceiver =
+ AddTransceiverWithSimulcastLayers(local_pc_wrapper, remote_pc_wrapper,
+ layers);
+ std::vector<RtpCodecCapability> codecs =
+ GetCapabilitiesAndRestrictToCodec(local_pc_wrapper, "VP9");
+ transceiver->SetCodecPreferences(codecs);
+
+ // In legacy SVC, disabling the bottom two layers encodings is interpreted as
+ // disabling the bottom two spatial layers resulting in L1T3.
+ rtc::scoped_refptr<RtpSenderInterface> sender = transceiver->sender();
+ RtpParameters parameters = sender->GetParameters();
+ parameters.encodings[0].active = false;
+ parameters.encodings[1].active = false;
+ parameters.encodings[2].active = true;
+ sender->SetParameters(parameters);
+
+ NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper);
+ local_pc_wrapper->WaitForConnection();
+ remote_pc_wrapper->WaitForConnection();
+
+ // Wait until 720p L1T3 has ramped up to 720p. It may take additional time
+ // for the target bitrate to reach its maximum.
+ ASSERT_TRUE_WAIT(HasOutboundRtpWithRidAndScalabilityMode(local_pc_wrapper,
+ "f", "L1T3", 720),
+ kLongTimeoutForRampingUp.ms());
+
+ // The target bitrate typically reaches `kVp9ExpectedMaxBitrateForL1T3`
+ // in a short period of time. However to reduce risk of flakiness in bot
+ // environments, this test only fails if we exceed the expected target.
+ rtc::Thread::Current()->SleepMs(1000);
+ rtc::scoped_refptr<const RTCStatsReport> report = GetStats(local_pc_wrapper);
+ std::vector<const RTCOutboundRtpStreamStats*> outbound_rtps =
+ report->GetStatsOfType<RTCOutboundRtpStreamStats>();
+ ASSERT_THAT(outbound_rtps, SizeIs(1));
+ DataRate target_bitrate =
+ DataRate::BitsPerSec(*outbound_rtps[0]->target_bitrate);
+ EXPECT_LE(target_bitrate.kbps(), kVp9ExpectedMaxBitrateForL1T3.kbps());
+}
+
+// Test coverage for https://crbug.com/1455039: standard-API L1T3 must produce
+// the same bitrate budget as the legacy L1T3 configuration above.
+TEST_F(PeerConnectionEncodingsIntegrationTest, VP9_TargetBitrate_StandardL1T3) {
+ rtc::scoped_refptr<PeerConnectionTestWrapper> local_pc_wrapper = CreatePc();
+ rtc::scoped_refptr<PeerConnectionTestWrapper> remote_pc_wrapper = CreatePc();
+ ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper);
+
+ std::vector<cricket::SimulcastLayer> layers =
+ CreateLayers({"f", "h", "q"}, /*active=*/true);
+ rtc::scoped_refptr<RtpTransceiverInterface> transceiver =
+ AddTransceiverWithSimulcastLayers(local_pc_wrapper, remote_pc_wrapper,
+ layers);
+ std::vector<RtpCodecCapability> codecs =
+ GetCapabilitiesAndRestrictToCodec(local_pc_wrapper, "VP9");
+ transceiver->SetCodecPreferences(codecs);
+
+ // With standard APIs, L1T3 is explicitly specified and the encodings refers
+ // to the RTP streams, not the spatial layers. The end result should be
+ // equivalent to the legacy L1T3 case.
+ rtc::scoped_refptr<RtpSenderInterface> sender = transceiver->sender();
+ RtpParameters parameters = sender->GetParameters();
+ parameters.encodings[0].active = true;
+ parameters.encodings[0].scale_resolution_down_by = 1.0;
+ parameters.encodings[0].scalability_mode = "L1T3";
+ parameters.encodings[1].active = false;
+ parameters.encodings[2].active = false;
+ sender->SetParameters(parameters);
+
+ NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper);
+ local_pc_wrapper->WaitForConnection();
+ remote_pc_wrapper->WaitForConnection();
+
+ // Wait until 720p L1T3 has ramped up to 720p. It may take additional time
+ // for the target bitrate to reach its maximum.
+ ASSERT_TRUE_WAIT(HasOutboundRtpWithRidAndScalabilityMode(local_pc_wrapper,
+ "f", "L1T3", 720),
+ kLongTimeoutForRampingUp.ms());
+
+ // The target bitrate typically reaches `kVp9ExpectedMaxBitrateForL1T3`
+ // in a short period of time. However to reduce risk of flakiness in bot
+ // environments, this test only fails if we exceed the expected target.
+ rtc::Thread::Current()->SleepMs(1000);
+ rtc::scoped_refptr<const RTCStatsReport> report = GetStats(local_pc_wrapper);
+ std::vector<const RTCOutboundRtpStreamStats*> outbound_rtps =
+ report->GetStatsOfType<RTCOutboundRtpStreamStats>();
+ ASSERT_THAT(outbound_rtps, SizeIs(3));
+ auto* outbound_rtp = FindOutboundRtpByRid(outbound_rtps, "f");
+ ASSERT_TRUE(outbound_rtp);
+ DataRate target_bitrate = DataRate::BitsPerSec(*outbound_rtp->target_bitrate);
+ EXPECT_LE(target_bitrate.kbps(), kVp9ExpectedMaxBitrateForL1T3.kbps());
+}
+
+TEST_F(PeerConnectionEncodingsIntegrationTest,
+       SimulcastProducesUniqueSsrcAndRtxSsrcs) {
+  // Each of the three simulcast layers must be assigned its own primary SSRC
+  // and its own RTX SSRC.
+  rtc::scoped_refptr<PeerConnectionTestWrapper> local_pc_wrapper = CreatePc();
+  rtc::scoped_refptr<PeerConnectionTestWrapper> remote_pc_wrapper = CreatePc();
+  ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper);
+
+  std::vector<cricket::SimulcastLayer> layers =
+      CreateLayers({"f", "h", "q"}, /*active=*/true);
+  rtc::scoped_refptr<RtpTransceiverInterface> transceiver =
+      AddTransceiverWithSimulcastLayers(local_pc_wrapper, remote_pc_wrapper,
+                                        layers);
+  std::vector<RtpCodecCapability> codecs =
+      GetCapabilitiesAndRestrictToCodec(local_pc_wrapper, "VP8");
+  transceiver->SetCodecPreferences(codecs);
+
+  NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper);
+  local_pc_wrapper->WaitForConnection();
+  remote_pc_wrapper->WaitForConnection();
+
+  // All three layers must be sending before the SSRCs can be inspected;
+  // allow time for ramp-up.
+  ASSERT_TRUE_WAIT(HasOutboundRtpBytesSent(local_pc_wrapper, 3u),
+                   kLongTimeoutForRampingUp.ms());
+
+  rtc::scoped_refptr<const RTCStatsReport> report = GetStats(local_pc_wrapper);
+  std::vector<const RTCOutboundRtpStreamStats*> outbound_rtps =
+      report->GetStatsOfType<RTCOutboundRtpStreamStats>();
+  ASSERT_THAT(outbound_rtps, SizeIs(3u));
+
+  // Insert every (RTX) SSRC into a set; any duplicate would collapse the
+  // set's size below three.
+  std::set<uint32_t> unique_ssrcs;
+  std::set<uint32_t> unique_rtx_ssrcs;
+  for (const auto* stats : outbound_rtps) {
+    ASSERT_TRUE(stats->ssrc.has_value());
+    ASSERT_TRUE(stats->rtx_ssrc.has_value());
+    unique_ssrcs.insert(*stats->ssrc);
+    unique_rtx_ssrcs.insert(*stats->rtx_ssrc);
+  }
+  EXPECT_EQ(unique_ssrcs.size(), 3u);
+  EXPECT_EQ(unique_rtx_ssrcs.size(), 3u);
+}
+
+TEST_F(PeerConnectionEncodingsIntegrationTest,
+       EncodingParameterCodecIsEmptyWhenCreatedAudio) {
+  // A freshly added audio transceiver must not have any codec preselected on
+  // its first encoding.
+  rtc::scoped_refptr<PeerConnectionTestWrapper> local_pc_wrapper = CreatePc();
+
+  auto transceiver_or_error =
+      local_pc_wrapper->pc()->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+  rtc::scoped_refptr<RtpTransceiverInterface> transceiver =
+      transceiver_or_error.MoveValue();
+  webrtc::RtpParameters parameters = transceiver->sender()->GetParameters();
+  EXPECT_FALSE(parameters.encodings[0].codec.has_value());
+}
+
+TEST_F(PeerConnectionEncodingsIntegrationTest,
+       EncodingParameterCodecIsEmptyWhenCreatedVideo) {
+  // A freshly added video transceiver must not have any codec preselected on
+  // its first encoding.
+  rtc::scoped_refptr<PeerConnectionTestWrapper> local_pc_wrapper = CreatePc();
+
+  auto transceiver_or_error =
+      local_pc_wrapper->pc()->AddTransceiver(cricket::MEDIA_TYPE_VIDEO);
+  rtc::scoped_refptr<RtpTransceiverInterface> transceiver =
+      transceiver_or_error.MoveValue();
+  webrtc::RtpParameters parameters = transceiver->sender()->GetParameters();
+  EXPECT_FALSE(parameters.encodings[0].codec.has_value());
+}
+
+// Verifies that a codec set in RtpTransceiverInit::send_encodings is
+// reflected by the sender's GetParameters() and is the codec actually used
+// for sending after negotiation.
+TEST_F(PeerConnectionEncodingsIntegrationTest,
+       EncodingParameterCodecIsSetByAddTransceiverAudio) {
+  rtc::scoped_refptr<PeerConnectionTestWrapper> local_pc_wrapper = CreatePc();
+  rtc::scoped_refptr<PeerConnectionTestWrapper> remote_pc_wrapper = CreatePc();
+  ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper);
+
+  rtc::scoped_refptr<webrtc::MediaStreamInterface> stream =
+      local_pc_wrapper->GetUserMedia(
+          /*audio=*/true, {}, /*video=*/false, {});
+  rtc::scoped_refptr<AudioTrackInterface> track = stream->GetAudioTracks()[0];
+
+  absl::optional<webrtc::RtpCodecCapability> pcmu =
+      local_pc_wrapper->FindFirstSendCodecWithName(cricket::MEDIA_TYPE_AUDIO,
+                                                   "pcmu");
+  ASSERT_TRUE(pcmu);
+
+  // Request PCMU on the (single) encoding at AddTransceiver() time.
+  webrtc::RtpTransceiverInit init;
+  init.direction = webrtc::RtpTransceiverDirection::kSendOnly;
+  webrtc::RtpEncodingParameters encoding_parameters;
+  encoding_parameters.codec = pcmu;
+  init.send_encodings.push_back(encoding_parameters);
+
+  auto transceiver_or_error =
+      local_pc_wrapper->pc()->AddTransceiver(track, init);
+  rtc::scoped_refptr<RtpTransceiverInterface> audio_transceiver =
+      transceiver_or_error.MoveValue();
+  webrtc::RtpParameters parameters =
+      audio_transceiver->sender()->GetParameters();
+  EXPECT_EQ(*parameters.encodings[0].codec, *pcmu);
+
+  NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper);
+  local_pc_wrapper->WaitForConnection();
+  remote_pc_wrapper->WaitForConnection();
+
+  // The outbound-rtp stats should report PCMU as the active send codec.
+  rtc::scoped_refptr<const RTCStatsReport> report = GetStats(local_pc_wrapper);
+  std::vector<const RTCOutboundRtpStreamStats*> outbound_rtps =
+      report->GetStatsOfType<RTCOutboundRtpStreamStats>();
+  ASSERT_EQ(outbound_rtps.size(), 1u);
+  std::string codec_name = GetCurrentCodecMimeType(report, *outbound_rtps[0]);
+  EXPECT_STRCASEEQ(("audio/" + pcmu->name).c_str(), codec_name.c_str());
+}
+
+// Verifies that a codec and scalability mode set in
+// RtpTransceiverInit::send_encodings are reflected by GetParameters() and
+// used on the wire after negotiation.
+TEST_F(PeerConnectionEncodingsIntegrationTest,
+       EncodingParameterCodecIsSetByAddTransceiverVideo) {
+  rtc::scoped_refptr<PeerConnectionTestWrapper> local_pc_wrapper = CreatePc();
+  rtc::scoped_refptr<PeerConnectionTestWrapper> remote_pc_wrapper = CreatePc();
+  ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper);
+
+  rtc::scoped_refptr<webrtc::MediaStreamInterface> stream =
+      local_pc_wrapper->GetUserMedia(
+          /*audio=*/false, {}, /*video=*/true, {.width = 1280, .height = 720});
+  rtc::scoped_refptr<VideoTrackInterface> track = stream->GetVideoTracks()[0];
+
+  absl::optional<webrtc::RtpCodecCapability> vp9 =
+      local_pc_wrapper->FindFirstSendCodecWithName(cricket::MEDIA_TYPE_VIDEO,
+                                                   "vp9");
+  ASSERT_TRUE(vp9);
+
+  // Request VP9 in L3T3 mode on the (single) encoding at AddTransceiver()
+  // time.
+  webrtc::RtpTransceiverInit init;
+  init.direction = webrtc::RtpTransceiverDirection::kSendOnly;
+  webrtc::RtpEncodingParameters encoding_parameters;
+  encoding_parameters.codec = vp9;
+  encoding_parameters.scalability_mode = "L3T3";
+  init.send_encodings.push_back(encoding_parameters);
+
+  auto transceiver_or_error =
+      local_pc_wrapper->pc()->AddTransceiver(track, init);
+  rtc::scoped_refptr<RtpTransceiverInterface> video_transceiver =
+      transceiver_or_error.MoveValue();
+  webrtc::RtpParameters parameters =
+      video_transceiver->sender()->GetParameters();
+  EXPECT_EQ(*parameters.encodings[0].codec, *vp9);
+
+  NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper);
+  local_pc_wrapper->WaitForConnection();
+  remote_pc_wrapper->WaitForConnection();
+
+  // The codec switch is asynchronous; wait until L3T3 shows up in stats.
+  EXPECT_TRUE_WAIT(
+      IsCodecIdDifferentWithScalabilityMode(local_pc_wrapper, 0, "", "L3T3"),
+      kDefaultTimeout.ms());
+
+  rtc::scoped_refptr<const RTCStatsReport> report = GetStats(local_pc_wrapper);
+  std::vector<const RTCOutboundRtpStreamStats*> outbound_rtps =
+      report->GetStatsOfType<RTCOutboundRtpStreamStats>();
+  ASSERT_EQ(outbound_rtps.size(), 1u);
+  std::string codec_name = GetCurrentCodecMimeType(report, *outbound_rtps[0]);
+  EXPECT_STRCASEEQ(("video/" + vp9->name).c_str(), codec_name.c_str());
+  EXPECT_EQ(outbound_rtps[0]->scalability_mode.value(), "L3T3");
+}
+
+// Verifies that setting `encodings[0].codec` via SetParameters() before the
+// initial negotiation selects that codec (PCMU) once media flows.
+TEST_F(PeerConnectionEncodingsIntegrationTest,
+       EncodingParameterCodecIsSetBySetParametersBeforeNegotiationAudio) {
+  rtc::scoped_refptr<PeerConnectionTestWrapper> local_pc_wrapper = CreatePc();
+  rtc::scoped_refptr<PeerConnectionTestWrapper> remote_pc_wrapper = CreatePc();
+  ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper);
+
+  rtc::scoped_refptr<webrtc::MediaStreamInterface> stream =
+      local_pc_wrapper->GetUserMedia(
+          /*audio=*/true, {}, /*video=*/false, {});
+  rtc::scoped_refptr<AudioTrackInterface> track = stream->GetAudioTracks()[0];
+
+  absl::optional<webrtc::RtpCodecCapability> pcmu =
+      local_pc_wrapper->FindFirstSendCodecWithName(cricket::MEDIA_TYPE_AUDIO,
+                                                   "pcmu");
+  // Guard against dereferencing an empty optional below.
+  ASSERT_TRUE(pcmu);
+
+  auto transceiver_or_error = local_pc_wrapper->pc()->AddTransceiver(track);
+  rtc::scoped_refptr<RtpTransceiverInterface> audio_transceiver =
+      transceiver_or_error.MoveValue();
+  webrtc::RtpParameters parameters =
+      audio_transceiver->sender()->GetParameters();
+  parameters.encodings[0].codec = pcmu;
+  EXPECT_TRUE(audio_transceiver->sender()->SetParameters(parameters).ok());
+
+  // The codec preference must be reflected when reading parameters back.
+  parameters = audio_transceiver->sender()->GetParameters();
+  EXPECT_EQ(parameters.encodings[0].codec, pcmu);
+
+  NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper);
+  local_pc_wrapper->WaitForConnection();
+  remote_pc_wrapper->WaitForConnection();
+
+  // PCMU must be the active send codec right after connecting.
+  rtc::scoped_refptr<const RTCStatsReport> report = GetStats(local_pc_wrapper);
+  std::vector<const RTCOutboundRtpStreamStats*> outbound_rtps =
+      report->GetStatsOfType<RTCOutboundRtpStreamStats>();
+  ASSERT_EQ(outbound_rtps.size(), 1u);
+  std::string codec_name = GetCurrentCodecMimeType(report, *outbound_rtps[0]);
+  EXPECT_STRCASEEQ(("audio/" + pcmu->name).c_str(), codec_name.c_str());
+}
+
+// Verifies that SetParameters() can switch the send codec to PCMU after the
+// initial negotiation already selected a different codec.
+TEST_F(PeerConnectionEncodingsIntegrationTest,
+       EncodingParameterCodecIsSetBySetParametersAfterNegotiationAudio) {
+  rtc::scoped_refptr<PeerConnectionTestWrapper> local_pc_wrapper = CreatePc();
+  rtc::scoped_refptr<PeerConnectionTestWrapper> remote_pc_wrapper = CreatePc();
+  ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper);
+
+  rtc::scoped_refptr<webrtc::MediaStreamInterface> stream =
+      local_pc_wrapper->GetUserMedia(
+          /*audio=*/true, {}, /*video=*/false, {});
+  rtc::scoped_refptr<AudioTrackInterface> track = stream->GetAudioTracks()[0];
+
+  absl::optional<webrtc::RtpCodecCapability> pcmu =
+      local_pc_wrapper->FindFirstSendCodecWithName(cricket::MEDIA_TYPE_AUDIO,
+                                                   "pcmu");
+  // Guard against dereferencing an empty optional below.
+  ASSERT_TRUE(pcmu);
+
+  auto transceiver_or_error = local_pc_wrapper->pc()->AddTransceiver(track);
+  rtc::scoped_refptr<RtpTransceiverInterface> audio_transceiver =
+      transceiver_or_error.MoveValue();
+
+  NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper);
+  local_pc_wrapper->WaitForConnection();
+  remote_pc_wrapper->WaitForConnection();
+
+  // Before SetParameters() the negotiated default codec (not PCMU) is used.
+  rtc::scoped_refptr<const RTCStatsReport> report = GetStats(local_pc_wrapper);
+  std::vector<const RTCOutboundRtpStreamStats*> outbound_rtps =
+      report->GetStatsOfType<RTCOutboundRtpStreamStats>();
+  ASSERT_EQ(outbound_rtps.size(), 1u);
+  std::string codec_name = GetCurrentCodecMimeType(report, *outbound_rtps[0]);
+  EXPECT_STRCASENE(("audio/" + pcmu->name).c_str(), codec_name.c_str());
+  std::string last_codec_id = outbound_rtps[0]->codec_id.value();
+
+  webrtc::RtpParameters parameters =
+      audio_transceiver->sender()->GetParameters();
+  parameters.encodings[0].codec = pcmu;
+  EXPECT_TRUE(audio_transceiver->sender()->SetParameters(parameters).ok());
+
+  parameters = audio_transceiver->sender()->GetParameters();
+  EXPECT_EQ(parameters.encodings[0].codec, pcmu);
+
+  // The codec switch is asynchronous; wait for the stats codec id to change.
+  EXPECT_TRUE_WAIT(IsCodecIdDifferent(local_pc_wrapper, 0, last_codec_id),
+                   kDefaultTimeout.ms());
+
+  report = GetStats(local_pc_wrapper);
+  outbound_rtps = report->GetStatsOfType<RTCOutboundRtpStreamStats>();
+  ASSERT_EQ(outbound_rtps.size(), 1u);
+  codec_name = GetCurrentCodecMimeType(report, *outbound_rtps[0]);
+  EXPECT_STRCASEEQ(("audio/" + pcmu->name).c_str(), codec_name.c_str());
+}
+
+// Verifies that setting `encodings[0].codec` (VP9) and a scalability mode
+// via SetParameters() before the initial negotiation selects that codec and
+// mode once media flows.
+TEST_F(PeerConnectionEncodingsIntegrationTest,
+       EncodingParameterCodecIsSetBySetParametersBeforeNegotiationVideo) {
+  rtc::scoped_refptr<PeerConnectionTestWrapper> local_pc_wrapper = CreatePc();
+  rtc::scoped_refptr<PeerConnectionTestWrapper> remote_pc_wrapper = CreatePc();
+  ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper);
+
+  rtc::scoped_refptr<webrtc::MediaStreamInterface> stream =
+      local_pc_wrapper->GetUserMedia(
+          /*audio=*/false, {}, /*video=*/true, {.width = 1280, .height = 720});
+  rtc::scoped_refptr<VideoTrackInterface> track = stream->GetVideoTracks()[0];
+
+  absl::optional<webrtc::RtpCodecCapability> vp9 =
+      local_pc_wrapper->FindFirstSendCodecWithName(cricket::MEDIA_TYPE_VIDEO,
+                                                   "vp9");
+  // Guard against dereferencing an empty optional below.
+  ASSERT_TRUE(vp9);
+
+  auto transceiver_or_error = local_pc_wrapper->pc()->AddTransceiver(track);
+  rtc::scoped_refptr<RtpTransceiverInterface> video_transceiver =
+      transceiver_or_error.MoveValue();
+  webrtc::RtpParameters parameters =
+      video_transceiver->sender()->GetParameters();
+  parameters.encodings[0].codec = vp9;
+  parameters.encodings[0].scalability_mode = "L3T3";
+  EXPECT_TRUE(video_transceiver->sender()->SetParameters(parameters).ok());
+
+  // The codec and mode must be reflected when reading parameters back.
+  parameters = video_transceiver->sender()->GetParameters();
+  EXPECT_EQ(parameters.encodings[0].codec, vp9);
+  EXPECT_EQ(parameters.encodings[0].scalability_mode, "L3T3");
+
+  NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper);
+  local_pc_wrapper->WaitForConnection();
+  remote_pc_wrapper->WaitForConnection();
+
+  // The codec switch is asynchronous; wait until L3T3 shows up in stats.
+  EXPECT_TRUE_WAIT(
+      IsCodecIdDifferentWithScalabilityMode(local_pc_wrapper, 0, "", "L3T3"),
+      kDefaultTimeout.ms());
+  rtc::scoped_refptr<const RTCStatsReport> report = GetStats(local_pc_wrapper);
+  std::vector<const RTCOutboundRtpStreamStats*> outbound_rtps =
+      report->GetStatsOfType<RTCOutboundRtpStreamStats>();
+  ASSERT_EQ(outbound_rtps.size(), 1u);
+  std::string codec_name = GetCurrentCodecMimeType(report, *outbound_rtps[0]);
+  EXPECT_STRCASEEQ(("video/" + vp9->name).c_str(), codec_name.c_str());
+  EXPECT_EQ(outbound_rtps[0]->scalability_mode.ValueOrDefault(""), "L3T3");
+}
+
+// Verifies that SetParameters() can switch the send codec to VP9 (L3T3)
+// after the initial negotiation already selected a different codec.
+TEST_F(PeerConnectionEncodingsIntegrationTest,
+       EncodingParameterCodecIsSetBySetParametersAfterNegotiationVideo) {
+  rtc::scoped_refptr<PeerConnectionTestWrapper> local_pc_wrapper = CreatePc();
+  rtc::scoped_refptr<PeerConnectionTestWrapper> remote_pc_wrapper = CreatePc();
+  ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper);
+
+  rtc::scoped_refptr<webrtc::MediaStreamInterface> stream =
+      local_pc_wrapper->GetUserMedia(
+          /*audio=*/false, {}, /*video=*/true, {.width = 1280, .height = 720});
+  rtc::scoped_refptr<VideoTrackInterface> track = stream->GetVideoTracks()[0];
+
+  absl::optional<webrtc::RtpCodecCapability> vp9 =
+      local_pc_wrapper->FindFirstSendCodecWithName(cricket::MEDIA_TYPE_VIDEO,
+                                                   "vp9");
+  // Guard against dereferencing an empty optional below.
+  ASSERT_TRUE(vp9);
+
+  auto transceiver_or_error = local_pc_wrapper->pc()->AddTransceiver(track);
+  rtc::scoped_refptr<RtpTransceiverInterface> video_transceiver =
+      transceiver_or_error.MoveValue();
+
+  NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper);
+  local_pc_wrapper->WaitForConnection();
+  remote_pc_wrapper->WaitForConnection();
+
+  // Before SetParameters() the negotiated default codec (not VP9) is used.
+  // Note: the mime prefix is "video/" — with "audio/" this inequality check
+  // would be vacuously true for any video codec.
+  rtc::scoped_refptr<const RTCStatsReport> report = GetStats(local_pc_wrapper);
+  std::vector<const RTCOutboundRtpStreamStats*> outbound_rtps =
+      report->GetStatsOfType<RTCOutboundRtpStreamStats>();
+  ASSERT_EQ(outbound_rtps.size(), 1u);
+  std::string codec_name = GetCurrentCodecMimeType(report, *outbound_rtps[0]);
+  EXPECT_STRCASENE(("video/" + vp9->name).c_str(), codec_name.c_str());
+  std::string last_codec_id = outbound_rtps[0]->codec_id.value();
+
+  webrtc::RtpParameters parameters =
+      video_transceiver->sender()->GetParameters();
+  parameters.encodings[0].codec = vp9;
+  parameters.encodings[0].scalability_mode = "L3T3";
+  EXPECT_TRUE(video_transceiver->sender()->SetParameters(parameters).ok());
+
+  parameters = video_transceiver->sender()->GetParameters();
+  EXPECT_EQ(parameters.encodings[0].codec, vp9);
+  EXPECT_EQ(parameters.encodings[0].scalability_mode, "L3T3");
+
+  // The codec switch is asynchronous; wait for the stats codec id to change.
+  EXPECT_TRUE_WAIT(IsCodecIdDifferentWithScalabilityMode(local_pc_wrapper, 0,
+                                                         last_codec_id, "L3T3"),
+                   kDefaultTimeout.ms());
+
+  report = GetStats(local_pc_wrapper);
+  outbound_rtps = report->GetStatsOfType<RTCOutboundRtpStreamStats>();
+  ASSERT_EQ(outbound_rtps.size(), 1u);
+  codec_name = GetCurrentCodecMimeType(report, *outbound_rtps[0]);
+  EXPECT_STRCASEEQ(("video/" + vp9->name).c_str(), codec_name.c_str());
+  EXPECT_EQ(outbound_rtps[0]->scalability_mode.value(), "L3T3");
+}
+
+TEST_F(PeerConnectionEncodingsIntegrationTest,
+       AddTransceiverRejectsUnknownCodecParameterAudio) {
+  // Requesting a send codec the PC does not know about must make
+  // AddTransceiver() fail with UNSUPPORTED_OPERATION.
+  rtc::scoped_refptr<PeerConnectionTestWrapper> local_pc_wrapper = CreatePc();
+
+  webrtc::RtpCodec unknown_codec;
+  unknown_codec.kind = cricket::MEDIA_TYPE_AUDIO;
+  unknown_codec.name = "FOOBAR";
+  unknown_codec.clock_rate = 90000;
+  unknown_codec.num_channels = 2;
+
+  webrtc::RtpEncodingParameters encoding;
+  encoding.codec = unknown_codec;
+  webrtc::RtpTransceiverInit init;
+  init.direction = webrtc::RtpTransceiverDirection::kSendOnly;
+  init.send_encodings.push_back(encoding);
+
+  auto result =
+      local_pc_wrapper->pc()->AddTransceiver(cricket::MEDIA_TYPE_AUDIO, init);
+  EXPECT_FALSE(result.ok());
+  EXPECT_EQ(result.error().type(), RTCErrorType::UNSUPPORTED_OPERATION);
+}
+
+TEST_F(PeerConnectionEncodingsIntegrationTest,
+       AddTransceiverRejectsUnknownCodecParameterVideo) {
+  // Requesting a send codec the PC does not know about must make
+  // AddTransceiver() fail with UNSUPPORTED_OPERATION.
+  rtc::scoped_refptr<PeerConnectionTestWrapper> local_pc_wrapper = CreatePc();
+
+  webrtc::RtpCodec unknown_codec;
+  unknown_codec.kind = cricket::MEDIA_TYPE_VIDEO;
+  unknown_codec.name = "FOOBAR";
+  unknown_codec.clock_rate = 90000;
+
+  webrtc::RtpEncodingParameters encoding;
+  encoding.codec = unknown_codec;
+  webrtc::RtpTransceiverInit init;
+  init.direction = webrtc::RtpTransceiverDirection::kSendOnly;
+  init.send_encodings.push_back(encoding);
+
+  auto result =
+      local_pc_wrapper->pc()->AddTransceiver(cricket::MEDIA_TYPE_VIDEO, init);
+  EXPECT_FALSE(result.ok());
+  EXPECT_EQ(result.error().type(), RTCErrorType::UNSUPPORTED_OPERATION);
+}
+
+// SetParameters() must reject a codec that is not among the sender's
+// capabilities at all (audio variant).
+TEST_F(PeerConnectionEncodingsIntegrationTest,
+       SetParametersRejectsUnknownCodecParameterAudio) {
+  rtc::scoped_refptr<PeerConnectionTestWrapper> local_pc_wrapper = CreatePc();
+
+  webrtc::RtpCodec dummy_codec;
+  dummy_codec.kind = cricket::MEDIA_TYPE_AUDIO;
+  dummy_codec.name = "FOOBAR";
+  dummy_codec.clock_rate = 90000;
+  dummy_codec.num_channels = 2;
+
+  auto transceiver_or_error =
+      local_pc_wrapper->pc()->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+  ASSERT_TRUE(transceiver_or_error.ok());
+  rtc::scoped_refptr<RtpTransceiverInterface> audio_transceiver =
+      transceiver_or_error.MoveValue();
+
+  webrtc::RtpParameters parameters =
+      audio_transceiver->sender()->GetParameters();
+  parameters.encodings[0].codec = dummy_codec;
+  RTCError error = audio_transceiver->sender()->SetParameters(parameters);
+  EXPECT_EQ(error.type(), RTCErrorType::INVALID_MODIFICATION);
+}
+
+// SetParameters() must reject a codec that is not among the sender's
+// capabilities at all (video variant).
+TEST_F(PeerConnectionEncodingsIntegrationTest,
+       SetParametersRejectsUnknownCodecParameterVideo) {
+  rtc::scoped_refptr<PeerConnectionTestWrapper> local_pc_wrapper = CreatePc();
+
+  webrtc::RtpCodec dummy_codec;
+  dummy_codec.kind = cricket::MEDIA_TYPE_VIDEO;
+  dummy_codec.name = "FOOBAR";
+  dummy_codec.clock_rate = 90000;
+
+  auto transceiver_or_error =
+      local_pc_wrapper->pc()->AddTransceiver(cricket::MEDIA_TYPE_VIDEO);
+  ASSERT_TRUE(transceiver_or_error.ok());
+  rtc::scoped_refptr<RtpTransceiverInterface> video_transceiver =
+      transceiver_or_error.MoveValue();
+
+  webrtc::RtpParameters parameters =
+      video_transceiver->sender()->GetParameters();
+  parameters.encodings[0].codec = dummy_codec;
+  RTCError error = video_transceiver->sender()->SetParameters(parameters);
+  EXPECT_EQ(error.type(), RTCErrorType::INVALID_MODIFICATION);
+}
+
+// SetParameters() must reject a codec (Opus) that was excluded from the
+// transceiver's codec preferences, even though the PC supports it.
+TEST_F(PeerConnectionEncodingsIntegrationTest,
+       SetParametersRejectsNonPreferredCodecParameterAudio) {
+  rtc::scoped_refptr<PeerConnectionTestWrapper> local_pc_wrapper = CreatePc();
+
+  absl::optional<webrtc::RtpCodecCapability> opus =
+      local_pc_wrapper->FindFirstSendCodecWithName(cricket::MEDIA_TYPE_AUDIO,
+                                                   "opus");
+  ASSERT_TRUE(opus);
+
+  // Build the sender capability list minus Opus (erase-remove idiom).
+  std::vector<webrtc::RtpCodecCapability> not_opus_codecs =
+      local_pc_wrapper->pc_factory()
+          ->GetRtpSenderCapabilities(cricket::MEDIA_TYPE_AUDIO)
+          .codecs;
+  not_opus_codecs.erase(
+      std::remove_if(not_opus_codecs.begin(), not_opus_codecs.end(),
+                     [&](const auto& codec) {
+                       return absl::EqualsIgnoreCase(codec.name, opus->name);
+                     }),
+      not_opus_codecs.end());
+
+  auto transceiver_or_error =
+      local_pc_wrapper->pc()->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+  ASSERT_TRUE(transceiver_or_error.ok());
+  rtc::scoped_refptr<RtpTransceiverInterface> audio_transceiver =
+      transceiver_or_error.MoveValue();
+  ASSERT_TRUE(audio_transceiver->SetCodecPreferences(not_opus_codecs).ok());
+
+  webrtc::RtpParameters parameters =
+      audio_transceiver->sender()->GetParameters();
+  parameters.encodings[0].codec = opus;
+  RTCError error = audio_transceiver->sender()->SetParameters(parameters);
+  EXPECT_EQ(error.type(), RTCErrorType::INVALID_MODIFICATION);
+}
+
+// SetParameters() must reject a codec (VP8) that was excluded from the
+// transceiver's codec preferences, even though the PC supports it.
+TEST_F(PeerConnectionEncodingsIntegrationTest,
+       SetParametersRejectsNonPreferredCodecParameterVideo) {
+  rtc::scoped_refptr<PeerConnectionTestWrapper> local_pc_wrapper = CreatePc();
+
+  absl::optional<webrtc::RtpCodecCapability> vp8 =
+      local_pc_wrapper->FindFirstSendCodecWithName(cricket::MEDIA_TYPE_VIDEO,
+                                                   "vp8");
+  ASSERT_TRUE(vp8);
+
+  // Build the sender capability list minus VP8 (erase-remove idiom).
+  std::vector<webrtc::RtpCodecCapability> not_vp8_codecs =
+      local_pc_wrapper->pc_factory()
+          ->GetRtpSenderCapabilities(cricket::MEDIA_TYPE_VIDEO)
+          .codecs;
+  not_vp8_codecs.erase(
+      std::remove_if(not_vp8_codecs.begin(), not_vp8_codecs.end(),
+                     [&](const auto& codec) {
+                       return absl::EqualsIgnoreCase(codec.name, vp8->name);
+                     }),
+      not_vp8_codecs.end());
+
+  auto transceiver_or_error =
+      local_pc_wrapper->pc()->AddTransceiver(cricket::MEDIA_TYPE_VIDEO);
+  ASSERT_TRUE(transceiver_or_error.ok());
+  rtc::scoped_refptr<RtpTransceiverInterface> video_transceiver =
+      transceiver_or_error.MoveValue();
+  ASSERT_TRUE(video_transceiver->SetCodecPreferences(not_vp8_codecs).ok());
+
+  webrtc::RtpParameters parameters =
+      video_transceiver->sender()->GetParameters();
+  parameters.encodings[0].codec = vp8;
+  RTCError error = video_transceiver->sender()->SetParameters(parameters);
+  EXPECT_EQ(error.type(), RTCErrorType::INVALID_MODIFICATION);
+}
+
+// After excluding Opus from the local preferences and negotiating, Opus is
+// not part of the negotiated codec set, so SetParameters() must reject it.
+TEST_F(PeerConnectionEncodingsIntegrationTest,
+       SetParametersRejectsNonNegotiatedCodecParameterAudio) {
+  rtc::scoped_refptr<PeerConnectionTestWrapper> local_pc_wrapper = CreatePc();
+  rtc::scoped_refptr<PeerConnectionTestWrapper> remote_pc_wrapper = CreatePc();
+  ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper);
+
+  absl::optional<webrtc::RtpCodecCapability> opus =
+      local_pc_wrapper->FindFirstSendCodecWithName(cricket::MEDIA_TYPE_AUDIO,
+                                                   "opus");
+  ASSERT_TRUE(opus);
+
+  // Build the sender capability list minus Opus (erase-remove idiom).
+  std::vector<webrtc::RtpCodecCapability> not_opus_codecs =
+      local_pc_wrapper->pc_factory()
+          ->GetRtpSenderCapabilities(cricket::MEDIA_TYPE_AUDIO)
+          .codecs;
+  not_opus_codecs.erase(
+      std::remove_if(not_opus_codecs.begin(), not_opus_codecs.end(),
+                     [&](const auto& codec) {
+                       return absl::EqualsIgnoreCase(codec.name, opus->name);
+                     }),
+      not_opus_codecs.end());
+
+  auto transceiver_or_error =
+      local_pc_wrapper->pc()->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+  ASSERT_TRUE(transceiver_or_error.ok());
+  rtc::scoped_refptr<RtpTransceiverInterface> audio_transceiver =
+      transceiver_or_error.MoveValue();
+  ASSERT_TRUE(audio_transceiver->SetCodecPreferences(not_opus_codecs).ok());
+
+  NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper);
+  local_pc_wrapper->WaitForConnection();
+  remote_pc_wrapper->WaitForConnection();
+
+  webrtc::RtpParameters parameters =
+      audio_transceiver->sender()->GetParameters();
+  parameters.encodings[0].codec = opus;
+  RTCError error = audio_transceiver->sender()->SetParameters(parameters);
+  EXPECT_EQ(error.type(), RTCErrorType::INVALID_MODIFICATION);
+}
+
+// The local side offers Opus, but the remote side strips it from its answer
+// via codec preferences. SetParameters() with Opus must then fail locally
+// because the codec did not survive negotiation.
+TEST_F(PeerConnectionEncodingsIntegrationTest,
+       SetParametersRejectsNonRemotelyNegotiatedCodecParameterAudio) {
+  rtc::scoped_refptr<PeerConnectionTestWrapper> local_pc_wrapper = CreatePc();
+  rtc::scoped_refptr<PeerConnectionTestWrapper> remote_pc_wrapper = CreatePc();
+  ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper);
+
+  absl::optional<webrtc::RtpCodecCapability> opus =
+      local_pc_wrapper->FindFirstSendCodecWithName(cricket::MEDIA_TYPE_AUDIO,
+                                                   "opus");
+  ASSERT_TRUE(opus);
+
+  // Build the sender capability list minus Opus (erase-remove idiom).
+  std::vector<webrtc::RtpCodecCapability> not_opus_codecs =
+      local_pc_wrapper->pc_factory()
+          ->GetRtpSenderCapabilities(cricket::MEDIA_TYPE_AUDIO)
+          .codecs;
+  not_opus_codecs.erase(
+      std::remove_if(not_opus_codecs.begin(), not_opus_codecs.end(),
+                     [&](const auto& codec) {
+                       return absl::EqualsIgnoreCase(codec.name, opus->name);
+                     }),
+      not_opus_codecs.end());
+
+  auto transceiver_or_error =
+      local_pc_wrapper->pc()->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+  ASSERT_TRUE(transceiver_or_error.ok());
+  rtc::scoped_refptr<RtpTransceiverInterface> audio_transceiver =
+      transceiver_or_error.MoveValue();
+
+  // Negotiation, create offer and apply it
+  std::unique_ptr<SessionDescriptionInterface> offer =
+      CreateOffer(local_pc_wrapper);
+  rtc::scoped_refptr<MockSetSessionDescriptionObserver> p1 =
+      SetLocalDescription(local_pc_wrapper, offer.get());
+  rtc::scoped_refptr<MockSetSessionDescriptionObserver> p2 =
+      SetRemoteDescription(remote_pc_wrapper, offer.get());
+  EXPECT_TRUE(Await({p1, p2}));
+
+  // Update the remote transceiver to reject Opus
+  std::vector<rtc::scoped_refptr<RtpTransceiverInterface>> remote_transceivers =
+      remote_pc_wrapper->pc()->GetTransceivers();
+  ASSERT_TRUE(!remote_transceivers.empty());
+  rtc::scoped_refptr<RtpTransceiverInterface> remote_audio_transceiver =
+      remote_transceivers[0];
+  ASSERT_TRUE(
+      remote_audio_transceiver->SetCodecPreferences(not_opus_codecs).ok());
+
+  // Create answer and apply it
+  std::unique_ptr<SessionDescriptionInterface> answer =
+      CreateAnswer(remote_pc_wrapper);
+  p1 = SetLocalDescription(remote_pc_wrapper, answer.get());
+  p2 = SetRemoteDescription(local_pc_wrapper, answer.get());
+  EXPECT_TRUE(Await({p1, p2}));
+
+  local_pc_wrapper->WaitForConnection();
+  remote_pc_wrapper->WaitForConnection();
+
+  webrtc::RtpParameters parameters =
+      audio_transceiver->sender()->GetParameters();
+  parameters.encodings[0].codec = opus;
+  RTCError error = audio_transceiver->sender()->SetParameters(parameters);
+  EXPECT_EQ(error.type(), RTCErrorType::INVALID_MODIFICATION);
+}
+
+// After excluding VP8 from the local preferences and negotiating, VP8 is
+// not part of the negotiated codec set, so SetParameters() must reject it.
+TEST_F(PeerConnectionEncodingsIntegrationTest,
+       SetParametersRejectsNonNegotiatedCodecParameterVideo) {
+  rtc::scoped_refptr<PeerConnectionTestWrapper> local_pc_wrapper = CreatePc();
+  rtc::scoped_refptr<PeerConnectionTestWrapper> remote_pc_wrapper = CreatePc();
+  ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper);
+
+  absl::optional<webrtc::RtpCodecCapability> vp8 =
+      local_pc_wrapper->FindFirstSendCodecWithName(cricket::MEDIA_TYPE_VIDEO,
+                                                   "vp8");
+  ASSERT_TRUE(vp8);
+
+  // Build the sender capability list minus VP8 (erase-remove idiom).
+  std::vector<webrtc::RtpCodecCapability> not_vp8_codecs =
+      local_pc_wrapper->pc_factory()
+          ->GetRtpSenderCapabilities(cricket::MEDIA_TYPE_VIDEO)
+          .codecs;
+  not_vp8_codecs.erase(
+      std::remove_if(not_vp8_codecs.begin(), not_vp8_codecs.end(),
+                     [&](const auto& codec) {
+                       return absl::EqualsIgnoreCase(codec.name, vp8->name);
+                     }),
+      not_vp8_codecs.end());
+
+  auto transceiver_or_error =
+      local_pc_wrapper->pc()->AddTransceiver(cricket::MEDIA_TYPE_VIDEO);
+  ASSERT_TRUE(transceiver_or_error.ok());
+  rtc::scoped_refptr<RtpTransceiverInterface> video_transceiver =
+      transceiver_or_error.MoveValue();
+  ASSERT_TRUE(video_transceiver->SetCodecPreferences(not_vp8_codecs).ok());
+
+  NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper);
+  local_pc_wrapper->WaitForConnection();
+  remote_pc_wrapper->WaitForConnection();
+
+  webrtc::RtpParameters parameters =
+      video_transceiver->sender()->GetParameters();
+  parameters.encodings[0].codec = vp8;
+  RTCError error = video_transceiver->sender()->SetParameters(parameters);
+  EXPECT_EQ(error.type(), RTCErrorType::INVALID_MODIFICATION);
+}
+
+// The local side offers VP8, but the remote side strips it from its answer
+// via codec preferences. SetParameters() with VP8 must then fail locally
+// because the codec did not survive negotiation.
+TEST_F(PeerConnectionEncodingsIntegrationTest,
+       SetParametersRejectsNonRemotelyNegotiatedCodecParameterVideo) {
+  rtc::scoped_refptr<PeerConnectionTestWrapper> local_pc_wrapper = CreatePc();
+  rtc::scoped_refptr<PeerConnectionTestWrapper> remote_pc_wrapper = CreatePc();
+  ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper);
+
+  absl::optional<webrtc::RtpCodecCapability> vp8 =
+      local_pc_wrapper->FindFirstSendCodecWithName(cricket::MEDIA_TYPE_VIDEO,
+                                                   "vp8");
+  ASSERT_TRUE(vp8);
+
+  // Build the sender capability list minus VP8 (erase-remove idiom).
+  std::vector<webrtc::RtpCodecCapability> not_vp8_codecs =
+      local_pc_wrapper->pc_factory()
+          ->GetRtpSenderCapabilities(cricket::MEDIA_TYPE_VIDEO)
+          .codecs;
+  not_vp8_codecs.erase(
+      std::remove_if(not_vp8_codecs.begin(), not_vp8_codecs.end(),
+                     [&](const auto& codec) {
+                       return absl::EqualsIgnoreCase(codec.name, vp8->name);
+                     }),
+      not_vp8_codecs.end());
+
+  auto transceiver_or_error =
+      local_pc_wrapper->pc()->AddTransceiver(cricket::MEDIA_TYPE_VIDEO);
+  ASSERT_TRUE(transceiver_or_error.ok());
+  rtc::scoped_refptr<RtpTransceiverInterface> video_transceiver =
+      transceiver_or_error.MoveValue();
+
+  // Negotiation, create offer and apply it
+  std::unique_ptr<SessionDescriptionInterface> offer =
+      CreateOffer(local_pc_wrapper);
+  rtc::scoped_refptr<MockSetSessionDescriptionObserver> p1 =
+      SetLocalDescription(local_pc_wrapper, offer.get());
+  rtc::scoped_refptr<MockSetSessionDescriptionObserver> p2 =
+      SetRemoteDescription(remote_pc_wrapper, offer.get());
+  EXPECT_TRUE(Await({p1, p2}));
+
+  // Update the remote transceiver to reject VP8
+  std::vector<rtc::scoped_refptr<RtpTransceiverInterface>> remote_transceivers =
+      remote_pc_wrapper->pc()->GetTransceivers();
+  ASSERT_TRUE(!remote_transceivers.empty());
+  rtc::scoped_refptr<RtpTransceiverInterface> remote_video_transceiver =
+      remote_transceivers[0];
+  ASSERT_TRUE(
+      remote_video_transceiver->SetCodecPreferences(not_vp8_codecs).ok());
+
+  // Create answer and apply it
+  std::unique_ptr<SessionDescriptionInterface> answer =
+      CreateAnswer(remote_pc_wrapper);
+  p1 = SetLocalDescription(remote_pc_wrapper, answer.get());
+  p2 = SetRemoteDescription(local_pc_wrapper, answer.get());
+  EXPECT_TRUE(Await({p1, p2}));
+
+  local_pc_wrapper->WaitForConnection();
+  remote_pc_wrapper->WaitForConnection();
+
+  webrtc::RtpParameters parameters =
+      video_transceiver->sender()->GetParameters();
+  parameters.encodings[0].codec = vp8;
+  RTCError error = video_transceiver->sender()->SetParameters(parameters);
+  EXPECT_EQ(error.type(), RTCErrorType::INVALID_MODIFICATION);
+}
+
+// A codec selected via RtpTransceiverInit survives the first negotiation,
+// but must be cleared from the encoding parameters once a renegotiation
+// removes that codec from the negotiated set.
+TEST_F(PeerConnectionEncodingsIntegrationTest,
+       EncodingParametersCodecRemovedAfterNegotiationAudio) {
+  rtc::scoped_refptr<PeerConnectionTestWrapper> local_pc_wrapper = CreatePc();
+  rtc::scoped_refptr<PeerConnectionTestWrapper> remote_pc_wrapper = CreatePc();
+  ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper);
+
+  absl::optional<webrtc::RtpCodecCapability> opus =
+      local_pc_wrapper->FindFirstSendCodecWithName(cricket::MEDIA_TYPE_AUDIO,
+                                                   "opus");
+  ASSERT_TRUE(opus);
+
+  // Build the sender capability list minus Opus (erase-remove idiom).
+  std::vector<webrtc::RtpCodecCapability> not_opus_codecs =
+      local_pc_wrapper->pc_factory()
+          ->GetRtpSenderCapabilities(cricket::MEDIA_TYPE_AUDIO)
+          .codecs;
+  not_opus_codecs.erase(
+      std::remove_if(not_opus_codecs.begin(), not_opus_codecs.end(),
+                     [&](const auto& codec) {
+                       return absl::EqualsIgnoreCase(codec.name, opus->name);
+                     }),
+      not_opus_codecs.end());
+
+  // Request Opus on the (single) encoding at AddTransceiver() time.
+  webrtc::RtpTransceiverInit init;
+  init.direction = webrtc::RtpTransceiverDirection::kSendOnly;
+  webrtc::RtpEncodingParameters encoding_parameters;
+  encoding_parameters.codec = opus;
+  init.send_encodings.push_back(encoding_parameters);
+
+  auto transceiver_or_error =
+      local_pc_wrapper->pc()->AddTransceiver(cricket::MEDIA_TYPE_AUDIO, init);
+  ASSERT_TRUE(transceiver_or_error.ok());
+  rtc::scoped_refptr<RtpTransceiverInterface> audio_transceiver =
+      transceiver_or_error.MoveValue();
+
+  NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper);
+  local_pc_wrapper->WaitForConnection();
+  remote_pc_wrapper->WaitForConnection();
+
+  // After the first negotiation Opus is still the selected codec.
+  webrtc::RtpParameters parameters =
+      audio_transceiver->sender()->GetParameters();
+  EXPECT_EQ(parameters.encodings[0].codec, opus);
+
+  // Renegotiate without Opus; the encoding's codec field must be cleared.
+  ASSERT_TRUE(audio_transceiver->SetCodecPreferences(not_opus_codecs).ok());
+  NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper);
+
+  parameters = audio_transceiver->sender()->GetParameters();
+  EXPECT_FALSE(parameters.encodings[0].codec);
+}
+
+TEST_F(PeerConnectionEncodingsIntegrationTest,
+       EncodingParametersRedEnabledBeforeNegotiationAudio) {  // Codec-preference order (RED first vs Opus first) toggles RED.
+  rtc::scoped_refptr<PeerConnectionTestWrapper> local_pc_wrapper = CreatePc();
+  rtc::scoped_refptr<PeerConnectionTestWrapper> remote_pc_wrapper = CreatePc();
+  ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper);
+
+  std::vector<webrtc::RtpCodecCapability> send_codecs =
+      local_pc_wrapper->pc_factory()
+          ->GetRtpSenderCapabilities(cricket::MEDIA_TYPE_AUDIO)
+          .codecs;
+
+  absl::optional<webrtc::RtpCodecCapability> opus =
+      local_pc_wrapper->FindFirstSendCodecWithName(cricket::MEDIA_TYPE_AUDIO,
+                                                   "opus");
+  ASSERT_TRUE(opus);
+
+  absl::optional<webrtc::RtpCodecCapability> red =
+      local_pc_wrapper->FindFirstSendCodecWithName(cricket::MEDIA_TYPE_AUDIO,
+                                                   "red");
+  ASSERT_TRUE(red);
+
+  webrtc::RtpTransceiverInit init;
+  init.direction = webrtc::RtpTransceiverDirection::kSendOnly;
+  webrtc::RtpEncodingParameters encoding_parameters;
+  encoding_parameters.codec = opus;  // Pin the encoding to Opus before negotiation.
+  init.send_encodings.push_back(encoding_parameters);
+
+  auto transceiver_or_error =
+      local_pc_wrapper->pc()->AddTransceiver(cricket::MEDIA_TYPE_AUDIO, init);
+  ASSERT_TRUE(transceiver_or_error.ok());
+  rtc::scoped_refptr<RtpTransceiverInterface> audio_transceiver =
+      transceiver_or_error.MoveValue();
+
+  // Preferring RED over Opus should enable RED with Opus encoding.
+  send_codecs[0] = red.value();
+  send_codecs[1] = opus.value();
+
+  ASSERT_TRUE(audio_transceiver->SetCodecPreferences(send_codecs).ok());
+  NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper);
+  local_pc_wrapper->WaitForConnection();
+  remote_pc_wrapper->WaitForConnection();
+
+  webrtc::RtpParameters parameters =
+      audio_transceiver->sender()->GetParameters();
+  EXPECT_EQ(parameters.encodings[0].codec, opus);  // Encoding still reports Opus; negotiated list leads with RED.
+  EXPECT_EQ(parameters.codecs[0].payload_type, red->preferred_payload_type);
+  EXPECT_EQ(parameters.codecs[0].name, red->name);
+
+  // Check that it's possible to switch back to Opus without RED.
+  send_codecs[0] = opus.value();
+  send_codecs[1] = red.value();
+
+  ASSERT_TRUE(audio_transceiver->SetCodecPreferences(send_codecs).ok());
+  NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper);
+
+  parameters = audio_transceiver->sender()->GetParameters();
+  EXPECT_EQ(parameters.encodings[0].codec, opus);
+  EXPECT_EQ(parameters.codecs[0].payload_type, opus->preferred_payload_type);
+  EXPECT_EQ(parameters.codecs[0].name, opus->name);
+}
+
+TEST_F(PeerConnectionEncodingsIntegrationTest,
+       SetParametersRejectsScalabilityModeForSelectedCodec) {  // Changing scalability_mode via SetParameters must be rejected.
+  rtc::scoped_refptr<PeerConnectionTestWrapper> local_pc_wrapper = CreatePc();
+
+  absl::optional<webrtc::RtpCodecCapability> vp8 =
+      local_pc_wrapper->FindFirstSendCodecWithName(cricket::MEDIA_TYPE_VIDEO,
+                                                   "vp8");
+  ASSERT_TRUE(vp8);
+
+  webrtc::RtpTransceiverInit init;
+  init.direction = webrtc::RtpTransceiverDirection::kSendOnly;
+  webrtc::RtpEncodingParameters encoding_parameters;
+  encoding_parameters.codec = vp8;
+  encoding_parameters.scalability_mode = "L1T3";  // Mode fixed at AddTransceiver time.
+  init.send_encodings.push_back(encoding_parameters);
+
+  auto transceiver_or_error =
+      local_pc_wrapper->pc()->AddTransceiver(cricket::MEDIA_TYPE_VIDEO, init);
+  ASSERT_TRUE(transceiver_or_error.ok());
+  rtc::scoped_refptr<RtpTransceiverInterface> video_transceiver =
+      transceiver_or_error.MoveValue();
+
+  webrtc::RtpParameters parameters =
+      video_transceiver->sender()->GetParameters();
+  parameters.encodings[0].scalability_mode = "L3T3";  // Attempt to switch L1T3 -> L3T3.
+  RTCError error = video_transceiver->sender()->SetParameters(parameters);
+  EXPECT_EQ(error.type(), RTCErrorType::INVALID_MODIFICATION);
+}
+
+TEST_F(PeerConnectionEncodingsIntegrationTest,
+       EncodingParametersCodecRemovedByNegotiationVideo) {  // Removing VP8 from preferences clears per-encoding codec on both layers.
+  rtc::scoped_refptr<PeerConnectionTestWrapper> local_pc_wrapper = CreatePc();
+  rtc::scoped_refptr<PeerConnectionTestWrapper> remote_pc_wrapper = CreatePc();
+  ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper);
+
+  absl::optional<webrtc::RtpCodecCapability> vp8 =
+      local_pc_wrapper->FindFirstSendCodecWithName(cricket::MEDIA_TYPE_VIDEO,
+                                                   "vp8");
+  ASSERT_TRUE(vp8);
+
+  std::vector<webrtc::RtpCodecCapability> not_vp8_codecs =
+      local_pc_wrapper->pc_factory()
+          ->GetRtpSenderCapabilities(cricket::MEDIA_TYPE_VIDEO)
+          .codecs;
+  not_vp8_codecs.erase(
+      std::remove_if(not_vp8_codecs.begin(), not_vp8_codecs.end(),
+                     [&](const auto& codec) {
+                       return absl::EqualsIgnoreCase(codec.name, vp8->name);
+                     }),
+      not_vp8_codecs.end());  // Every send codec except VP8.
+
+  webrtc::RtpTransceiverInit init;
+  init.direction = webrtc::RtpTransceiverDirection::kSendOnly;
+  webrtc::RtpEncodingParameters encoding_parameters;
+  encoding_parameters.rid = "h";
+  encoding_parameters.codec = vp8;  // Both simulcast layers pinned to VP8.
+  encoding_parameters.scale_resolution_down_by = 2;
+  init.send_encodings.push_back(encoding_parameters);
+  encoding_parameters.rid = "f";
+  encoding_parameters.scale_resolution_down_by = 1;
+  init.send_encodings.push_back(encoding_parameters);
+
+  auto transceiver_or_error =
+      local_pc_wrapper->pc()->AddTransceiver(cricket::MEDIA_TYPE_VIDEO, init);
+  ASSERT_TRUE(transceiver_or_error.ok());
+  rtc::scoped_refptr<RtpTransceiverInterface> video_transceiver =
+      transceiver_or_error.MoveValue();
+
+  NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper);
+  local_pc_wrapper->WaitForConnection();
+  remote_pc_wrapper->WaitForConnection();
+
+  webrtc::RtpParameters parameters =
+      video_transceiver->sender()->GetParameters();
+  ASSERT_EQ(parameters.encodings.size(), 2u);
+  EXPECT_EQ(parameters.encodings[0].codec, vp8);
+  EXPECT_EQ(parameters.encodings[1].codec, vp8);
+
+  ASSERT_TRUE(video_transceiver->SetCodecPreferences(not_vp8_codecs).ok());
+  NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper);
+
+  parameters = video_transceiver->sender()->GetParameters();
+  EXPECT_FALSE(parameters.encodings[0].codec);  // Re-negotiation without VP8 unsets the pinned codec.
+  EXPECT_FALSE(parameters.encodings[1].codec);
+}
+
+TEST_F(PeerConnectionEncodingsIntegrationTest,
+       AddTransceiverRejectsMixedCodecSimulcast) {
+  // Mixed Codec Simulcast is not yet supported, so we ensure that we reject
+  // such parameters.
+  rtc::scoped_refptr<PeerConnectionTestWrapper> local_pc_wrapper = CreatePc();
+  rtc::scoped_refptr<PeerConnectionTestWrapper> remote_pc_wrapper = CreatePc();
+  ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper);
+
+  absl::optional<webrtc::RtpCodecCapability> vp8 =
+      local_pc_wrapper->FindFirstSendCodecWithName(cricket::MEDIA_TYPE_VIDEO,
+                                                   "vp8");
+  ASSERT_TRUE(vp8);
+  absl::optional<webrtc::RtpCodecCapability> vp9 =
+      local_pc_wrapper->FindFirstSendCodecWithName(cricket::MEDIA_TYPE_VIDEO,
+                                                   "vp9");  // NOTE(review): no ASSERT_TRUE(vp9) unlike vp8 — assumes VP9 is available; confirm.
+
+  webrtc::RtpTransceiverInit init;
+  init.direction = webrtc::RtpTransceiverDirection::kSendOnly;
+  webrtc::RtpEncodingParameters encoding_parameters;
+  encoding_parameters.rid = "h";
+  encoding_parameters.codec = vp8;
+  encoding_parameters.scale_resolution_down_by = 2;
+  init.send_encodings.push_back(encoding_parameters);
+  encoding_parameters.rid = "f";
+  encoding_parameters.codec = vp9;  // Second layer uses a different codec -> mixed-codec simulcast.
+  encoding_parameters.scale_resolution_down_by = 1;
+  init.send_encodings.push_back(encoding_parameters);
+
+  auto transceiver_or_error =
+      local_pc_wrapper->pc()->AddTransceiver(cricket::MEDIA_TYPE_VIDEO, init);
+  ASSERT_FALSE(transceiver_or_error.ok());
+  EXPECT_EQ(transceiver_or_error.error().type(),
+            RTCErrorType::UNSUPPORTED_OPERATION);
+}
+
+// Tests that use the standard path (specifying both `scalability_mode` and
+// `scale_resolution_down_by`) should pass for all codecs.
+class PeerConnectionEncodingsIntegrationParameterizedTest
+    : public PeerConnectionEncodingsIntegrationTest,
+      public ::testing::WithParamInterface<std::string> {  // Param is the codec name, e.g. "VP8".
+ public:
+  PeerConnectionEncodingsIntegrationParameterizedTest()
+      : codec_name_(GetParam()), mime_type_("video/" + codec_name_) {}
+
+  // Work-around for the fact that whether or not AV1 is supported is not known
+  // at compile-time so we have to skip tests early if missing.
+  // TODO(https://crbug.com/webrtc/15011): Increase availability of AV1 or make
+  // it possible to check support at compile-time.
+  bool SkipTestDueToAv1Missing(
+      rtc::scoped_refptr<PeerConnectionTestWrapper> local_pc_wrapper) {
+    if (codec_name_ == "AV1" &&
+        !HasSenderVideoCodecCapability(local_pc_wrapper, "AV1")) {
+      RTC_LOG(LS_WARNING) << "\n***\nAV1 is not available, skipping test.\n***";
+      return true;
+    }
+    return false;
+  }
+
+ protected:
+  const std::string codec_name_;  // E.g. "VP9"
+  const std::string mime_type_;   // E.g. "video/VP9"
+};
+
+TEST_P(PeerConnectionEncodingsIntegrationParameterizedTest, AllLayersInactive) {  // With every layer inactive, no RTP bytes are sent.
+  rtc::scoped_refptr<PeerConnectionTestWrapper> local_pc_wrapper = CreatePc();
+  if (SkipTestDueToAv1Missing(local_pc_wrapper)) {
+    return;
+  }
+  rtc::scoped_refptr<PeerConnectionTestWrapper> remote_pc_wrapper = CreatePc();
+  ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper);
+
+  std::vector<cricket::SimulcastLayer> layers =
+      CreateLayers({"f", "h", "q"}, /*active=*/true);
+  rtc::scoped_refptr<RtpTransceiverInterface> transceiver =
+      AddTransceiverWithSimulcastLayers(local_pc_wrapper, remote_pc_wrapper,
+                                        layers);
+  std::vector<RtpCodecCapability> codecs =
+      GetCapabilitiesAndRestrictToCodec(local_pc_wrapper, codec_name_);
+  transceiver->SetCodecPreferences(codecs);
+
+  // Standard mode and all layers inactive.
+  rtc::scoped_refptr<RtpSenderInterface> sender = transceiver->sender();
+  RtpParameters parameters = sender->GetParameters();
+  ASSERT_THAT(parameters.encodings, SizeIs(3));
+  parameters.encodings[0].scalability_mode = "L1T3";
+  parameters.encodings[0].scale_resolution_down_by = 1;
+  parameters.encodings[0].active = false;
+  parameters.encodings[1].active = false;
+  parameters.encodings[2].active = false;
+  sender->SetParameters(parameters);
+
+  NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper);
+  local_pc_wrapper->WaitForConnection();
+  remote_pc_wrapper->WaitForConnection();
+
+  // Ensure no media is flowing (1 second should be enough).
+  rtc::Thread::Current()->SleepMs(1000);
+  rtc::scoped_refptr<const RTCStatsReport> report = GetStats(local_pc_wrapper);
+  std::vector<const RTCOutboundRtpStreamStats*> outbound_rtps =
+      report->GetStatsOfType<RTCOutboundRtpStreamStats>();
+  ASSERT_THAT(outbound_rtps, SizeIs(3u));
+  EXPECT_EQ(*outbound_rtps[0]->bytes_sent, 0u);
+  EXPECT_EQ(*outbound_rtps[1]->bytes_sent, 0u);
+  EXPECT_EQ(*outbound_rtps[2]->bytes_sent, 0u);
+}
+
+TEST_P(PeerConnectionEncodingsIntegrationParameterizedTest, Simulcast) {  // Three-layer simulcast with explicit L1T3 + scale factors per layer.
+  rtc::scoped_refptr<PeerConnectionTestWrapper> local_pc_wrapper = CreatePc();
+  if (SkipTestDueToAv1Missing(local_pc_wrapper)) {
+    return;
+  }
+  rtc::scoped_refptr<PeerConnectionTestWrapper> remote_pc_wrapper = CreatePc();
+  ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper);
+
+  std::vector<cricket::SimulcastLayer> layers =
+      CreateLayers({"f", "h", "q"}, /*active=*/true);
+  rtc::scoped_refptr<RtpTransceiverInterface> transceiver =
+      AddTransceiverWithSimulcastLayers(local_pc_wrapper, remote_pc_wrapper,
+                                        layers);
+  std::vector<RtpCodecCapability> codecs =
+      GetCapabilitiesAndRestrictToCodec(local_pc_wrapper, codec_name_);
+  transceiver->SetCodecPreferences(codecs);
+
+  rtc::scoped_refptr<RtpSenderInterface> sender = transceiver->sender();
+  RtpParameters parameters = sender->GetParameters();
+  ASSERT_THAT(parameters.encodings, SizeIs(3));
+  parameters.encodings[0].scalability_mode = "L1T3";
+  parameters.encodings[0].scale_resolution_down_by = 4;
+  parameters.encodings[1].scalability_mode = "L1T3";
+  parameters.encodings[1].scale_resolution_down_by = 2;
+  parameters.encodings[2].scalability_mode = "L1T3";
+  parameters.encodings[2].scale_resolution_down_by = 1;
+  sender->SetParameters(parameters);
+
+  NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper);
+  local_pc_wrapper->WaitForConnection();
+  remote_pc_wrapper->WaitForConnection();
+
+  // GetParameters() does not report any fallback.
+  parameters = sender->GetParameters();
+  ASSERT_THAT(parameters.encodings, SizeIs(3));
+  EXPECT_THAT(parameters.encodings[0].scalability_mode,
+              Optional(std::string("L1T3")));
+  EXPECT_THAT(parameters.encodings[1].scalability_mode,
+              Optional(std::string("L1T3")));
+  EXPECT_THAT(parameters.encodings[2].scalability_mode,
+              Optional(std::string("L1T3")));
+
+  // Wait until media is flowing on all three layers.
+  // Ramp up time is needed before all three layers are sending.
+  ASSERT_TRUE_WAIT(HasOutboundRtpBytesSent(local_pc_wrapper, 3u),
+                   kLongTimeoutForRampingUp.ms());
+  EXPECT_TRUE(OutboundRtpResolutionsAreLessThanOrEqualToExpectations(
+      local_pc_wrapper, {{"f", 320, 180}, {"h", 640, 360}, {"q", 1280, 720}}));
+  // Verify codec and scalability mode.
+  rtc::scoped_refptr<const RTCStatsReport> report = GetStats(local_pc_wrapper);
+  std::vector<const RTCOutboundRtpStreamStats*> outbound_rtps =
+      report->GetStatsOfType<RTCOutboundRtpStreamStats>();
+  ASSERT_THAT(outbound_rtps, SizeIs(3u));
+  EXPECT_THAT(GetCurrentCodecMimeType(report, *outbound_rtps[0]),
+              StrCaseEq(mime_type_));
+  EXPECT_THAT(GetCurrentCodecMimeType(report, *outbound_rtps[1]),
+              StrCaseEq(mime_type_));
+  EXPECT_THAT(GetCurrentCodecMimeType(report, *outbound_rtps[2]),
+              StrCaseEq(mime_type_));
+  EXPECT_THAT(*outbound_rtps[0]->scalability_mode, StrEq("L1T3"));
+  EXPECT_THAT(*outbound_rtps[1]->scalability_mode, StrEq("L1T3"));
+  EXPECT_THAT(*outbound_rtps[2]->scalability_mode, StrEq("L1T3"));
+}
+
+INSTANTIATE_TEST_SUITE_P(StandardPath,  // Runs the parameterized suite once per codec; H264 only when compiled in.
+                         PeerConnectionEncodingsIntegrationParameterizedTest,
+                         ::testing::Values("VP8",
+                                           "VP9",
+#if defined(WEBRTC_USE_H264)
+                                           "H264",
+#endif  // defined(WEBRTC_USE_H264)
+                                           "AV1"),
+                         StringParamToString());
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/peer_connection_end_to_end_unittest.cc b/third_party/libwebrtc/pc/peer_connection_end_to_end_unittest.cc
new file mode 100644
index 0000000000..a21d455ec5
--- /dev/null
+++ b/third_party/libwebrtc/pc/peer_connection_end_to_end_unittest.cc
@@ -0,0 +1,767 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdint.h>
+
+#include <cstddef>
+#include <limits>
+#include <memory>
+#include <string>
+#include <type_traits>
+#include <utility>
+#include <vector>
+
+#include "absl/strings/match.h"
+#include "absl/types/optional.h"
+#include "api/audio_codecs/L16/audio_decoder_L16.h"
+#include "api/audio_codecs/L16/audio_encoder_L16.h"
+#include "api/audio_codecs/audio_codec_pair_id.h"
+#include "api/audio_codecs/audio_decoder.h"
+#include "api/audio_codecs/audio_decoder_factory.h"
+#include "api/audio_codecs/audio_decoder_factory_template.h"
+#include "api/audio_codecs/audio_encoder.h"
+#include "api/audio_codecs/audio_encoder_factory.h"
+#include "api/audio_codecs/audio_encoder_factory_template.h"
+#include "api/audio_codecs/audio_format.h"
+#include "api/audio_codecs/opus_audio_decoder_factory.h"
+#include "api/audio_codecs/opus_audio_encoder_factory.h"
+#include "api/audio_options.h"
+#include "api/data_channel_interface.h"
+#include "api/media_stream_interface.h"
+#include "api/peer_connection_interface.h"
+#include "api/rtc_error.h"
+#include "api/scoped_refptr.h"
+#include "media/sctp/sctp_transport_internal.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/copy_on_write_buffer.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/physical_socket_server.h"
+#include "rtc_base/third_party/sigslot/sigslot.h"
+#include "rtc_base/thread.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+#ifdef WEBRTC_ANDROID
+#include "pc/test/android_test_initializer.h"
+#endif
+#include "pc/test/peer_connection_test_wrapper.h"
+// Notice that mockpeerconnectionobservers.h must be included after the above!
+#include "pc/test/mock_peer_connection_observers.h"
+#include "test/mock_audio_decoder.h"
+#include "test/mock_audio_decoder_factory.h"
+#include "test/mock_audio_encoder_factory.h"
+
+using ::testing::_;
+using ::testing::AtLeast;
+using ::testing::Invoke;
+using ::testing::StrictMock;
+using ::testing::Values;
+
+using webrtc::DataChannelInterface;
+using webrtc::MediaStreamInterface;
+using webrtc::PeerConnectionInterface;
+using webrtc::SdpSemantics;
+
+namespace {
+
+const int kMaxWait = 25000;
+
+} // namespace
+
+class PeerConnectionEndToEndBaseTest : public sigslot::has_slots<>,
+                                       public ::testing::Test {  // Fixture: caller/callee wrappers on shared network/worker threads.
+ public:
+  typedef std::vector<rtc::scoped_refptr<DataChannelInterface>> DataChannelList;
+
+  explicit PeerConnectionEndToEndBaseTest(SdpSemantics sdp_semantics)
+      : network_thread_(std::make_unique<rtc::Thread>(&pss_)),
+        worker_thread_(rtc::Thread::Create()) {
+    RTC_CHECK(network_thread_->Start());
+    RTC_CHECK(worker_thread_->Start());
+    caller_ = rtc::make_ref_counted<PeerConnectionTestWrapper>(
+        "caller", &pss_, network_thread_.get(), worker_thread_.get());
+    callee_ = rtc::make_ref_counted<PeerConnectionTestWrapper>(
+        "callee", &pss_, network_thread_.get(), worker_thread_.get());
+    webrtc::PeerConnectionInterface::IceServer ice_server;
+    ice_server.uri = "stun:stun.l.google.com:19302";
+    config_.servers.push_back(ice_server);
+    config_.sdp_semantics = sdp_semantics;
+
+#ifdef WEBRTC_ANDROID
+    webrtc::InitializeAndroidObjects();
+#endif
+  }
+
+  void CreatePcs(
+      rtc::scoped_refptr<webrtc::AudioEncoderFactory> audio_encoder_factory1,
+      rtc::scoped_refptr<webrtc::AudioDecoderFactory> audio_decoder_factory1,
+      rtc::scoped_refptr<webrtc::AudioEncoderFactory> audio_encoder_factory2,
+      rtc::scoped_refptr<webrtc::AudioDecoderFactory> audio_decoder_factory2) {  // Factories 1 -> caller, 2 -> callee.
+    EXPECT_TRUE(caller_->CreatePc(config_, audio_encoder_factory1,
+                                  audio_decoder_factory1));
+    EXPECT_TRUE(callee_->CreatePc(config_, audio_encoder_factory2,
+                                  audio_decoder_factory2));
+    PeerConnectionTestWrapper::Connect(caller_.get(), callee_.get());
+
+    caller_->SignalOnDataChannel.connect(
+        this, &PeerConnectionEndToEndBaseTest::OnCallerAddedDataChanel);  // NOTE(review): "Chanel" typo kept — it is the method's actual name.
+    callee_->SignalOnDataChannel.connect(
+        this, &PeerConnectionEndToEndBaseTest::OnCalleeAddedDataChannel);
+  }
+
+  void CreatePcs(
+      rtc::scoped_refptr<webrtc::AudioEncoderFactory> audio_encoder_factory,
+      rtc::scoped_refptr<webrtc::AudioDecoderFactory> audio_decoder_factory) {  // Convenience: same factories on both sides.
+    CreatePcs(audio_encoder_factory, audio_decoder_factory,
+              audio_encoder_factory, audio_decoder_factory);
+  }
+
+  void GetAndAddUserMedia() {
+    cricket::AudioOptions audio_options;
+    GetAndAddUserMedia(true, audio_options, true);
+  }
+
+  void GetAndAddUserMedia(bool audio,
+                          const cricket::AudioOptions& audio_options,
+                          bool video) {
+    caller_->GetAndAddUserMedia(audio, audio_options, video);
+    callee_->GetAndAddUserMedia(audio, audio_options, video);
+  }
+
+  void Negotiate() {
+    caller_->CreateOffer(
+        webrtc::PeerConnectionInterface::RTCOfferAnswerOptions());  // Caller always initiates the offer.
+  }
+
+  void WaitForCallEstablished() {
+    caller_->WaitForCallEstablished();
+    callee_->WaitForCallEstablished();
+  }
+
+  void WaitForConnection() {
+    caller_->WaitForConnection();
+    callee_->WaitForConnection();
+  }
+
+  void OnCallerAddedDataChanel(DataChannelInterface* dc) {  // NOTE(review): "Chanel" typo in the identifier; renaming would touch the connect() above.
+    caller_signaled_data_channels_.push_back(
+        rtc::scoped_refptr<DataChannelInterface>(dc));
+  }
+
+  void OnCalleeAddedDataChannel(DataChannelInterface* dc) {
+    callee_signaled_data_channels_.push_back(
+        rtc::scoped_refptr<DataChannelInterface>(dc));
+  }
+
+  // Tests that `dc1` and `dc2` can send to and receive from each other.
+  void TestDataChannelSendAndReceive(DataChannelInterface* dc1,
+                                     DataChannelInterface* dc2,
+                                     size_t size = 6) {  // `size` bytes are sent each way; dummy data repeats as needed.
+    std::unique_ptr<webrtc::MockDataChannelObserver> dc1_observer(
+        new webrtc::MockDataChannelObserver(dc1));
+
+    std::unique_ptr<webrtc::MockDataChannelObserver> dc2_observer(
+        new webrtc::MockDataChannelObserver(dc2));
+
+    static const std::string kDummyData =
+        "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
+    webrtc::DataBuffer buffer("");  // Starts empty; filled to `size` below.
+
+    size_t sizeLeft = size;
+    while (sizeLeft > 0) {
+      size_t chunkSize =
+          sizeLeft > kDummyData.length() ? kDummyData.length() : sizeLeft;
+      buffer.data.AppendData(kDummyData.data(), chunkSize);
+      sizeLeft -= chunkSize;
+    }
+
+    EXPECT_TRUE(dc1->Send(buffer));
+    EXPECT_EQ_WAIT(buffer.data,
+                   rtc::CopyOnWriteBuffer(dc2_observer->last_message()),
+                   kMaxWait);
+
+    EXPECT_TRUE(dc2->Send(buffer));
+    EXPECT_EQ_WAIT(buffer.data,
+                   rtc::CopyOnWriteBuffer(dc1_observer->last_message()),
+                   kMaxWait);
+
+    EXPECT_EQ(1U, dc1_observer->received_message_count());
+    EXPECT_EQ(size, dc1_observer->last_message().length());
+    EXPECT_EQ(1U, dc2_observer->received_message_count());
+    EXPECT_EQ(size, dc2_observer->last_message().length());
+  }
+
+  void WaitForDataChannelsToOpen(DataChannelInterface* local_dc,
+                                 const DataChannelList& remote_dc_list,
+                                 size_t remote_dc_index) {  // Waits for both ends and checks the negotiated ids match.
+    EXPECT_EQ_WAIT(DataChannelInterface::kOpen, local_dc->state(), kMaxWait);
+
+    ASSERT_TRUE_WAIT(remote_dc_list.size() > remote_dc_index, kMaxWait);
+    EXPECT_EQ_WAIT(DataChannelInterface::kOpen,
+                   remote_dc_list[remote_dc_index]->state(), kMaxWait);
+    EXPECT_EQ(local_dc->id(), remote_dc_list[remote_dc_index]->id());
+  }
+
+  void CloseDataChannels(DataChannelInterface* local_dc,
+                         const DataChannelList& remote_dc_list,
+                         size_t remote_dc_index) {  // Closing locally must propagate kClosed to the remote end.
+    local_dc->Close();
+    EXPECT_EQ_WAIT(DataChannelInterface::kClosed, local_dc->state(), kMaxWait);
+    EXPECT_EQ_WAIT(DataChannelInterface::kClosed,
+                   remote_dc_list[remote_dc_index]->state(), kMaxWait);
+  }
+
+ protected:
+  rtc::AutoThread main_thread_;
+  rtc::PhysicalSocketServer pss_;
+  std::unique_ptr<rtc::Thread> network_thread_;
+  std::unique_ptr<rtc::Thread> worker_thread_;
+  rtc::scoped_refptr<PeerConnectionTestWrapper> caller_;
+  rtc::scoped_refptr<PeerConnectionTestWrapper> callee_;
+  DataChannelList caller_signaled_data_channels_;  // Channels announced to the caller via OnDataChannel.
+  DataChannelList callee_signaled_data_channels_;  // Channels announced to the callee via OnDataChannel.
+  webrtc::PeerConnectionInterface::RTCConfiguration config_;
+};
+
+class PeerConnectionEndToEndTest
+    : public PeerConnectionEndToEndBaseTest,
+      public ::testing::WithParamInterface<SdpSemantics> {  // Parameterized over Plan B / Unified Plan SDP semantics.
+ protected:
+  PeerConnectionEndToEndTest() : PeerConnectionEndToEndBaseTest(GetParam()) {}
+};
+
+namespace {
+
+std::unique_ptr<webrtc::AudioDecoder> CreateForwardingMockDecoder(
+    std::unique_ptr<webrtc::AudioDecoder> real_decoder) {  // Wraps a real decoder in a StrictMock that forwards every call.
+  class ForwardingMockDecoder : public StrictMock<webrtc::MockAudioDecoder> {
+   public:
+    explicit ForwardingMockDecoder(std::unique_ptr<AudioDecoder> decoder)
+        : decoder_(std::move(decoder)) {}
+
+   private:
+    std::unique_ptr<AudioDecoder> decoder_;  // Keeps the real decoder alive as long as the mock.
+  };
+
+  const auto dec = real_decoder.get();  // For lambda capturing.
+  auto mock_decoder =
+      std::make_unique<ForwardingMockDecoder>(std::move(real_decoder));
+  EXPECT_CALL(*mock_decoder, Channels())
+      .Times(AtLeast(1))
+      .WillRepeatedly(Invoke([dec] { return dec->Channels(); }));
+  EXPECT_CALL(*mock_decoder, DecodeInternal(_, _, _, _, _))
+      .Times(AtLeast(1))
+      .WillRepeatedly(
+          Invoke([dec](const uint8_t* encoded, size_t encoded_len,
+                       int sample_rate_hz, int16_t* decoded,
+                       webrtc::AudioDecoder::SpeechType* speech_type) {
+            return dec->Decode(encoded, encoded_len, sample_rate_hz,
+                               std::numeric_limits<size_t>::max(), decoded,
+                               speech_type);
+          }));
+  EXPECT_CALL(*mock_decoder, Die());
+  EXPECT_CALL(*mock_decoder, HasDecodePlc()).WillRepeatedly(Invoke([dec] {
+    return dec->HasDecodePlc();
+  }));
+  EXPECT_CALL(*mock_decoder, PacketDuration(_, _))
+      .Times(AtLeast(1))
+      .WillRepeatedly(Invoke([dec](const uint8_t* encoded, size_t encoded_len) {
+        return dec->PacketDuration(encoded, encoded_len);
+      }));
+  EXPECT_CALL(*mock_decoder, SampleRateHz())
+      .Times(AtLeast(1))
+      .WillRepeatedly(Invoke([dec] { return dec->SampleRateHz(); }));
+
+  return std::move(mock_decoder);
+}
+
+rtc::scoped_refptr<webrtc::AudioDecoderFactory>
+CreateForwardingMockDecoderFactory(
+    webrtc::AudioDecoderFactory* real_decoder_factory) {  // StrictMock factory forwarding to `real_decoder_factory`; decoders get wrapped too.
+  rtc::scoped_refptr<webrtc::MockAudioDecoderFactory> mock_decoder_factory =
+      rtc::make_ref_counted<StrictMock<webrtc::MockAudioDecoderFactory>>();
+  EXPECT_CALL(*mock_decoder_factory, GetSupportedDecoders())
+      .Times(AtLeast(1))
+      .WillRepeatedly(Invoke([real_decoder_factory] {
+        return real_decoder_factory->GetSupportedDecoders();
+      }));
+  EXPECT_CALL(*mock_decoder_factory, IsSupportedDecoder(_))
+      .Times(AtLeast(1))
+      .WillRepeatedly(
+          Invoke([real_decoder_factory](const webrtc::SdpAudioFormat& format) {
+            return real_decoder_factory->IsSupportedDecoder(format);
+          }));
+  EXPECT_CALL(*mock_decoder_factory, MakeAudioDecoderMock(_, _, _))
+      .Times(AtLeast(2))
+      .WillRepeatedly(
+          Invoke([real_decoder_factory](
+                     const webrtc::SdpAudioFormat& format,
+                     absl::optional<webrtc::AudioCodecPairId> codec_pair_id,
+                     std::unique_ptr<webrtc::AudioDecoder>* return_value) {
+            auto real_decoder =
+                real_decoder_factory->MakeAudioDecoder(format, codec_pair_id);
+            *return_value =
+                real_decoder
+                    ? CreateForwardingMockDecoder(std::move(real_decoder))
+                    : nullptr;  // Unsupported format: propagate nullptr unwrapped.
+          }));
+  return mock_decoder_factory;
+}
+
+struct AudioEncoderUnicornSparklesRainbow {  // Fake codec: L16 advertised under a made-up SDP name with one fixed parameter.
+  using Config = webrtc::AudioEncoderL16::Config;
+  static absl::optional<Config> SdpToConfig(webrtc::SdpAudioFormat format) {
+    if (absl::EqualsIgnoreCase(format.name, "UnicornSparklesRainbow")) {
+      const webrtc::SdpAudioFormat::Parameters expected_params = {
+          {"num_horns", "1"}};
+      EXPECT_EQ(expected_params, format.parameters);
+      format.parameters.clear();
+      format.name = "L16";  // Delegate to the real L16 codec after un-branding.
+      return webrtc::AudioEncoderL16::SdpToConfig(format);
+    } else {
+      return absl::nullopt;
+    }
+  }
+  static void AppendSupportedEncoders(
+      std::vector<webrtc::AudioCodecSpec>* specs) {
+    std::vector<webrtc::AudioCodecSpec> new_specs;
+    webrtc::AudioEncoderL16::AppendSupportedEncoders(&new_specs);
+    for (auto& spec : new_specs) {
+      spec.format.name = "UnicornSparklesRainbow";  // Re-brand every L16 spec.
+      EXPECT_TRUE(spec.format.parameters.empty());
+      spec.format.parameters.emplace("num_horns", "1");
+      specs->push_back(spec);
+    }
+  }
+  static webrtc::AudioCodecInfo QueryAudioEncoder(const Config& config) {
+    return webrtc::AudioEncoderL16::QueryAudioEncoder(config);
+  }
+  static std::unique_ptr<webrtc::AudioEncoder> MakeAudioEncoder(
+      const Config& config,
+      int payload_type,
+      absl::optional<webrtc::AudioCodecPairId> codec_pair_id = absl::nullopt) {
+    return webrtc::AudioEncoderL16::MakeAudioEncoder(config, payload_type,
+                                                     codec_pair_id);
+  }
+};
+
+struct AudioDecoderUnicornSparklesRainbow {  // Decoder counterpart of the fake codec above; delegates to L16.
+  using Config = webrtc::AudioDecoderL16::Config;
+  static absl::optional<Config> SdpToConfig(webrtc::SdpAudioFormat format) {
+    if (absl::EqualsIgnoreCase(format.name, "UnicornSparklesRainbow")) {
+      const webrtc::SdpAudioFormat::Parameters expected_params = {
+          {"num_horns", "1"}};
+      EXPECT_EQ(expected_params, format.parameters);
+      format.parameters.clear();
+      format.name = "L16";  // Delegate to the real L16 codec after un-branding.
+      return webrtc::AudioDecoderL16::SdpToConfig(format);
+    } else {
+      return absl::nullopt;
+    }
+  }
+  static void AppendSupportedDecoders(
+      std::vector<webrtc::AudioCodecSpec>* specs) {
+    std::vector<webrtc::AudioCodecSpec> new_specs;
+    webrtc::AudioDecoderL16::AppendSupportedDecoders(&new_specs);
+    for (auto& spec : new_specs) {
+      spec.format.name = "UnicornSparklesRainbow";  // Re-brand every L16 spec.
+      EXPECT_TRUE(spec.format.parameters.empty());
+      spec.format.parameters.emplace("num_horns", "1");
+      specs->push_back(spec);
+    }
+  }
+  static std::unique_ptr<webrtc::AudioDecoder> MakeAudioDecoder(
+      const Config& config,
+      absl::optional<webrtc::AudioCodecPairId> codec_pair_id = absl::nullopt) {
+    return webrtc::AudioDecoderL16::MakeAudioDecoder(config, codec_pair_id);
+  }
+};
+
+} // namespace
+
+TEST_P(PeerConnectionEndToEndTest, Call) {  // Basic call; callee decoder is mock-wrapped so StrictMock verifies the decode path is exercised.
+  rtc::scoped_refptr<webrtc::AudioDecoderFactory> real_decoder_factory =
+      webrtc::CreateOpusAudioDecoderFactory();
+  CreatePcs(webrtc::CreateOpusAudioEncoderFactory(),
+            CreateForwardingMockDecoderFactory(real_decoder_factory.get()));
+  GetAndAddUserMedia();
+  Negotiate();
+  WaitForCallEstablished();
+}
+
+#if defined(IS_FUCHSIA)
+TEST_P(PeerConnectionEndToEndTest, CallWithSdesKeyNegotiation) {  // Fuchsia-only (see surrounding #if): SDES instead of DTLS-SRTP.
+  config_.enable_dtls_srtp = false;  // Disabling DTLS-SRTP falls back to SDES key exchange.
+  CreatePcs(webrtc::CreateOpusAudioEncoderFactory(),
+            webrtc::CreateOpusAudioDecoderFactory());
+  GetAndAddUserMedia();
+  Negotiate();
+  WaitForCallEstablished();
+}
+#endif
+
+TEST_P(PeerConnectionEndToEndTest, CallWithCustomCodec) {  // Verifies AudioCodecPairId pairing: same id within a PC factory, distinct across factories.
+  class IdLoggingAudioEncoderFactory : public webrtc::AudioEncoderFactory {
+   public:
+    IdLoggingAudioEncoderFactory(
+        rtc::scoped_refptr<AudioEncoderFactory> real_factory,
+        std::vector<webrtc::AudioCodecPairId>* const codec_ids)
+        : fact_(real_factory), codec_ids_(codec_ids) {}
+    std::vector<webrtc::AudioCodecSpec> GetSupportedEncoders() override {
+      return fact_->GetSupportedEncoders();
+    }
+    absl::optional<webrtc::AudioCodecInfo> QueryAudioEncoder(
+        const webrtc::SdpAudioFormat& format) override {
+      return fact_->QueryAudioEncoder(format);
+    }
+    std::unique_ptr<webrtc::AudioEncoder> MakeAudioEncoder(
+        int payload_type,
+        const webrtc::SdpAudioFormat& format,
+        absl::optional<webrtc::AudioCodecPairId> codec_pair_id) override {
+      EXPECT_TRUE(codec_pair_id.has_value());
+      codec_ids_->push_back(*codec_pair_id);  // Record the id passed by the PC factory.
+      return fact_->MakeAudioEncoder(payload_type, format, codec_pair_id);
+    }
+
+   private:
+    const rtc::scoped_refptr<webrtc::AudioEncoderFactory> fact_;
+    std::vector<webrtc::AudioCodecPairId>* const codec_ids_;
+  };
+
+  class IdLoggingAudioDecoderFactory : public webrtc::AudioDecoderFactory {
+   public:
+    IdLoggingAudioDecoderFactory(
+        rtc::scoped_refptr<AudioDecoderFactory> real_factory,
+        std::vector<webrtc::AudioCodecPairId>* const codec_ids)
+        : fact_(real_factory), codec_ids_(codec_ids) {}
+    std::vector<webrtc::AudioCodecSpec> GetSupportedDecoders() override {
+      return fact_->GetSupportedDecoders();
+    }
+    bool IsSupportedDecoder(const webrtc::SdpAudioFormat& format) override {
+      return fact_->IsSupportedDecoder(format);
+    }
+    std::unique_ptr<webrtc::AudioDecoder> MakeAudioDecoder(
+        const webrtc::SdpAudioFormat& format,
+        absl::optional<webrtc::AudioCodecPairId> codec_pair_id) override {
+      EXPECT_TRUE(codec_pair_id.has_value());
+      codec_ids_->push_back(*codec_pair_id);  // Record the id passed by the PC factory.
+      return fact_->MakeAudioDecoder(format, codec_pair_id);
+    }
+
+   private:
+    const rtc::scoped_refptr<webrtc::AudioDecoderFactory> fact_;
+    std::vector<webrtc::AudioCodecPairId>* const codec_ids_;
+  };
+
+  std::vector<webrtc::AudioCodecPairId> encoder_id1, encoder_id2, decoder_id1,
+      decoder_id2;
+  CreatePcs(rtc::make_ref_counted<IdLoggingAudioEncoderFactory>(
+                webrtc::CreateAudioEncoderFactory<
+                    AudioEncoderUnicornSparklesRainbow>(),
+                &encoder_id1),
+            rtc::make_ref_counted<IdLoggingAudioDecoderFactory>(
+                webrtc::CreateAudioDecoderFactory<
+                    AudioDecoderUnicornSparklesRainbow>(),
+                &decoder_id1),
+            rtc::make_ref_counted<IdLoggingAudioEncoderFactory>(
+                webrtc::CreateAudioEncoderFactory<
+                    AudioEncoderUnicornSparklesRainbow>(),
+                &encoder_id2),
+            rtc::make_ref_counted<IdLoggingAudioDecoderFactory>(
+                webrtc::CreateAudioDecoderFactory<
+                    AudioDecoderUnicornSparklesRainbow>(),
+                &decoder_id2));
+  GetAndAddUserMedia();
+  Negotiate();
+  WaitForCallEstablished();
+
+  // Each codec factory has been used to create one codec. The first pair got
+  // the same ID because they were passed to the same PeerConnectionFactory,
+  // and the second pair got the same ID---but these two IDs are not equal,
+  // because each PeerConnectionFactory has its own ID.
+  EXPECT_EQ(1U, encoder_id1.size());
+  EXPECT_EQ(1U, encoder_id2.size());
+  EXPECT_EQ(encoder_id1, decoder_id1);
+  EXPECT_EQ(encoder_id2, decoder_id2);
+  EXPECT_NE(encoder_id1, encoder_id2);
+}
+
+#ifdef WEBRTC_HAVE_SCTP
+// Verifies that a DataChannel created before the negotiation can transition to
+// "OPEN" and transfer data.
+TEST_P(PeerConnectionEndToEndTest, CreateDataChannelBeforeNegotiate) {  // Pre-negotiation channels open, exchange data both ways, then close.
+  CreatePcs(webrtc::MockAudioEncoderFactory::CreateEmptyFactory(),
+            webrtc::MockAudioDecoderFactory::CreateEmptyFactory());
+
+  webrtc::DataChannelInit init;
+  rtc::scoped_refptr<DataChannelInterface> caller_dc(
+      caller_->CreateDataChannel("data", init));
+  rtc::scoped_refptr<DataChannelInterface> callee_dc(
+      callee_->CreateDataChannel("data", init));
+
+  Negotiate();
+  WaitForConnection();
+
+  WaitForDataChannelsToOpen(caller_dc.get(), callee_signaled_data_channels_, 0);
+  WaitForDataChannelsToOpen(callee_dc.get(), caller_signaled_data_channels_, 0);
+
+  TestDataChannelSendAndReceive(caller_dc.get(),
+                                callee_signaled_data_channels_[0].get());
+  TestDataChannelSendAndReceive(callee_dc.get(),
+                                caller_signaled_data_channels_[0].get());
+
+  CloseDataChannels(caller_dc.get(), callee_signaled_data_channels_, 0);
+  CloseDataChannels(callee_dc.get(), caller_signaled_data_channels_, 0);
+}
+
+// Verifies that a DataChannel created after the negotiation can transition to
+// "OPEN" and transfer data.
+TEST_P(PeerConnectionEndToEndTest, CreateDataChannelAfterNegotiate) {
+  CreatePcs(webrtc::MockAudioEncoderFactory::CreateEmptyFactory(),
+            webrtc::MockAudioDecoderFactory::CreateEmptyFactory());
+
+  webrtc::DataChannelInit init;
+
+  // This DataChannel is for creating the data content in the negotiation.
+  rtc::scoped_refptr<DataChannelInterface> dummy(
+      caller_->CreateDataChannel("data", init));
+  Negotiate();
+  WaitForConnection();
+
+  // Wait for the data channel created pre-negotiation to be opened.
+  WaitForDataChannelsToOpen(dummy.get(), callee_signaled_data_channels_, 0);
+
+  // Create new DataChannels after the negotiation and verify their states.
+  rtc::scoped_refptr<DataChannelInterface> caller_dc(
+      caller_->CreateDataChannel("hello", init));
+  rtc::scoped_refptr<DataChannelInterface> callee_dc(
+      callee_->CreateDataChannel("hello", init));
+
+  // The caller's new channel is the second one signaled to the callee
+  // (index 1, after `dummy`); the callee's is the first signaled to the
+  // caller (index 0).
+  WaitForDataChannelsToOpen(caller_dc.get(), callee_signaled_data_channels_, 1);
+  WaitForDataChannelsToOpen(callee_dc.get(), caller_signaled_data_channels_, 0);
+
+  TestDataChannelSendAndReceive(caller_dc.get(),
+                                callee_signaled_data_channels_[1].get());
+  TestDataChannelSendAndReceive(callee_dc.get(),
+                                caller_signaled_data_channels_[0].get());
+
+  CloseDataChannels(caller_dc.get(), callee_signaled_data_channels_, 1);
+  CloseDataChannels(callee_dc.get(), caller_signaled_data_channels_, 0);
+}
+
+// Verifies that a DataChannel created can transfer large messages.
+TEST_P(PeerConnectionEndToEndTest, CreateDataChannelLargeTransfer) {
+  CreatePcs(webrtc::MockAudioEncoderFactory::CreateEmptyFactory(),
+            webrtc::MockAudioDecoderFactory::CreateEmptyFactory());
+
+  webrtc::DataChannelInit init;
+
+  // This DataChannel is for creating the data content in the negotiation.
+  rtc::scoped_refptr<DataChannelInterface> dummy(
+      caller_->CreateDataChannel("data", init));
+  Negotiate();
+  WaitForConnection();
+
+  // Wait for the data channel created pre-negotiation to be opened.
+  WaitForDataChannelsToOpen(dummy.get(), callee_signaled_data_channels_, 0);
+
+  // Create new DataChannels after the negotiation and verify their states.
+  rtc::scoped_refptr<DataChannelInterface> caller_dc(
+      caller_->CreateDataChannel("hello", init));
+  rtc::scoped_refptr<DataChannelInterface> callee_dc(
+      callee_->CreateDataChannel("hello", init));
+
+  WaitForDataChannelsToOpen(caller_dc.get(), callee_signaled_data_channels_, 1);
+  WaitForDataChannelsToOpen(callee_dc.get(), caller_signaled_data_channels_, 0);
+
+  // Send 256 KiB in each direction (default message size in
+  // TestDataChannelSendAndReceive is much smaller).
+  TestDataChannelSendAndReceive(
+      caller_dc.get(), callee_signaled_data_channels_[1].get(), 256 * 1024);
+  TestDataChannelSendAndReceive(
+      callee_dc.get(), caller_signaled_data_channels_[0].get(), 256 * 1024);
+
+  CloseDataChannels(caller_dc.get(), callee_signaled_data_channels_, 1);
+  CloseDataChannels(callee_dc.get(), caller_signaled_data_channels_, 0);
+}
+
+// Verifies that DataChannel IDs are even/odd based on the DTLS roles.
+TEST_P(PeerConnectionEndToEndTest, DataChannelIdAssignment) {
+  CreatePcs(webrtc::MockAudioEncoderFactory::CreateEmptyFactory(),
+            webrtc::MockAudioDecoderFactory::CreateEmptyFactory());
+
+  webrtc::DataChannelInit init;
+  rtc::scoped_refptr<DataChannelInterface> caller_dc_1(
+      caller_->CreateDataChannel("data", init));
+  rtc::scoped_refptr<DataChannelInterface> callee_dc_1(
+      callee_->CreateDataChannel("data", init));
+
+  Negotiate();
+  WaitForConnection();
+
+  // Odd IDs for the caller and even for the callee, matching the
+  // even-for-DTLS-client / odd-for-DTLS-server rule of RFC 8832 (the callee
+  // presumably ends up as the DTLS client in this setup).
+  EXPECT_EQ(1, caller_dc_1->id() % 2);
+  EXPECT_EQ(0, callee_dc_1->id() % 2);
+
+  // IDs allocated after the session is established follow the same parity.
+  rtc::scoped_refptr<DataChannelInterface> caller_dc_2(
+      caller_->CreateDataChannel("data", init));
+  rtc::scoped_refptr<DataChannelInterface> callee_dc_2(
+      callee_->CreateDataChannel("data", init));
+
+  EXPECT_EQ(1, caller_dc_2->id() % 2);
+  EXPECT_EQ(0, callee_dc_2->id() % 2);
+}
+
+// Verifies that the message is received by the right remote DataChannel when
+// there are multiple DataChannels.
+TEST_P(PeerConnectionEndToEndTest,
+       MessageTransferBetweenTwoPairsOfDataChannels) {
+  CreatePcs(webrtc::MockAudioEncoderFactory::CreateEmptyFactory(),
+            webrtc::MockAudioDecoderFactory::CreateEmptyFactory());
+
+  webrtc::DataChannelInit init;
+
+  rtc::scoped_refptr<DataChannelInterface> caller_dc_1(
+      caller_->CreateDataChannel("data", init));
+  rtc::scoped_refptr<DataChannelInterface> caller_dc_2(
+      caller_->CreateDataChannel("data", init));
+
+  Negotiate();
+  WaitForConnection();
+  WaitForDataChannelsToOpen(caller_dc_1.get(), callee_signaled_data_channels_,
+                            0);
+  WaitForDataChannelsToOpen(caller_dc_2.get(), callee_signaled_data_channels_,
+                            1);
+
+  // Observe each remote channel so received messages can be inspected.
+  std::unique_ptr<webrtc::MockDataChannelObserver> dc_1_observer(
+      new webrtc::MockDataChannelObserver(
+          callee_signaled_data_channels_[0].get()));
+
+  std::unique_ptr<webrtc::MockDataChannelObserver> dc_2_observer(
+      new webrtc::MockDataChannelObserver(
+          callee_signaled_data_channels_[1].get()));
+
+  const std::string message_1 = "hello 1";
+  const std::string message_2 = "hello 2";
+
+  caller_dc_1->Send(webrtc::DataBuffer(message_1));
+  EXPECT_EQ_WAIT(message_1, dc_1_observer->last_message(), kMaxWait);
+
+  caller_dc_2->Send(webrtc::DataBuffer(message_2));
+  EXPECT_EQ_WAIT(message_2, dc_2_observer->last_message(), kMaxWait);
+
+  // Exactly one message per channel: no cross-delivery between the pairs.
+  EXPECT_EQ(1U, dc_1_observer->received_message_count());
+  EXPECT_EQ(1U, dc_2_observer->received_message_count());
+}
+
+// Verifies that a DataChannel added from an OPEN message functions after
+// a channel has been previously closed (webrtc issue 3778).
+// This previously failed because the new channel re-used the ID of the closed
+// channel, and the closed channel was incorrectly still assigned to the ID.
+TEST_P(PeerConnectionEndToEndTest,
+       DataChannelFromOpenWorksAfterPreviousChannelClosed) {
+  CreatePcs(webrtc::MockAudioEncoderFactory::CreateEmptyFactory(),
+            webrtc::MockAudioDecoderFactory::CreateEmptyFactory());
+
+  webrtc::DataChannelInit init;
+  rtc::scoped_refptr<DataChannelInterface> caller_dc(
+      caller_->CreateDataChannel("data", init));
+
+  Negotiate();
+  WaitForConnection();
+
+  WaitForDataChannelsToOpen(caller_dc.get(), callee_signaled_data_channels_, 0);
+  // Remember the stream ID so its re-use can be asserted below.
+  int first_channel_id = caller_dc->id();
+  // Wait for the local side to say it's closed, but not the remote side.
+  // Previously, the channel on which Close is called reported being closed
+  // prematurely, and this caused issues; see bugs.webrtc.org/4453.
+  caller_dc->Close();
+  EXPECT_EQ_WAIT(DataChannelInterface::kClosed, caller_dc->state(), kMaxWait);
+
+  // Create a new channel and ensure it works after closing the previous one.
+  caller_dc = caller_->CreateDataChannel("data2", init);
+  WaitForDataChannelsToOpen(caller_dc.get(), callee_signaled_data_channels_, 1);
+  // Since the second channel was created after the first finished closing, it
+  // should be able to re-use the first one's ID.
+  EXPECT_EQ(first_channel_id, caller_dc->id());
+  TestDataChannelSendAndReceive(caller_dc.get(),
+                                callee_signaled_data_channels_[1].get());
+
+  CloseDataChannels(caller_dc.get(), callee_signaled_data_channels_, 1);
+}
+
+// This tests that if a data channel is closed remotely while not referenced
+// by the application (meaning only the PeerConnection contributes to its
+// reference count), no memory access violation will occur.
+// See: https://code.google.com/p/chromium/issues/detail?id=565048
+TEST_P(PeerConnectionEndToEndTest, CloseDataChannelRemotelyWhileNotReferenced) {
+  CreatePcs(webrtc::MockAudioEncoderFactory::CreateEmptyFactory(),
+            webrtc::MockAudioDecoderFactory::CreateEmptyFactory());
+
+  webrtc::DataChannelInit init;
+  rtc::scoped_refptr<DataChannelInterface> caller_dc(
+      caller_->CreateDataChannel("data", init));
+
+  Negotiate();
+  WaitForConnection();
+
+  WaitForDataChannelsToOpen(caller_dc.get(), callee_signaled_data_channels_, 0);
+  // This removes the reference to the remote data channel that we hold,
+  // leaving the callee's PeerConnection as its only owner.
+  callee_signaled_data_channels_.clear();
+  caller_dc->Close();
+  EXPECT_EQ_WAIT(DataChannelInterface::kClosed, caller_dc->state(), kMaxWait);
+
+  // Wait for a bit longer so the remote data channel will receive the
+  // close message and be destroyed. The test passes if no crash or
+  // sanitizer error occurs while these messages are processed.
+  rtc::Thread::Current()->ProcessMessages(100);
+}
+
+// Test behavior of creating too many datachannels.
+TEST_P(PeerConnectionEndToEndTest, TooManyDataChannelsOpenedBeforeConnecting) {
+  CreatePcs(webrtc::MockAudioEncoderFactory::CreateEmptyFactory(),
+            webrtc::MockAudioDecoderFactory::CreateEmptyFactory());
+
+  webrtc::DataChannelInit init;
+  std::vector<rtc::scoped_refptr<DataChannelInterface>> channels;
+  // Note `<=`: creates one channel more than the limit asserted below.
+  for (int i = 0; i <= cricket::kMaxSctpStreams / 2; i++) {
+    rtc::scoped_refptr<DataChannelInterface> caller_dc(
+        caller_->CreateDataChannel("data", init));
+    channels.push_back(std::move(caller_dc));
+  }
+  Negotiate();
+  WaitForConnection();
+  // Only kMaxSctpStreams / 2 channels are signaled to the remote side;
+  // presumably the other half of the stream space is left for
+  // remotely-opened channels -- TODO(review): confirm against the SCTP
+  // data-channel allocator.
+  EXPECT_EQ_WAIT(callee_signaled_data_channels_.size(),
+                 static_cast<size_t>(cricket::kMaxSctpStreams / 2), kMaxWait);
+  // The last channel within the limit opens; the one past it ends up closed.
+  EXPECT_EQ(DataChannelInterface::kOpen,
+            channels[(cricket::kMaxSctpStreams / 2) - 1]->state());
+  EXPECT_EQ(DataChannelInterface::kClosed,
+            channels[cricket::kMaxSctpStreams / 2]->state());
+}
+
+#endif // WEBRTC_HAVE_SCTP
+
+TEST_P(PeerConnectionEndToEndTest, CanRestartIce) {
+  // Use real Opus codecs (decoder wrapped in a forwarding mock) so a
+  // genuine audio call can be established before the restart.
+  rtc::scoped_refptr<webrtc::AudioDecoderFactory> real_decoder_factory =
+      webrtc::CreateOpusAudioDecoderFactory();
+  CreatePcs(webrtc::CreateOpusAudioEncoderFactory(),
+            CreateForwardingMockDecoderFactory(real_decoder_factory.get()));
+  GetAndAddUserMedia();
+  Negotiate();
+  WaitForCallEstablished();
+  // Cause ICE restart to be requested.
+  auto config = caller_->pc()->GetConfiguration();
+  ASSERT_NE(PeerConnectionInterface::kRelay, config.type);
+  // Changing the candidate-gathering policy forces the restart.
+  config.type = PeerConnectionInterface::kRelay;
+  ASSERT_TRUE(caller_->pc()->SetConfiguration(config).ok());
+  // When solving https://crbug.com/webrtc/10504, all we need to check
+  // is that we do not crash. We should also be testing that restart happens.
+}
+
+// Run the whole suite under both SDP semantics.
+INSTANTIATE_TEST_SUITE_P(PeerConnectionEndToEndTest,
+                         PeerConnectionEndToEndTest,
+                         Values(SdpSemantics::kPlanB_DEPRECATED,
+                                SdpSemantics::kUnifiedPlan));
diff --git a/third_party/libwebrtc/pc/peer_connection_factory.cc b/third_party/libwebrtc/pc/peer_connection_factory.cc
new file mode 100644
index 0000000000..81780cf51e
--- /dev/null
+++ b/third_party/libwebrtc/pc/peer_connection_factory.cc
@@ -0,0 +1,352 @@
+/*
+ * Copyright 2004 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/peer_connection_factory.h"
+
+#include <type_traits>
+#include <utility>
+
+#include "absl/strings/match.h"
+#include "api/async_resolver_factory.h"
+#include "api/call/call_factory_interface.h"
+#include "api/fec_controller.h"
+#include "api/ice_transport_interface.h"
+#include "api/network_state_predictor.h"
+#include "api/packet_socket_factory.h"
+#include "api/rtc_event_log/rtc_event_log.h"
+#include "api/sequence_checker.h"
+#include "api/transport/bitrate_settings.h"
+#include "api/units/data_rate.h"
+#include "call/audio_state.h"
+#include "call/rtp_transport_controller_send_factory.h"
+#include "media/base/media_engine.h"
+#include "p2p/base/basic_async_resolver_factory.h"
+#include "p2p/base/basic_packet_socket_factory.h"
+#include "p2p/base/default_ice_transport_factory.h"
+#include "p2p/base/port_allocator.h"
+#include "p2p/client/basic_port_allocator.h"
+#include "pc/audio_track.h"
+#include "pc/local_audio_source.h"
+#include "pc/media_stream.h"
+#include "pc/media_stream_proxy.h"
+#include "pc/media_stream_track_proxy.h"
+#include "pc/peer_connection.h"
+#include "pc/peer_connection_factory_proxy.h"
+#include "pc/peer_connection_proxy.h"
+#include "pc/rtp_parameters_conversion.h"
+#include "pc/session_description.h"
+#include "pc/video_track.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/experiments/field_trial_parser.h"
+#include "rtc_base/experiments/field_trial_units.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/numerics/safe_conversions.h"
+#include "rtc_base/rtc_certificate_generator.h"
+#include "rtc_base/system/file_wrapper.h"
+
+namespace webrtc {
+
+// Builds a PeerConnectionFactory from `dependencies` and wraps it in a
+// thread-marshalling proxy. Returns nullptr on initialization failure.
+rtc::scoped_refptr<PeerConnectionFactoryInterface>
+CreateModularPeerConnectionFactory(
+    PeerConnectionFactoryDependencies dependencies) {
+  // The PeerConnectionFactory must be created on the signaling thread.
+  if (dependencies.signaling_thread &&
+      !dependencies.signaling_thread->IsCurrent()) {
+    // Hop to the signaling thread and re-enter this function there.
+    return dependencies.signaling_thread->BlockingCall([&dependencies] {
+      return CreateModularPeerConnectionFactory(std::move(dependencies));
+    });
+  }
+
+  auto pc_factory = PeerConnectionFactory::Create(std::move(dependencies));
+  if (!pc_factory) {
+    return nullptr;
+  }
+  // Verify that the invocation and the initialization ended up agreeing on the
+  // thread.
+  RTC_DCHECK_RUN_ON(pc_factory->signaling_thread());
+  // Proxy primary thread = signaling, secondary = worker.
+  return PeerConnectionFactoryProxy::Create(
+      pc_factory->signaling_thread(), pc_factory->worker_thread(), pc_factory);
+}
+
+// Static
+rtc::scoped_refptr<PeerConnectionFactory> PeerConnectionFactory::Create(
+    PeerConnectionFactoryDependencies dependencies) {
+  // The ConnectionContext consumes part of `dependencies`; its creation can
+  // fail, in which case no factory is returned.
+  auto context = ConnectionContext::Create(&dependencies);
+  if (!context) {
+    return nullptr;
+  }
+  return rtc::make_ref_counted<PeerConnectionFactory>(context, &dependencies);
+}
+
+// Constructs the factory around an already-created ConnectionContext and
+// moves the remaining per-factory dependencies out of `dependencies`.
+PeerConnectionFactory::PeerConnectionFactory(
+    rtc::scoped_refptr<ConnectionContext> context,
+    PeerConnectionFactoryDependencies* dependencies)
+    : context_(context),
+      task_queue_factory_(std::move(dependencies->task_queue_factory)),
+      event_log_factory_(std::move(dependencies->event_log_factory)),
+      fec_controller_factory_(std::move(dependencies->fec_controller_factory)),
+      network_state_predictor_factory_(
+          std::move(dependencies->network_state_predictor_factory)),
+      injected_network_controller_factory_(
+          std::move(dependencies->network_controller_factory)),
+      neteq_factory_(std::move(dependencies->neteq_factory)),
+      transport_controller_send_factory_(
+          // Fall back to the default send-controller factory when none is
+          // injected.
+          (dependencies->transport_controller_send_factory)
+              ? std::move(dependencies->transport_controller_send_factory)
+              : std::make_unique<RtpTransportControllerSendFactory>()),
+      metronome_(std::move(dependencies->metronome)) {}
+
+// Testing-only convenience overload (see header): creates the
+// ConnectionContext itself and ignores the possibility of failure.
+PeerConnectionFactory::PeerConnectionFactory(
+    PeerConnectionFactoryDependencies dependencies)
+    : PeerConnectionFactory(ConnectionContext::Create(&dependencies),
+                            &dependencies) {}
+
+PeerConnectionFactory::~PeerConnectionFactory() {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  // `metronome_` is guarded by the worker thread, so release it there.
+  worker_thread()->BlockingCall([this] {
+    RTC_DCHECK_RUN_ON(worker_thread());
+    metronome_ = nullptr;
+  });
+}
+
+// Stores factory-wide options used for subsequently created PeerConnections.
+void PeerConnectionFactory::SetOptions(const Options& options) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  options_ = options;
+}
+
+// Returns the send codecs and default-enabled RTP header extensions the
+// media engine supports for `kind`. Data and unsupported media types have
+// no RTP capabilities.
+RtpCapabilities PeerConnectionFactory::GetRtpSenderCapabilities(
+    cricket::MediaType kind) const {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  switch (kind) {
+    case cricket::MEDIA_TYPE_AUDIO: {
+      cricket::AudioCodecs cricket_codecs;
+      cricket_codecs = media_engine()->voice().send_codecs();
+      auto extensions =
+          GetDefaultEnabledRtpHeaderExtensions(media_engine()->voice());
+      return ToRtpCapabilities(cricket_codecs, extensions);
+    }
+    case cricket::MEDIA_TYPE_VIDEO: {
+      cricket::VideoCodecs cricket_codecs;
+      // Whether RTX codecs are listed depends on the shared context.
+      cricket_codecs = media_engine()->video().send_codecs(context_->use_rtx());
+      auto extensions =
+          GetDefaultEnabledRtpHeaderExtensions(media_engine()->video());
+      return ToRtpCapabilities(cricket_codecs, extensions);
+    }
+    case cricket::MEDIA_TYPE_DATA:
+      return RtpCapabilities();
+    case cricket::MEDIA_TYPE_UNSUPPORTED:
+      return RtpCapabilities();
+  }
+  // Only reachable with an out-of-range enum value.
+  RTC_DLOG(LS_ERROR) << "Got unexpected MediaType " << kind;
+  RTC_CHECK_NOTREACHED();
+}
+
+// Receive-side counterpart of GetRtpSenderCapabilities(): returns the
+// receive codecs and default-enabled header extensions for `kind`.
+RtpCapabilities PeerConnectionFactory::GetRtpReceiverCapabilities(
+    cricket::MediaType kind) const {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  switch (kind) {
+    case cricket::MEDIA_TYPE_AUDIO: {
+      cricket::AudioCodecs cricket_codecs;
+      cricket_codecs = media_engine()->voice().recv_codecs();
+      auto extensions =
+          GetDefaultEnabledRtpHeaderExtensions(media_engine()->voice());
+      return ToRtpCapabilities(cricket_codecs, extensions);
+    }
+    case cricket::MEDIA_TYPE_VIDEO: {
+      cricket::VideoCodecs cricket_codecs =
+          media_engine()->video().recv_codecs(context_->use_rtx());
+      auto extensions =
+          GetDefaultEnabledRtpHeaderExtensions(media_engine()->video());
+      return ToRtpCapabilities(cricket_codecs, extensions);
+    }
+    case cricket::MEDIA_TYPE_DATA:
+      return RtpCapabilities();
+    case cricket::MEDIA_TYPE_UNSUPPORTED:
+      return RtpCapabilities();
+  }
+  // Only reachable with an out-of-range enum value.
+  RTC_DLOG(LS_ERROR) << "Got unexpected MediaType " << kind;
+  RTC_CHECK_NOTREACHED();
+}
+
+// Creates a local (capture-side) audio source configured by `options`.
+rtc::scoped_refptr<AudioSourceInterface>
+PeerConnectionFactory::CreateAudioSource(const cricket::AudioOptions& options) {
+  RTC_DCHECK(signaling_thread()->IsCurrent());
+  rtc::scoped_refptr<LocalAudioSource> source(
+      LocalAudioSource::Create(&options));
+  return source;
+}
+
+// Starts an AEC diagnostic dump into `file`, capped at `max_size_bytes`.
+// Runs on the worker thread; returns false if the engine refuses.
+bool PeerConnectionFactory::StartAecDump(FILE* file, int64_t max_size_bytes) {
+  RTC_DCHECK_RUN_ON(worker_thread());
+  // Wrap the raw FILE* for the media engine.
+  return media_engine()->voice().StartAecDump(FileWrapper(file),
+                                              max_size_bytes);
+}
+
+// Stops a previously started AEC dump. Runs on the worker thread.
+void PeerConnectionFactory::StopAecDump() {
+  RTC_DCHECK_RUN_ON(worker_thread());
+  media_engine()->voice().StopAecDump();
+}
+
+// Accessor for the media engine owned by the shared ConnectionContext.
+cricket::MediaEngineInterface* PeerConnectionFactory::media_engine() const {
+  RTC_DCHECK(context_);
+  return context_->media_engine();
+}
+
+// Creates a PeerConnection (wrapped in a proxy), filling in defaults for any
+// optional dependencies the caller did not supply. Returns an error if
+// PeerConnection::Create fails.
+RTCErrorOr<rtc::scoped_refptr<PeerConnectionInterface>>
+PeerConnectionFactory::CreatePeerConnectionOrError(
+    const PeerConnectionInterface::RTCConfiguration& configuration,
+    PeerConnectionDependencies dependencies) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+
+  // Set internal defaults if optional dependencies are not set.
+  if (!dependencies.cert_generator) {
+    dependencies.cert_generator =
+        std::make_unique<rtc::RTCCertificateGenerator>(signaling_thread(),
+                                                       network_thread());
+  }
+  if (!dependencies.allocator) {
+    // Per-connection field trials take precedence over the factory's.
+    const FieldTrialsView* trials =
+        dependencies.trials ? dependencies.trials.get() : &field_trials();
+    dependencies.allocator = std::make_unique<cricket::BasicPortAllocator>(
+        context_->default_network_manager(), context_->default_socket_factory(),
+        configuration.turn_customizer, /*relay_port_factory=*/nullptr, trials);
+    dependencies.allocator->SetPortRange(
+        configuration.port_allocator_config.min_port,
+        configuration.port_allocator_config.max_port);
+    dependencies.allocator->set_flags(
+        configuration.port_allocator_config.flags);
+  }
+
+  if (!dependencies.ice_transport_factory) {
+    dependencies.ice_transport_factory =
+        std::make_unique<DefaultIceTransportFactory>();
+  }
+
+  dependencies.allocator->SetNetworkIgnoreMask(options().network_ignore_mask);
+  dependencies.allocator->SetVpnList(configuration.vpn_list);
+
+  // The event log and the Call are created (and used) on the worker thread.
+  std::unique_ptr<RtcEventLog> event_log =
+      worker_thread()->BlockingCall([this] { return CreateRtcEventLog_w(); });
+
+  const FieldTrialsView* trials =
+      dependencies.trials ? dependencies.trials.get() : &field_trials();
+  std::unique_ptr<Call> call =
+      worker_thread()->BlockingCall([this, &event_log, trials, &configuration] {
+        return CreateCall_w(event_log.get(), *trials, configuration);
+      });
+
+  auto result = PeerConnection::Create(context_, options_, std::move(event_log),
+                                       std::move(call), configuration,
+                                       std::move(dependencies));
+  if (!result.ok()) {
+    return result.MoveError();
+  }
+  // We configure the proxy with a pointer to the network thread for methods
+  // that need to be invoked there rather than on the signaling thread.
+  // Internally, the proxy object has a member variable named `worker_thread_`
+  // which will point to the network thread (and not the factory's
+  // worker_thread()). All such methods have thread checks though, so the code
+  // should still be clear (outside of macro expansion).
+  rtc::scoped_refptr<PeerConnectionInterface> result_proxy =
+      PeerConnectionProxy::Create(signaling_thread(), network_thread(),
+                                  result.MoveValue());
+  return result_proxy;
+}
+
+// Creates a local MediaStream wrapped in a signaling-thread proxy.
+rtc::scoped_refptr<MediaStreamInterface>
+PeerConnectionFactory::CreateLocalMediaStream(const std::string& stream_id) {
+  RTC_DCHECK(signaling_thread()->IsCurrent());
+  return MediaStreamProxy::Create(signaling_thread(),
+                                  MediaStream::Create(stream_id));
+}
+
+// Creates a video track fed by `source`, proxied across the signaling and
+// worker threads.
+rtc::scoped_refptr<VideoTrackInterface> PeerConnectionFactory::CreateVideoTrack(
+    rtc::scoped_refptr<VideoTrackSourceInterface> source,
+    absl::string_view id) {
+  RTC_DCHECK(signaling_thread()->IsCurrent());
+  rtc::scoped_refptr<VideoTrackInterface> track =
+      VideoTrack::Create(id, source, worker_thread());
+  return VideoTrackProxy::Create(signaling_thread(), worker_thread(), track);
+}
+
+// Creates an audio track fed by `source` (may be null), proxied on the
+// signaling thread.
+rtc::scoped_refptr<AudioTrackInterface> PeerConnectionFactory::CreateAudioTrack(
+    const std::string& id,
+    AudioSourceInterface* source) {
+  RTC_DCHECK(signaling_thread()->IsCurrent());
+  rtc::scoped_refptr<AudioTrackInterface> track =
+      AudioTrack::Create(id, rtc::scoped_refptr<AudioSourceInterface>(source));
+  return AudioTrackProxy::Create(signaling_thread(), track);
+}
+
+// Worker-thread helper: creates the RTC event log for a new PeerConnection.
+std::unique_ptr<RtcEventLog> PeerConnectionFactory::CreateRtcEventLog_w() {
+  RTC_DCHECK_RUN_ON(worker_thread());
+
+  // The new (compressed) encoding is the default; a field trial can force
+  // the legacy format.
+  auto encoding_type = RtcEventLog::EncodingType::NewFormat;
+  if (field_trials().IsDisabled("WebRTC-RtcEventLogNewFormat"))
+    encoding_type = RtcEventLog::EncodingType::Legacy;
+  // Without an injected event-log factory, logging is a no-op.
+  return event_log_factory_ ? event_log_factory_->Create(encoding_type)
+                            : std::make_unique<RtcEventLogNull>();
+}
+
+// Worker-thread helper: assembles a CallConfig from the factory's injected
+// dependencies plus `configuration`, and creates the Call. Returns nullptr
+// if the media engine or call factory is missing.
+std::unique_ptr<Call> PeerConnectionFactory::CreateCall_w(
+    RtcEventLog* event_log,
+    const FieldTrialsView& field_trials,
+    const PeerConnectionInterface::RTCConfiguration& configuration) {
+  RTC_DCHECK_RUN_ON(worker_thread());
+
+  CallConfig call_config(event_log, network_thread());
+  if (!media_engine() || !context_->call_factory()) {
+    return nullptr;
+  }
+  call_config.audio_state = media_engine()->voice().GetAudioState();
+
+  // Default bitrate limits (30/300/2000 kbps), overridable via the
+  // "WebRTC-PcFactoryDefaultBitrates" field trial.
+  FieldTrialParameter<DataRate> min_bandwidth("min",
+                                              DataRate::KilobitsPerSec(30));
+  FieldTrialParameter<DataRate> start_bandwidth("start",
+                                                DataRate::KilobitsPerSec(300));
+  FieldTrialParameter<DataRate> max_bandwidth("max",
+                                              DataRate::KilobitsPerSec(2000));
+  ParseFieldTrial({&min_bandwidth, &start_bandwidth, &max_bandwidth},
+                  field_trials.Lookup("WebRTC-PcFactoryDefaultBitrates"));
+
+  // saturated_cast guards against overflow when converting bps to int.
+  call_config.bitrate_config.min_bitrate_bps =
+      rtc::saturated_cast<int>(min_bandwidth->bps());
+  call_config.bitrate_config.start_bitrate_bps =
+      rtc::saturated_cast<int>(start_bandwidth->bps());
+  call_config.bitrate_config.max_bitrate_bps =
+      rtc::saturated_cast<int>(max_bandwidth->bps());
+
+  call_config.fec_controller_factory = fec_controller_factory_.get();
+  call_config.task_queue_factory = task_queue_factory_.get();
+  call_config.network_state_predictor_factory =
+      network_state_predictor_factory_.get();
+  call_config.neteq_factory = neteq_factory_.get();
+
+  // The injected congestion controller is only used behind a field trial.
+  if (IsTrialEnabled("WebRTC-Bwe-InjectedCongestionController")) {
+    RTC_LOG(LS_INFO) << "Using injected network controller factory";
+    call_config.network_controller_factory =
+        injected_network_controller_factory_.get();
+  } else {
+    RTC_LOG(LS_INFO) << "Using default network controller factory";
+  }
+
+  call_config.trials = &field_trials;
+  call_config.rtp_transport_controller_send_factory =
+      transport_controller_send_factory_.get();
+  call_config.metronome = metronome_.get();
+  call_config.pacer_burst_interval = configuration.pacer_burst_interval;
+  return context_->call_factory()->CreateCall(call_config);
+}
+
+// True when the field trial `key` has a value starting with "Enabled"
+// (e.g. "Enabled" or "Enabled-5").
+bool PeerConnectionFactory::IsTrialEnabled(absl::string_view key) const {
+  return absl::StartsWith(field_trials().Lookup(key), "Enabled");
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/peer_connection_factory.h b/third_party/libwebrtc/pc/peer_connection_factory.h
new file mode 100644
index 0000000000..f55d09f6d8
--- /dev/null
+++ b/third_party/libwebrtc/pc/peer_connection_factory.h
@@ -0,0 +1,162 @@
+
+/*
+ * Copyright 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_PEER_CONNECTION_FACTORY_H_
+#define PC_PEER_CONNECTION_FACTORY_H_
+
+#include <stdint.h>
+#include <stdio.h>
+
+#include <memory>
+#include <string>
+
+#include "absl/strings/string_view.h"
+#include "api/audio_options.h"
+#include "api/fec_controller.h"
+#include "api/field_trials_view.h"
+#include "api/media_stream_interface.h"
+#include "api/media_types.h"
+#include "api/metronome/metronome.h"
+#include "api/neteq/neteq_factory.h"
+#include "api/network_state_predictor.h"
+#include "api/peer_connection_interface.h"
+#include "api/rtc_error.h"
+#include "api/rtc_event_log/rtc_event_log.h"
+#include "api/rtc_event_log/rtc_event_log_factory_interface.h"
+#include "api/rtp_parameters.h"
+#include "api/scoped_refptr.h"
+#include "api/sequence_checker.h"
+#include "api/task_queue/task_queue_factory.h"
+#include "api/transport/network_control.h"
+#include "api/transport/sctp_transport_factory_interface.h"
+#include "call/call.h"
+#include "call/rtp_transport_controller_send_factory_interface.h"
+#include "p2p/base/port_allocator.h"
+#include "pc/connection_context.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/rtc_certificate_generator.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/thread_annotations.h"
+
+namespace rtc {
+class BasicNetworkManager;
+class BasicPacketSocketFactory;
+} // namespace rtc
+
+namespace webrtc {
+
+class RtcEventLog;
+
+// Concrete implementation of PeerConnectionFactoryInterface. Shares one
+// ConnectionContext (threads, media engine, networking state) across all
+// PeerConnections it creates, and owns the per-factory injected
+// dependencies (event log, FEC/NetEq/network-controller factories, etc.).
+class PeerConnectionFactory : public PeerConnectionFactoryInterface {
+ public:
+  // Creates a PeerConnectionFactory. It returns nullptr on initialization
+  // error.
+  //
+  // The Dependencies structure allows simple management of all new
+  // dependencies being added to the PeerConnectionFactory.
+  static rtc::scoped_refptr<PeerConnectionFactory> Create(
+      PeerConnectionFactoryDependencies dependencies);
+
+  // PeerConnectionFactoryInterface implementation. Unless noted otherwise,
+  // these run on the signaling thread (see the RTC_DCHECKs in the .cc file).
+  void SetOptions(const Options& options) override;
+
+  RTCErrorOr<rtc::scoped_refptr<PeerConnectionInterface>>
+  CreatePeerConnectionOrError(
+      const PeerConnectionInterface::RTCConfiguration& configuration,
+      PeerConnectionDependencies dependencies) override;
+
+  RtpCapabilities GetRtpSenderCapabilities(
+      cricket::MediaType kind) const override;
+
+  RtpCapabilities GetRtpReceiverCapabilities(
+      cricket::MediaType kind) const override;
+
+  rtc::scoped_refptr<MediaStreamInterface> CreateLocalMediaStream(
+      const std::string& stream_id) override;
+
+  rtc::scoped_refptr<AudioSourceInterface> CreateAudioSource(
+      const cricket::AudioOptions& options) override;
+
+  rtc::scoped_refptr<VideoTrackInterface> CreateVideoTrack(
+      rtc::scoped_refptr<VideoTrackSourceInterface> video_source,
+      absl::string_view id) override;
+
+  rtc::scoped_refptr<AudioTrackInterface> CreateAudioTrack(
+      const std::string& id,
+      AudioSourceInterface* audio_source) override;
+
+  // AEC-dump control; these run on the worker thread.
+  bool StartAecDump(FILE* file, int64_t max_size_bytes) override;
+  void StopAecDump() override;
+
+  SctpTransportFactoryInterface* sctp_transport_factory() {
+    return context_->sctp_transport_factory();
+  }
+
+  rtc::Thread* signaling_thread() const {
+    // This method can be called on a different thread when the factory is
+    // created in CreatePeerConnectionFactory().
+    return context_->signaling_thread();
+  }
+
+  rtc::Thread* worker_thread() const { return context_->worker_thread(); }
+
+  const Options& options() const {
+    RTC_DCHECK_RUN_ON(signaling_thread());
+    return options_;
+  }
+
+  const FieldTrialsView& field_trials() const {
+    return context_->field_trials();
+  }
+
+  cricket::MediaEngineInterface* media_engine() const;
+
+ protected:
+  // Constructor used by the static Create() method. Modifies the dependencies.
+  PeerConnectionFactory(rtc::scoped_refptr<ConnectionContext> context,
+                        PeerConnectionFactoryDependencies* dependencies);
+
+  // Constructor for use in testing. Ignores the possibility of initialization
+  // failure. The dependencies are passed in by std::move().
+  explicit PeerConnectionFactory(
+      PeerConnectionFactoryDependencies dependencies);
+
+  virtual ~PeerConnectionFactory();
+
+ private:
+  rtc::Thread* network_thread() const { return context_->network_thread(); }
+
+  bool IsTrialEnabled(absl::string_view key) const;
+
+  // Worker-thread helpers ("_w" suffix) used by CreatePeerConnectionOrError().
+  std::unique_ptr<RtcEventLog> CreateRtcEventLog_w();
+  std::unique_ptr<Call> CreateCall_w(
+      RtcEventLog* event_log,
+      const FieldTrialsView& field_trials,
+      const PeerConnectionInterface::RTCConfiguration& configuration);
+
+  // State shared with every PeerConnection created by this factory.
+  rtc::scoped_refptr<ConnectionContext> context_;
+  PeerConnectionFactoryInterface::Options options_
+      RTC_GUARDED_BY(signaling_thread());
+  std::unique_ptr<TaskQueueFactory> task_queue_factory_;
+  std::unique_ptr<RtcEventLogFactoryInterface> event_log_factory_;
+  std::unique_ptr<FecControllerFactoryInterface> fec_controller_factory_;
+  std::unique_ptr<NetworkStatePredictorFactoryInterface>
+      network_state_predictor_factory_;
+  std::unique_ptr<NetworkControllerFactoryInterface>
+      injected_network_controller_factory_;
+  std::unique_ptr<NetEqFactory> neteq_factory_;
+  const std::unique_ptr<RtpTransportControllerSendFactoryInterface>
+      transport_controller_send_factory_;
+  // Released on the worker thread by the destructor.
+  std::unique_ptr<Metronome> metronome_ RTC_GUARDED_BY(worker_thread());
+};
+
+} // namespace webrtc
+
+#endif // PC_PEER_CONNECTION_FACTORY_H_
diff --git a/third_party/libwebrtc/pc/peer_connection_factory_proxy.h b/third_party/libwebrtc/pc/peer_connection_factory_proxy.h
new file mode 100644
index 0000000000..4781497642
--- /dev/null
+++ b/third_party/libwebrtc/pc/peer_connection_factory_proxy.h
@@ -0,0 +1,58 @@
+/*
+ * Copyright 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_PEER_CONNECTION_FACTORY_PROXY_H_
+#define PC_PEER_CONNECTION_FACTORY_PROXY_H_
+
+#include <memory>
+#include <string>
+#include <utility>
+
+#include "api/peer_connection_interface.h"
+#include "pc/proxy.h"
+
+namespace webrtc {
+
+// TODO(deadbeef): Move this to .cc file. What threads methods are called on is
+// an implementation detail.
+// Thread-marshalling proxy for PeerConnectionFactoryInterface. The primary
+// thread is the signaling thread and the secondary thread is the worker
+// thread (the two threads passed to PeerConnectionFactoryProxy::Create in
+// CreateModularPeerConnectionFactory()).
+BEGIN_PROXY_MAP(PeerConnectionFactory)
+// The wrapped factory is destroyed on the primary (signaling) thread.
+PROXY_PRIMARY_THREAD_DESTRUCTOR()
+PROXY_METHOD1(void, SetOptions, const Options&)
+PROXY_METHOD2(RTCErrorOr<rtc::scoped_refptr<PeerConnectionInterface>>,
+              CreatePeerConnectionOrError,
+              const PeerConnectionInterface::RTCConfiguration&,
+              PeerConnectionDependencies)
+PROXY_CONSTMETHOD1(webrtc::RtpCapabilities,
+                   GetRtpSenderCapabilities,
+                   cricket::MediaType)
+PROXY_CONSTMETHOD1(webrtc::RtpCapabilities,
+                   GetRtpReceiverCapabilities,
+                   cricket::MediaType)
+PROXY_METHOD1(rtc::scoped_refptr<MediaStreamInterface>,
+              CreateLocalMediaStream,
+              const std::string&)
+PROXY_METHOD1(rtc::scoped_refptr<AudioSourceInterface>,
+              CreateAudioSource,
+              const cricket::AudioOptions&)
+PROXY_METHOD2(rtc::scoped_refptr<VideoTrackInterface>,
+              CreateVideoTrack,
+              rtc::scoped_refptr<VideoTrackSourceInterface>,
+              absl::string_view)
+PROXY_METHOD2(rtc::scoped_refptr<AudioTrackInterface>,
+              CreateAudioTrack,
+              const std::string&,
+              AudioSourceInterface*)
+// SECONDARY: dispatched to the worker thread (AEC dumps live there).
+PROXY_SECONDARY_METHOD2(bool, StartAecDump, FILE*, int64_t)
+PROXY_SECONDARY_METHOD0(void, StopAecDump)
+END_PROXY_MAP(PeerConnectionFactory)
+
+} // namespace webrtc
+
+#endif // PC_PEER_CONNECTION_FACTORY_PROXY_H_
diff --git a/third_party/libwebrtc/pc/peer_connection_factory_unittest.cc b/third_party/libwebrtc/pc/peer_connection_factory_unittest.cc
new file mode 100644
index 0000000000..11e232c01f
--- /dev/null
+++ b/third_party/libwebrtc/pc/peer_connection_factory_unittest.cc
@@ -0,0 +1,736 @@
+/*
+ * Copyright 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/peer_connection_factory.h"
+
+#include <algorithm>
+#include <memory>
+#include <utility>
+#include <vector>
+
+#include "api/audio/audio_mixer.h"
+#include "api/audio_codecs/builtin_audio_decoder_factory.h"
+#include "api/audio_codecs/builtin_audio_encoder_factory.h"
+#include "api/create_peerconnection_factory.h"
+#include "api/data_channel_interface.h"
+#include "api/jsep.h"
+#include "api/media_stream_interface.h"
+#include "api/task_queue/default_task_queue_factory.h"
+#include "api/test/mock_packet_socket_factory.h"
+#include "api/video_codecs/video_decoder_factory_template.h"
+#include "api/video_codecs/video_decoder_factory_template_dav1d_adapter.h"
+#include "api/video_codecs/video_decoder_factory_template_libvpx_vp8_adapter.h"
+#include "api/video_codecs/video_decoder_factory_template_libvpx_vp9_adapter.h"
+#include "api/video_codecs/video_decoder_factory_template_open_h264_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template.h"
+#include "api/video_codecs/video_encoder_factory_template_libaom_av1_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template_libvpx_vp8_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template_libvpx_vp9_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template_open_h264_adapter.h"
+#include "media/base/fake_frame_source.h"
+#include "media/engine/webrtc_media_engine.h"
+#include "modules/audio_device/include/audio_device.h"
+#include "modules/audio_processing/include/audio_processing.h"
+#include "p2p/base/fake_port_allocator.h"
+#include "p2p/base/port.h"
+#include "p2p/base/port_allocator.h"
+#include "p2p/base/port_interface.h"
+#include "pc/test/fake_audio_capture_module.h"
+#include "pc/test/fake_video_track_source.h"
+#include "pc/test/mock_peer_connection_observers.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/internal/default_socket_server.h"
+#include "rtc_base/rtc_certificate_generator.h"
+#include "rtc_base/socket_address.h"
+#include "rtc_base/time_utils.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+#include "test/scoped_key_value_config.h"
+
+#ifdef WEBRTC_ANDROID
+#include "pc/test/android_test_initializer.h"
+#endif
+#include "pc/test/fake_rtc_certificate_generator.h"
+#include "pc/test/fake_video_track_renderer.h"
+
+namespace webrtc {
+namespace {
+
+using ::testing::_;
+using ::testing::AtLeast;
+using ::testing::InvokeWithoutArgs;
+using ::testing::NiceMock;
+using ::testing::Return;
+using ::testing::UnorderedElementsAre;
+
+static const char kStunIceServer[] = "stun:stun.l.google.com:19302";
+static const char kTurnIceServer[] = "turn:test.com:1234";
+static const char kTurnIceServerWithTransport[] =
+ "turn:hello.com?transport=tcp";
+static const char kSecureTurnIceServer[] = "turns:hello.com?transport=tcp";
+static const char kSecureTurnIceServerWithoutTransportParam[] =
+ "turns:hello.com:443";
+static const char kSecureTurnIceServerWithoutTransportAndPortParam[] =
+ "turns:hello.com";
+static const char kTurnIceServerWithNoUsernameInUri[] = "turn:test.com:1234";
+static const char kTurnPassword[] = "turnpassword";
+static const int kDefaultStunPort = 3478;
+static const int kDefaultStunTlsPort = 5349;
+static const char kTurnUsername[] = "test";
+static const char kStunIceServerWithIPv4Address[] = "stun:1.2.3.4:1234";
+static const char kStunIceServerWithIPv4AddressWithoutPort[] = "stun:1.2.3.4";
+static const char kStunIceServerWithIPv6Address[] = "stun:[2401:fa00:4::]:1234";
+static const char kStunIceServerWithIPv6AddressWithoutPort[] =
+ "stun:[2401:fa00:4::]";
+static const char kTurnIceServerWithIPv6Address[] = "turn:[2401:fa00:4::]:1234";
+
+class NullPeerConnectionObserver : public PeerConnectionObserver {
+ public:
+ virtual ~NullPeerConnectionObserver() = default;
+ void OnSignalingChange(
+ PeerConnectionInterface::SignalingState new_state) override {}
+ void OnAddStream(rtc::scoped_refptr<MediaStreamInterface> stream) override {}
+ void OnRemoveStream(
+ rtc::scoped_refptr<MediaStreamInterface> stream) override {}
+ void OnDataChannel(
+ rtc::scoped_refptr<DataChannelInterface> data_channel) override {}
+ void OnRenegotiationNeeded() override {}
+ void OnIceConnectionChange(
+ PeerConnectionInterface::IceConnectionState new_state) override {}
+ void OnIceGatheringChange(
+ PeerConnectionInterface::IceGatheringState new_state) override {}
+ void OnIceCandidate(const webrtc::IceCandidateInterface* candidate) override {
+ }
+};
+
+class MockNetworkManager : public rtc::NetworkManager {
+ public:
+ MOCK_METHOD(void, StartUpdating, (), (override));
+ MOCK_METHOD(void, StopUpdating, (), (override));
+ MOCK_METHOD(std::vector<const rtc::Network*>,
+ GetNetworks,
+ (),
+ (const override));
+ MOCK_METHOD(std::vector<const rtc::Network*>,
+ GetAnyAddressNetworks,
+ (),
+ (override));
+};
+
+class PeerConnectionFactoryTest : public ::testing::Test {
+ public:
+ PeerConnectionFactoryTest()
+ : socket_server_(rtc::CreateDefaultSocketServer()),
+ main_thread_(socket_server_.get()) {}
+
+ private:
+ void SetUp() {
+#ifdef WEBRTC_ANDROID
+ webrtc::InitializeAndroidObjects();
+#endif
+ // Use fake audio device module since we're only testing the interface
+ // level, and using a real one could make tests flaky e.g. when run in
+ // parallel.
+ factory_ = webrtc::CreatePeerConnectionFactory(
+ rtc::Thread::Current(), rtc::Thread::Current(), rtc::Thread::Current(),
+ rtc::scoped_refptr<webrtc::AudioDeviceModule>(
+ FakeAudioCaptureModule::Create()),
+ webrtc::CreateBuiltinAudioEncoderFactory(),
+ webrtc::CreateBuiltinAudioDecoderFactory(),
+ std::make_unique<VideoEncoderFactoryTemplate<
+ LibvpxVp8EncoderTemplateAdapter, LibvpxVp9EncoderTemplateAdapter,
+ OpenH264EncoderTemplateAdapter, LibaomAv1EncoderTemplateAdapter>>(),
+ std::make_unique<VideoDecoderFactoryTemplate<
+ LibvpxVp8DecoderTemplateAdapter, LibvpxVp9DecoderTemplateAdapter,
+ OpenH264DecoderTemplateAdapter, Dav1dDecoderTemplateAdapter>>(),
+ nullptr /* audio_mixer */, nullptr /* audio_processing */);
+
+ ASSERT_TRUE(factory_.get() != NULL);
+ packet_socket_factory_.reset(
+ new rtc::BasicPacketSocketFactory(socket_server_.get()));
+ port_allocator_.reset(new cricket::FakePortAllocator(
+ rtc::Thread::Current(), packet_socket_factory_.get(), &field_trials_));
+ raw_port_allocator_ = port_allocator_.get();
+ }
+
+ protected:
+ void VerifyStunServers(cricket::ServerAddresses stun_servers) {
+ EXPECT_EQ(stun_servers, raw_port_allocator_->stun_servers());
+ }
+
+ void VerifyTurnServers(std::vector<cricket::RelayServerConfig> turn_servers) {
+ EXPECT_EQ(turn_servers.size(), raw_port_allocator_->turn_servers().size());
+ for (size_t i = 0; i < turn_servers.size(); ++i) {
+ ASSERT_EQ(1u, turn_servers[i].ports.size());
+ EXPECT_EQ(1u, raw_port_allocator_->turn_servers()[i].ports.size());
+ EXPECT_EQ(
+ turn_servers[i].ports[0].address.ToString(),
+ raw_port_allocator_->turn_servers()[i].ports[0].address.ToString());
+ EXPECT_EQ(turn_servers[i].ports[0].proto,
+ raw_port_allocator_->turn_servers()[i].ports[0].proto);
+ EXPECT_EQ(turn_servers[i].credentials.username,
+ raw_port_allocator_->turn_servers()[i].credentials.username);
+ EXPECT_EQ(turn_servers[i].credentials.password,
+ raw_port_allocator_->turn_servers()[i].credentials.password);
+ }
+ }
+
+ void VerifyAudioCodecCapability(const webrtc::RtpCodecCapability& codec) {
+ EXPECT_EQ(codec.kind, cricket::MEDIA_TYPE_AUDIO);
+ EXPECT_FALSE(codec.name.empty());
+ EXPECT_GT(codec.clock_rate, 0);
+ EXPECT_GT(codec.num_channels, 0);
+ }
+
+ void VerifyVideoCodecCapability(const webrtc::RtpCodecCapability& codec,
+ bool sender) {
+ EXPECT_EQ(codec.kind, cricket::MEDIA_TYPE_VIDEO);
+ EXPECT_FALSE(codec.name.empty());
+ EXPECT_GT(codec.clock_rate, 0);
+ if (sender) {
+ if (codec.name == "VP8" || codec.name == "H264") {
+ EXPECT_THAT(codec.scalability_modes,
+ UnorderedElementsAre(webrtc::ScalabilityMode::kL1T1,
+ webrtc::ScalabilityMode::kL1T2,
+ webrtc::ScalabilityMode::kL1T3))
+ << "Codec: " << codec.name;
+ } else if (codec.name == "VP9" || codec.name == "AV1") {
+ EXPECT_THAT(
+ codec.scalability_modes,
+ UnorderedElementsAre(
+ // clang-format off
+ webrtc::ScalabilityMode::kL1T1,
+ webrtc::ScalabilityMode::kL1T2,
+ webrtc::ScalabilityMode::kL1T3,
+ webrtc::ScalabilityMode::kL2T1,
+ webrtc::ScalabilityMode::kL2T1h,
+ webrtc::ScalabilityMode::kL2T1_KEY,
+ webrtc::ScalabilityMode::kL2T2,
+ webrtc::ScalabilityMode::kL2T2h,
+ webrtc::ScalabilityMode::kL2T2_KEY,
+ webrtc::ScalabilityMode::kL2T2_KEY_SHIFT,
+ webrtc::ScalabilityMode::kL2T3,
+ webrtc::ScalabilityMode::kL2T3h,
+ webrtc::ScalabilityMode::kL2T3_KEY,
+ webrtc::ScalabilityMode::kL3T1,
+ webrtc::ScalabilityMode::kL3T1h,
+ webrtc::ScalabilityMode::kL3T1_KEY,
+ webrtc::ScalabilityMode::kL3T2,
+ webrtc::ScalabilityMode::kL3T2h,
+ webrtc::ScalabilityMode::kL3T2_KEY,
+ webrtc::ScalabilityMode::kL3T3,
+ webrtc::ScalabilityMode::kL3T3h,
+ webrtc::ScalabilityMode::kL3T3_KEY,
+ webrtc::ScalabilityMode::kS2T1,
+ webrtc::ScalabilityMode::kS2T1h,
+ webrtc::ScalabilityMode::kS2T2,
+ webrtc::ScalabilityMode::kS2T2h,
+ webrtc::ScalabilityMode::kS2T3,
+ webrtc::ScalabilityMode::kS2T3h,
+ webrtc::ScalabilityMode::kS3T1,
+ webrtc::ScalabilityMode::kS3T1h,
+ webrtc::ScalabilityMode::kS3T2,
+ webrtc::ScalabilityMode::kS3T2h,
+ webrtc::ScalabilityMode::kS3T3,
+ webrtc::ScalabilityMode::kS3T3h)
+ // clang-format on
+ )
+ << "Codec: " << codec.name;
+ } else {
+ EXPECT_TRUE(codec.scalability_modes.empty());
+ }
+ } else {
+ EXPECT_TRUE(codec.scalability_modes.empty());
+ }
+ }
+
+ webrtc::test::ScopedKeyValueConfig field_trials_;
+ std::unique_ptr<rtc::SocketServer> socket_server_;
+ rtc::AutoSocketServerThread main_thread_;
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> factory_;
+ NullPeerConnectionObserver observer_;
+ std::unique_ptr<rtc::PacketSocketFactory> packet_socket_factory_;
+ std::unique_ptr<cricket::FakePortAllocator> port_allocator_;
+ // Since the PC owns the port allocator after it's been initialized,
+ // this should only be used when known to be safe.
+ cricket::FakePortAllocator* raw_port_allocator_;
+};
+
+// Since there is no public PeerConnectionFactory API to control RTX usage, need
+// to reconstruct factory with our own ConnectionContext.
+rtc::scoped_refptr<PeerConnectionFactoryInterface>
+CreatePeerConnectionFactoryWithRtxDisabled() {
+  webrtc::PeerConnectionFactoryDependencies pcf_dependencies;
+  pcf_dependencies.signaling_thread = rtc::Thread::Current();
+  pcf_dependencies.worker_thread = rtc::Thread::Current();
+  pcf_dependencies.network_thread = rtc::Thread::Current();
+  pcf_dependencies.task_queue_factory = CreateDefaultTaskQueueFactory();
+  pcf_dependencies.call_factory = CreateCallFactory();
+  pcf_dependencies.trials = std::make_unique<webrtc::FieldTrialBasedConfig>();
+
+  cricket::MediaEngineDependencies media_dependencies;
+  media_dependencies.task_queue_factory =
+      pcf_dependencies.task_queue_factory.get();
+  media_dependencies.adm = rtc::scoped_refptr<webrtc::AudioDeviceModule>(
+      FakeAudioCaptureModule::Create());
+  media_dependencies.audio_encoder_factory =
+      webrtc::CreateBuiltinAudioEncoderFactory();
+  media_dependencies.audio_decoder_factory =
+      webrtc::CreateBuiltinAudioDecoderFactory();
+  media_dependencies.video_encoder_factory =
+      std::make_unique<VideoEncoderFactoryTemplate<
+          LibvpxVp8EncoderTemplateAdapter, LibvpxVp9EncoderTemplateAdapter,
+          OpenH264EncoderTemplateAdapter, LibaomAv1EncoderTemplateAdapter>>();
+  media_dependencies.video_decoder_factory =
+      std::make_unique<VideoDecoderFactoryTemplate<
+          LibvpxVp8DecoderTemplateAdapter, LibvpxVp9DecoderTemplateAdapter,
+          OpenH264DecoderTemplateAdapter, Dav1dDecoderTemplateAdapter>>();
+  media_dependencies.trials = pcf_dependencies.trials.get();
+  pcf_dependencies.media_engine =
+      cricket::CreateMediaEngine(std::move(media_dependencies));
+
+  rtc::scoped_refptr<webrtc::ConnectionContext> context =
+      ConnectionContext::Create(&pcf_dependencies);
+  context->set_use_rtx(false);
+  return rtc::make_ref_counted<PeerConnectionFactory>(context,
+                                                      &pcf_dependencies);
+}
+
+// Verify creation of PeerConnection using internal ADM, video factory and
+// internal libjingle threads.
+// TODO(henrika): disabling this test since relying on real audio can result in
+// flaky tests and focuses on details that are beyond what you might expect
+// for a PeerConnectionFactory unit test.
+// See https://bugs.chromium.org/p/webrtc/issues/detail?id=7806 for details.
+TEST(PeerConnectionFactoryTestInternal, DISABLED_CreatePCUsingInternalModules) {
+#ifdef WEBRTC_ANDROID
+ webrtc::InitializeAndroidObjects();
+#endif
+
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+ webrtc::CreatePeerConnectionFactory(
+ nullptr /* network_thread */, nullptr /* worker_thread */,
+ nullptr /* signaling_thread */, nullptr /* default_adm */,
+ webrtc::CreateBuiltinAudioEncoderFactory(),
+ webrtc::CreateBuiltinAudioDecoderFactory(),
+ nullptr /* video_encoder_factory */,
+ nullptr /* video_decoder_factory */, nullptr /* audio_mixer */,
+ nullptr /* audio_processing */));
+
+ NullPeerConnectionObserver observer;
+ webrtc::PeerConnectionInterface::RTCConfiguration config;
+ config.sdp_semantics = webrtc::SdpSemantics::kUnifiedPlan;
+
+ std::unique_ptr<FakeRTCCertificateGenerator> cert_generator(
+ new FakeRTCCertificateGenerator());
+ webrtc::PeerConnectionDependencies pc_dependencies(&observer);
+ pc_dependencies.cert_generator = std::move(cert_generator);
+ auto result =
+ factory->CreatePeerConnectionOrError(config, std::move(pc_dependencies));
+
+ EXPECT_TRUE(result.ok());
+}
+
+TEST_F(PeerConnectionFactoryTest, CheckRtpSenderAudioCapabilities) {
+ webrtc::RtpCapabilities audio_capabilities =
+ factory_->GetRtpSenderCapabilities(cricket::MEDIA_TYPE_AUDIO);
+ EXPECT_FALSE(audio_capabilities.codecs.empty());
+ for (const auto& codec : audio_capabilities.codecs) {
+ VerifyAudioCodecCapability(codec);
+ }
+ EXPECT_FALSE(audio_capabilities.header_extensions.empty());
+ for (const auto& header_extension : audio_capabilities.header_extensions) {
+ EXPECT_FALSE(header_extension.uri.empty());
+ }
+}
+
+TEST_F(PeerConnectionFactoryTest, CheckRtpSenderVideoCapabilities) {
+ webrtc::RtpCapabilities video_capabilities =
+ factory_->GetRtpSenderCapabilities(cricket::MEDIA_TYPE_VIDEO);
+ EXPECT_FALSE(video_capabilities.codecs.empty());
+ for (const auto& codec : video_capabilities.codecs) {
+ VerifyVideoCodecCapability(codec, true);
+ }
+ EXPECT_FALSE(video_capabilities.header_extensions.empty());
+ for (const auto& header_extension : video_capabilities.header_extensions) {
+ EXPECT_FALSE(header_extension.uri.empty());
+ }
+}
+
+TEST_F(PeerConnectionFactoryTest, CheckRtpSenderRtxEnabledCapabilities) {
+ webrtc::RtpCapabilities video_capabilities =
+ factory_->GetRtpSenderCapabilities(cricket::MEDIA_TYPE_VIDEO);
+ const auto it = std::find_if(
+ video_capabilities.codecs.begin(), video_capabilities.codecs.end(),
+ [](const auto& c) { return c.name == cricket::kRtxCodecName; });
+ EXPECT_TRUE(it != video_capabilities.codecs.end());
+}
+
+TEST(PeerConnectionFactoryTestInternal, CheckRtpSenderRtxDisabledCapabilities) {
+ auto factory = CreatePeerConnectionFactoryWithRtxDisabled();
+ webrtc::RtpCapabilities video_capabilities =
+ factory->GetRtpSenderCapabilities(cricket::MEDIA_TYPE_VIDEO);
+ const auto it = std::find_if(
+ video_capabilities.codecs.begin(), video_capabilities.codecs.end(),
+ [](const auto& c) { return c.name == cricket::kRtxCodecName; });
+ EXPECT_TRUE(it == video_capabilities.codecs.end());
+}
+
+TEST_F(PeerConnectionFactoryTest, CheckRtpSenderDataCapabilities) {
+ webrtc::RtpCapabilities data_capabilities =
+ factory_->GetRtpSenderCapabilities(cricket::MEDIA_TYPE_DATA);
+ EXPECT_TRUE(data_capabilities.codecs.empty());
+ EXPECT_TRUE(data_capabilities.header_extensions.empty());
+}
+
+TEST_F(PeerConnectionFactoryTest, CheckRtpReceiverAudioCapabilities) {
+ webrtc::RtpCapabilities audio_capabilities =
+ factory_->GetRtpReceiverCapabilities(cricket::MEDIA_TYPE_AUDIO);
+ EXPECT_FALSE(audio_capabilities.codecs.empty());
+ for (const auto& codec : audio_capabilities.codecs) {
+ VerifyAudioCodecCapability(codec);
+ }
+ EXPECT_FALSE(audio_capabilities.header_extensions.empty());
+ for (const auto& header_extension : audio_capabilities.header_extensions) {
+ EXPECT_FALSE(header_extension.uri.empty());
+ }
+}
+
+TEST_F(PeerConnectionFactoryTest, CheckRtpReceiverVideoCapabilities) {
+ webrtc::RtpCapabilities video_capabilities =
+ factory_->GetRtpReceiverCapabilities(cricket::MEDIA_TYPE_VIDEO);
+ EXPECT_FALSE(video_capabilities.codecs.empty());
+ for (const auto& codec : video_capabilities.codecs) {
+ VerifyVideoCodecCapability(codec, false);
+ }
+ EXPECT_FALSE(video_capabilities.header_extensions.empty());
+ for (const auto& header_extension : video_capabilities.header_extensions) {
+ EXPECT_FALSE(header_extension.uri.empty());
+ }
+}
+
+TEST_F(PeerConnectionFactoryTest, CheckRtpReceiverRtxEnabledCapabilities) {
+ webrtc::RtpCapabilities video_capabilities =
+ factory_->GetRtpReceiverCapabilities(cricket::MEDIA_TYPE_VIDEO);
+ const auto it = std::find_if(
+ video_capabilities.codecs.begin(), video_capabilities.codecs.end(),
+ [](const auto& c) { return c.name == cricket::kRtxCodecName; });
+ EXPECT_TRUE(it != video_capabilities.codecs.end());
+}
+
+TEST(PeerConnectionFactoryTestInternal,
+ CheckRtpReceiverRtxDisabledCapabilities) {
+ auto factory = CreatePeerConnectionFactoryWithRtxDisabled();
+ webrtc::RtpCapabilities video_capabilities =
+ factory->GetRtpReceiverCapabilities(cricket::MEDIA_TYPE_VIDEO);
+ const auto it = std::find_if(
+ video_capabilities.codecs.begin(), video_capabilities.codecs.end(),
+ [](const auto& c) { return c.name == cricket::kRtxCodecName; });
+ EXPECT_TRUE(it == video_capabilities.codecs.end());
+}
+
+TEST_F(PeerConnectionFactoryTest, CheckRtpReceiverDataCapabilities) {
+ webrtc::RtpCapabilities data_capabilities =
+ factory_->GetRtpReceiverCapabilities(cricket::MEDIA_TYPE_DATA);
+ EXPECT_TRUE(data_capabilities.codecs.empty());
+ EXPECT_TRUE(data_capabilities.header_extensions.empty());
+}
+
+// This test verifies creation of PeerConnection with valid STUN and TURN
+// configuration. Also verifies the URL's parsed correctly as expected.
+TEST_F(PeerConnectionFactoryTest, CreatePCUsingIceServers) {
+ PeerConnectionInterface::RTCConfiguration config;
+ config.sdp_semantics = webrtc::SdpSemantics::kUnifiedPlan;
+ webrtc::PeerConnectionInterface::IceServer ice_server;
+ ice_server.uri = kStunIceServer;
+ config.servers.push_back(ice_server);
+ ice_server.uri = kTurnIceServer;
+ ice_server.username = kTurnUsername;
+ ice_server.password = kTurnPassword;
+ config.servers.push_back(ice_server);
+ ice_server.uri = kTurnIceServerWithTransport;
+ ice_server.username = kTurnUsername;
+ ice_server.password = kTurnPassword;
+ config.servers.push_back(ice_server);
+ webrtc::PeerConnectionDependencies pc_dependencies(&observer_);
+ pc_dependencies.cert_generator =
+ std::make_unique<FakeRTCCertificateGenerator>();
+ pc_dependencies.allocator = std::move(port_allocator_);
+ auto result =
+ factory_->CreatePeerConnectionOrError(config, std::move(pc_dependencies));
+ ASSERT_TRUE(result.ok());
+ cricket::ServerAddresses stun_servers;
+ rtc::SocketAddress stun1("stun.l.google.com", 19302);
+ stun_servers.insert(stun1);
+ VerifyStunServers(stun_servers);
+ std::vector<cricket::RelayServerConfig> turn_servers;
+ cricket::RelayServerConfig turn1("test.com", 1234, kTurnUsername,
+ kTurnPassword, cricket::PROTO_UDP);
+ turn_servers.push_back(turn1);
+ cricket::RelayServerConfig turn2("hello.com", kDefaultStunPort, kTurnUsername,
+ kTurnPassword, cricket::PROTO_TCP);
+ turn_servers.push_back(turn2);
+ VerifyTurnServers(turn_servers);
+}
+
+// This test verifies creation of PeerConnection with valid STUN and TURN
+// configuration. Also verifies the list of URL's parsed correctly as expected.
+TEST_F(PeerConnectionFactoryTest, CreatePCUsingIceServersUrls) {
+ PeerConnectionInterface::RTCConfiguration config;
+ config.sdp_semantics = webrtc::SdpSemantics::kUnifiedPlan;
+ webrtc::PeerConnectionInterface::IceServer ice_server;
+ ice_server.urls.push_back(kStunIceServer);
+ ice_server.urls.push_back(kTurnIceServer);
+ ice_server.urls.push_back(kTurnIceServerWithTransport);
+ ice_server.username = kTurnUsername;
+ ice_server.password = kTurnPassword;
+ config.servers.push_back(ice_server);
+ webrtc::PeerConnectionDependencies pc_dependencies(&observer_);
+ pc_dependencies.cert_generator =
+ std::make_unique<FakeRTCCertificateGenerator>();
+ pc_dependencies.allocator = std::move(port_allocator_);
+ auto result =
+ factory_->CreatePeerConnectionOrError(config, std::move(pc_dependencies));
+ ASSERT_TRUE(result.ok());
+ cricket::ServerAddresses stun_servers;
+ rtc::SocketAddress stun1("stun.l.google.com", 19302);
+ stun_servers.insert(stun1);
+ VerifyStunServers(stun_servers);
+ std::vector<cricket::RelayServerConfig> turn_servers;
+ cricket::RelayServerConfig turn1("test.com", 1234, kTurnUsername,
+ kTurnPassword, cricket::PROTO_UDP);
+ turn_servers.push_back(turn1);
+ cricket::RelayServerConfig turn2("hello.com", kDefaultStunPort, kTurnUsername,
+ kTurnPassword, cricket::PROTO_TCP);
+ turn_servers.push_back(turn2);
+ VerifyTurnServers(turn_servers);
+}
+
+TEST_F(PeerConnectionFactoryTest, CreatePCUsingNoUsernameInUri) {
+ PeerConnectionInterface::RTCConfiguration config;
+ config.sdp_semantics = webrtc::SdpSemantics::kUnifiedPlan;
+ webrtc::PeerConnectionInterface::IceServer ice_server;
+ ice_server.uri = kStunIceServer;
+ config.servers.push_back(ice_server);
+ ice_server.uri = kTurnIceServerWithNoUsernameInUri;
+ ice_server.username = kTurnUsername;
+ ice_server.password = kTurnPassword;
+ config.servers.push_back(ice_server);
+ webrtc::PeerConnectionDependencies pc_dependencies(&observer_);
+ pc_dependencies.cert_generator =
+ std::make_unique<FakeRTCCertificateGenerator>();
+ pc_dependencies.allocator = std::move(port_allocator_);
+ auto result =
+ factory_->CreatePeerConnectionOrError(config, std::move(pc_dependencies));
+ ASSERT_TRUE(result.ok());
+ std::vector<cricket::RelayServerConfig> turn_servers;
+ cricket::RelayServerConfig turn("test.com", 1234, kTurnUsername,
+ kTurnPassword, cricket::PROTO_UDP);
+ turn_servers.push_back(turn);
+ VerifyTurnServers(turn_servers);
+}
+
+// This test verifies the PeerConnection created properly with TURN url which
+// has transport parameter in it.
+TEST_F(PeerConnectionFactoryTest, CreatePCUsingTurnUrlWithTransportParam) {
+ PeerConnectionInterface::RTCConfiguration config;
+ config.sdp_semantics = webrtc::SdpSemantics::kUnifiedPlan;
+ webrtc::PeerConnectionInterface::IceServer ice_server;
+ ice_server.uri = kTurnIceServerWithTransport;
+ ice_server.username = kTurnUsername;
+ ice_server.password = kTurnPassword;
+ config.servers.push_back(ice_server);
+ webrtc::PeerConnectionDependencies pc_dependencies(&observer_);
+ pc_dependencies.cert_generator =
+ std::make_unique<FakeRTCCertificateGenerator>();
+ pc_dependencies.allocator = std::move(port_allocator_);
+ auto result =
+ factory_->CreatePeerConnectionOrError(config, std::move(pc_dependencies));
+ ASSERT_TRUE(result.ok());
+ std::vector<cricket::RelayServerConfig> turn_servers;
+ cricket::RelayServerConfig turn("hello.com", kDefaultStunPort, kTurnUsername,
+ kTurnPassword, cricket::PROTO_TCP);
+ turn_servers.push_back(turn);
+ VerifyTurnServers(turn_servers);
+}
+
+TEST_F(PeerConnectionFactoryTest, CreatePCUsingSecureTurnUrl) {
+ PeerConnectionInterface::RTCConfiguration config;
+ config.sdp_semantics = webrtc::SdpSemantics::kUnifiedPlan;
+ webrtc::PeerConnectionInterface::IceServer ice_server;
+ ice_server.uri = kSecureTurnIceServer;
+ ice_server.username = kTurnUsername;
+ ice_server.password = kTurnPassword;
+ config.servers.push_back(ice_server);
+ ice_server.uri = kSecureTurnIceServerWithoutTransportParam;
+ ice_server.username = kTurnUsername;
+ ice_server.password = kTurnPassword;
+ config.servers.push_back(ice_server);
+ ice_server.uri = kSecureTurnIceServerWithoutTransportAndPortParam;
+ ice_server.username = kTurnUsername;
+ ice_server.password = kTurnPassword;
+ config.servers.push_back(ice_server);
+ webrtc::PeerConnectionDependencies pc_dependencies(&observer_);
+ pc_dependencies.cert_generator =
+ std::make_unique<FakeRTCCertificateGenerator>();
+ pc_dependencies.allocator = std::move(port_allocator_);
+ auto result =
+ factory_->CreatePeerConnectionOrError(config, std::move(pc_dependencies));
+ ASSERT_TRUE(result.ok());
+ std::vector<cricket::RelayServerConfig> turn_servers;
+ cricket::RelayServerConfig turn1("hello.com", kDefaultStunTlsPort,
+ kTurnUsername, kTurnPassword,
+ cricket::PROTO_TLS);
+ turn_servers.push_back(turn1);
+  // A TURNS URI without a transport param should default to tcp (tls over tcp).
+ cricket::RelayServerConfig turn2("hello.com", 443, kTurnUsername,
+ kTurnPassword, cricket::PROTO_TLS);
+ turn_servers.push_back(turn2);
+ cricket::RelayServerConfig turn3("hello.com", kDefaultStunTlsPort,
+ kTurnUsername, kTurnPassword,
+ cricket::PROTO_TLS);
+ turn_servers.push_back(turn3);
+ VerifyTurnServers(turn_servers);
+}
+
+TEST_F(PeerConnectionFactoryTest, CreatePCUsingIPLiteralAddress) {
+ PeerConnectionInterface::RTCConfiguration config;
+ config.sdp_semantics = webrtc::SdpSemantics::kUnifiedPlan;
+ webrtc::PeerConnectionInterface::IceServer ice_server;
+ ice_server.uri = kStunIceServerWithIPv4Address;
+ config.servers.push_back(ice_server);
+ ice_server.uri = kStunIceServerWithIPv4AddressWithoutPort;
+ config.servers.push_back(ice_server);
+ ice_server.uri = kStunIceServerWithIPv6Address;
+ config.servers.push_back(ice_server);
+ ice_server.uri = kStunIceServerWithIPv6AddressWithoutPort;
+ config.servers.push_back(ice_server);
+ ice_server.uri = kTurnIceServerWithIPv6Address;
+ ice_server.username = kTurnUsername;
+ ice_server.password = kTurnPassword;
+ config.servers.push_back(ice_server);
+ webrtc::PeerConnectionDependencies pc_dependencies(&observer_);
+ pc_dependencies.cert_generator =
+ std::make_unique<FakeRTCCertificateGenerator>();
+ pc_dependencies.allocator = std::move(port_allocator_);
+ auto result =
+ factory_->CreatePeerConnectionOrError(config, std::move(pc_dependencies));
+ ASSERT_TRUE(result.ok());
+ cricket::ServerAddresses stun_servers;
+ rtc::SocketAddress stun1("1.2.3.4", 1234);
+ stun_servers.insert(stun1);
+ rtc::SocketAddress stun2("1.2.3.4", 3478);
+ stun_servers.insert(stun2); // Default port
+ rtc::SocketAddress stun3("2401:fa00:4::", 1234);
+ stun_servers.insert(stun3);
+ rtc::SocketAddress stun4("2401:fa00:4::", 3478);
+ stun_servers.insert(stun4); // Default port
+ VerifyStunServers(stun_servers);
+
+ std::vector<cricket::RelayServerConfig> turn_servers;
+ cricket::RelayServerConfig turn1("2401:fa00:4::", 1234, kTurnUsername,
+ kTurnPassword, cricket::PROTO_UDP);
+ turn_servers.push_back(turn1);
+ VerifyTurnServers(turn_servers);
+}
+
+// This test verifies the captured stream is rendered locally using a
+// local video track.
+TEST_F(PeerConnectionFactoryTest, LocalRendering) {
+ rtc::scoped_refptr<webrtc::FakeVideoTrackSource> source =
+ webrtc::FakeVideoTrackSource::Create(/*is_screencast=*/false);
+
+ cricket::FakeFrameSource frame_source(1280, 720,
+ rtc::kNumMicrosecsPerSec / 30);
+
+ ASSERT_TRUE(source.get() != NULL);
+ rtc::scoped_refptr<VideoTrackInterface> track(
+ factory_->CreateVideoTrack(source, "testlabel"));
+ ASSERT_TRUE(track.get() != NULL);
+ FakeVideoTrackRenderer local_renderer(track.get());
+
+ EXPECT_EQ(0, local_renderer.num_rendered_frames());
+ source->InjectFrame(frame_source.GetFrame());
+ EXPECT_EQ(1, local_renderer.num_rendered_frames());
+ EXPECT_FALSE(local_renderer.black_frame());
+
+ track->set_enabled(false);
+ source->InjectFrame(frame_source.GetFrame());
+ EXPECT_EQ(2, local_renderer.num_rendered_frames());
+ EXPECT_TRUE(local_renderer.black_frame());
+
+ track->set_enabled(true);
+ source->InjectFrame(frame_source.GetFrame());
+ EXPECT_EQ(3, local_renderer.num_rendered_frames());
+ EXPECT_FALSE(local_renderer.black_frame());
+}
+
+TEST(PeerConnectionFactoryDependenciesTest, UsesNetworkManager) {
+ constexpr webrtc::TimeDelta kWaitTimeout = webrtc::TimeDelta::Seconds(10);
+ auto mock_network_manager = std::make_unique<NiceMock<MockNetworkManager>>();
+
+ rtc::Event called;
+ EXPECT_CALL(*mock_network_manager, StartUpdating())
+ .Times(AtLeast(1))
+ .WillRepeatedly(InvokeWithoutArgs([&] { called.Set(); }));
+
+ webrtc::PeerConnectionFactoryDependencies pcf_dependencies;
+ pcf_dependencies.network_manager = std::move(mock_network_manager);
+
+ rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> pcf =
+ CreateModularPeerConnectionFactory(std::move(pcf_dependencies));
+
+ PeerConnectionInterface::RTCConfiguration config;
+ config.ice_candidate_pool_size = 2;
+ NullPeerConnectionObserver observer;
+ auto pc = pcf->CreatePeerConnectionOrError(
+ config, webrtc::PeerConnectionDependencies(&observer));
+ ASSERT_TRUE(pc.ok());
+
+ called.Wait(kWaitTimeout);
+}
+
+TEST(PeerConnectionFactoryDependenciesTest, UsesPacketSocketFactory) {
+ constexpr webrtc::TimeDelta kWaitTimeout = webrtc::TimeDelta::Seconds(10);
+ auto mock_socket_factory =
+ std::make_unique<NiceMock<rtc::MockPacketSocketFactory>>();
+
+ rtc::Event called;
+ EXPECT_CALL(*mock_socket_factory, CreateUdpSocket(_, _, _))
+ .WillOnce(InvokeWithoutArgs([&] {
+ called.Set();
+ return nullptr;
+ }))
+ .WillRepeatedly(Return(nullptr));
+
+ webrtc::PeerConnectionFactoryDependencies pcf_dependencies;
+ pcf_dependencies.packet_socket_factory = std::move(mock_socket_factory);
+
+ rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> pcf =
+ CreateModularPeerConnectionFactory(std::move(pcf_dependencies));
+
+ // By default, localhost addresses are ignored, which makes tests fail if test
+ // machine is offline.
+ PeerConnectionFactoryInterface::Options options;
+ options.network_ignore_mask = 0;
+ pcf->SetOptions(options);
+
+ PeerConnectionInterface::RTCConfiguration config;
+ config.ice_candidate_pool_size = 2;
+ NullPeerConnectionObserver observer;
+ auto pc = pcf->CreatePeerConnectionOrError(
+ config, webrtc::PeerConnectionDependencies(&observer));
+ ASSERT_TRUE(pc.ok());
+
+ called.Wait(kWaitTimeout);
+}
+
+} // namespace
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/peer_connection_field_trial_tests.cc b/third_party/libwebrtc/pc/peer_connection_field_trial_tests.cc
new file mode 100644
index 0000000000..7799c9d6e3
--- /dev/null
+++ b/third_party/libwebrtc/pc/peer_connection_field_trial_tests.cc
@@ -0,0 +1,277 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file contains tests that verify that field trials do what they're
+// supposed to do.
+
+#include <set>
+
+#include "api/audio_codecs/builtin_audio_decoder_factory.h"
+#include "api/audio_codecs/builtin_audio_encoder_factory.h"
+#include "api/create_peerconnection_factory.h"
+#include "api/peer_connection_interface.h"
+#include "api/stats/rtcstats_objects.h"
+#include "api/task_queue/default_task_queue_factory.h"
+#include "api/video_codecs/builtin_video_decoder_factory.h"
+#include "api/video_codecs/builtin_video_encoder_factory.h"
+#include "media/engine/webrtc_media_engine.h"
+#include "media/engine/webrtc_media_engine_defaults.h"
+#include "pc/peer_connection_wrapper.h"
+#include "pc/session_description.h"
+#include "pc/test/fake_audio_capture_module.h"
+#include "pc/test/frame_generator_capturer_video_track_source.h"
+#include "pc/test/peer_connection_test_wrapper.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/internal/default_socket_server.h"
+#include "rtc_base/physical_socket_server.h"
+#include "rtc_base/thread.h"
+#include "test/gtest.h"
+#include "test/scoped_key_value_config.h"
+
+#ifdef WEBRTC_ANDROID
+#include "pc/test/android_test_initializer.h"
+#endif
+
+namespace webrtc {
+
+namespace {
+static const int kDefaultTimeoutMs = 5000;
+
+bool AddIceCandidates(PeerConnectionWrapper* peer,
+ std::vector<const IceCandidateInterface*> candidates) {
+ for (const auto candidate : candidates) {
+ if (!peer->pc()->AddIceCandidate(candidate)) {
+ return false;
+ }
+ }
+ return true;
+}
+} // namespace
+
+using RTCConfiguration = PeerConnectionInterface::RTCConfiguration;
+
+class PeerConnectionFieldTrialTest : public ::testing::Test {
+ protected:
+ typedef std::unique_ptr<PeerConnectionWrapper> WrapperPtr;
+
+ PeerConnectionFieldTrialTest()
+ : clock_(Clock::GetRealTimeClock()),
+ socket_server_(rtc::CreateDefaultSocketServer()),
+ main_thread_(socket_server_.get()) {
+#ifdef WEBRTC_ANDROID
+ InitializeAndroidObjects();
+#endif
+ webrtc::PeerConnectionInterface::IceServer ice_server;
+ ice_server.uri = "stun:stun.l.google.com:19302";
+ config_.servers.push_back(ice_server);
+ config_.sdp_semantics = SdpSemantics::kUnifiedPlan;
+ }
+
+ void TearDown() override { pc_factory_ = nullptr; }
+
+ void CreatePCFactory(std::unique_ptr<FieldTrialsView> field_trials) {
+ PeerConnectionFactoryDependencies pcf_deps;
+ pcf_deps.signaling_thread = rtc::Thread::Current();
+ pcf_deps.trials = std::move(field_trials);
+ pcf_deps.task_queue_factory = CreateDefaultTaskQueueFactory();
+ pcf_deps.call_factory = webrtc::CreateCallFactory();
+ cricket::MediaEngineDependencies media_deps;
+ media_deps.task_queue_factory = pcf_deps.task_queue_factory.get();
+ media_deps.adm = FakeAudioCaptureModule::Create();
+ media_deps.trials = pcf_deps.trials.get();
+ webrtc::SetMediaEngineDefaults(&media_deps);
+ pcf_deps.media_engine = cricket::CreateMediaEngine(std::move(media_deps));
+ pc_factory_ = CreateModularPeerConnectionFactory(std::move(pcf_deps));
+
+ // Allow ADAPTER_TYPE_LOOPBACK to create PeerConnections with loopback in
+ // this test.
+ RTC_DCHECK(pc_factory_);
+ PeerConnectionFactoryInterface::Options options;
+ options.network_ignore_mask = 0;
+ pc_factory_->SetOptions(options);
+ }
+
+ WrapperPtr CreatePeerConnection() {
+ auto observer = std::make_unique<MockPeerConnectionObserver>();
+ auto result = pc_factory_->CreatePeerConnectionOrError(
+ config_, PeerConnectionDependencies(observer.get()));
+ RTC_CHECK(result.ok());
+
+ observer->SetPeerConnectionInterface(result.value().get());
+ return std::make_unique<PeerConnectionWrapper>(
+ pc_factory_, result.MoveValue(), std::move(observer));
+ }
+
+ Clock* const clock_;
+ std::unique_ptr<rtc::SocketServer> socket_server_;
+ rtc::AutoSocketServerThread main_thread_;
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> pc_factory_ = nullptr;
+ webrtc::PeerConnectionInterface::RTCConfiguration config_;
+};
+
+// Tests for the dependency descriptor field trial. The dependency descriptor
+// field trial is implemented in media/engine/webrtc_video_engine.cc.
+TEST_F(PeerConnectionFieldTrialTest, EnableDependencyDescriptorAdvertised) {
+ std::unique_ptr<test::ScopedKeyValueConfig> field_trials =
+ std::make_unique<test::ScopedKeyValueConfig>(
+ "WebRTC-DependencyDescriptorAdvertised/Enabled/");
+ CreatePCFactory(std::move(field_trials));
+
+ WrapperPtr caller = CreatePeerConnection();
+ caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO);
+
+ auto offer = caller->CreateOffer();
+ auto contents1 = offer->description()->contents();
+ ASSERT_EQ(1u, contents1.size());
+
+ const cricket::MediaContentDescription* media_description1 =
+ contents1[0].media_description();
+ EXPECT_EQ(cricket::MEDIA_TYPE_VIDEO, media_description1->type());
+ const cricket::RtpHeaderExtensions& rtp_header_extensions1 =
+ media_description1->rtp_header_extensions();
+
+ bool found = absl::c_find_if(rtp_header_extensions1,
+ [](const webrtc::RtpExtension& rtp_extension) {
+ return rtp_extension.uri ==
+ RtpExtension::kDependencyDescriptorUri;
+ }) != rtp_header_extensions1.end();
+ EXPECT_TRUE(found);
+}
+
+// Tests that dependency descriptor RTP header extensions can be exchanged
+// via SDP munging, even if dependency descriptor field trial is disabled.
+TEST_F(PeerConnectionFieldTrialTest, InjectDependencyDescriptor) {
+ std::unique_ptr<test::ScopedKeyValueConfig> field_trials =
+ std::make_unique<test::ScopedKeyValueConfig>(
+ "WebRTC-DependencyDescriptorAdvertised/Disabled/");
+ CreatePCFactory(std::move(field_trials));
+
+ WrapperPtr caller = CreatePeerConnection();
+ WrapperPtr callee = CreatePeerConnection();
+ caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO);
+
+ auto offer = caller->CreateOffer();
+ cricket::ContentInfos& contents1 = offer->description()->contents();
+ ASSERT_EQ(1u, contents1.size());
+
+ cricket::MediaContentDescription* media_description1 =
+ contents1[0].media_description();
+ EXPECT_EQ(cricket::MEDIA_TYPE_VIDEO, media_description1->type());
+ cricket::RtpHeaderExtensions rtp_header_extensions1 =
+ media_description1->rtp_header_extensions();
+
+ bool found1 = absl::c_find_if(rtp_header_extensions1,
+ [](const webrtc::RtpExtension& rtp_extension) {
+ return rtp_extension.uri ==
+ RtpExtension::kDependencyDescriptorUri;
+ }) != rtp_header_extensions1.end();
+ EXPECT_FALSE(found1);
+
+ std::set<int> existing_ids;
+ for (const webrtc::RtpExtension& rtp_extension : rtp_header_extensions1) {
+ existing_ids.insert(rtp_extension.id);
+ }
+
+ // Find the currently unused RTP header extension ID.
+ int insert_id = 1;
+ std::set<int>::const_iterator iter = existing_ids.begin();
+ while (true) {
+ if (iter == existing_ids.end()) {
+ break;
+ }
+ if (*iter != insert_id) {
+ break;
+ }
+ insert_id++;
+ iter++;
+ }
+
+ rtp_header_extensions1.emplace_back(RtpExtension::kDependencyDescriptorUri,
+ insert_id);
+ media_description1->set_rtp_header_extensions(rtp_header_extensions1);
+
+ caller->SetLocalDescription(offer->Clone());
+
+ ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer)));
+ auto answer = callee->CreateAnswer();
+
+ cricket::ContentInfos& contents2 = answer->description()->contents();
+ ASSERT_EQ(1u, contents2.size());
+
+ cricket::MediaContentDescription* media_description2 =
+ contents2[0].media_description();
+ EXPECT_EQ(cricket::MEDIA_TYPE_VIDEO, media_description2->type());
+ cricket::RtpHeaderExtensions rtp_header_extensions2 =
+ media_description2->rtp_header_extensions();
+
+ bool found2 = absl::c_find_if(rtp_header_extensions2,
+ [](const webrtc::RtpExtension& rtp_extension) {
+ return rtp_extension.uri ==
+ RtpExtension::kDependencyDescriptorUri;
+ }) != rtp_header_extensions2.end();
+ EXPECT_TRUE(found2);
+}
+
+// Test that the ability to emulate degraded networks works without crashing.
+TEST_F(PeerConnectionFieldTrialTest, ApplyFakeNetworkConfig) {
+ std::unique_ptr<test::ScopedKeyValueConfig> field_trials =
+ std::make_unique<test::ScopedKeyValueConfig>(
+ "WebRTC-FakeNetworkSendConfig/link_capacity_kbps:500/"
+ "WebRTC-FakeNetworkReceiveConfig/loss_percent:1/");
+
+ CreatePCFactory(std::move(field_trials));
+
+ WrapperPtr caller = CreatePeerConnection();
+ BitrateSettings bitrate_settings;
+ bitrate_settings.start_bitrate_bps = 1'000'000;
+ bitrate_settings.max_bitrate_bps = 1'000'000;
+ caller->pc()->SetBitrate(bitrate_settings);
+ FrameGeneratorCapturerVideoTrackSource::Config config;
+ auto video_track_source =
+ rtc::make_ref_counted<FrameGeneratorCapturerVideoTrackSource>(
+ config, clock_, /*is_screencast=*/false);
+ video_track_source->Start();
+ caller->AddTrack(pc_factory_->CreateVideoTrack(video_track_source, "v"));
+ WrapperPtr callee = CreatePeerConnection();
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ ASSERT_TRUE(
+ caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal()));
+
+ // Do the SDP negotiation, and also exchange ice candidates.
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+ ASSERT_TRUE_WAIT(
+ caller->signaling_state() == PeerConnectionInterface::kStable,
+ kDefaultTimeoutMs);
+ ASSERT_TRUE_WAIT(caller->IsIceGatheringDone(), kDefaultTimeoutMs);
+ ASSERT_TRUE_WAIT(callee->IsIceGatheringDone(), kDefaultTimeoutMs);
+
+ // Connect an ICE candidate pairs.
+ ASSERT_TRUE(
+ AddIceCandidates(callee.get(), caller->observer()->GetAllCandidates()));
+ ASSERT_TRUE(
+ AddIceCandidates(caller.get(), callee->observer()->GetAllCandidates()));
+
+ // This means that ICE and DTLS are connected.
+ ASSERT_TRUE_WAIT(callee->IsIceConnected(), kDefaultTimeoutMs);
+ ASSERT_TRUE_WAIT(caller->IsIceConnected(), kDefaultTimeoutMs);
+
+ // Send packets for kDefaultTimeoutMs
+ WAIT(false, kDefaultTimeoutMs);
+
+ std::vector<const RTCOutboundRtpStreamStats*> outbound_rtp_stats =
+ caller->GetStats()->GetStatsOfType<RTCOutboundRtpStreamStats>();
+ ASSERT_GE(outbound_rtp_stats.size(), 1u);
+ ASSERT_TRUE(outbound_rtp_stats[0]->target_bitrate.is_defined());
+ // Link capacity is limited to 500k, so BWE is expected to be close to 500k.
+ ASSERT_LE(*outbound_rtp_stats[0]->target_bitrate, 500'000 * 1.1);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/peer_connection_header_extension_unittest.cc b/third_party/libwebrtc/pc/peer_connection_header_extension_unittest.cc
new file mode 100644
index 0000000000..b1c6c3cfb5
--- /dev/null
+++ b/third_party/libwebrtc/pc/peer_connection_header_extension_unittest.cc
@@ -0,0 +1,589 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+#include <string>
+#include <tuple>
+#include <utility>
+#include <vector>
+
+#include "absl/strings/string_view.h"
+#include "absl/types/optional.h"
+#include "api/call/call_factory_interface.h"
+#include "api/jsep.h"
+#include "api/media_types.h"
+#include "api/peer_connection_interface.h"
+#include "api/rtc_error.h"
+#include "api/rtc_event_log/rtc_event_log_factory.h"
+#include "api/rtc_event_log/rtc_event_log_factory_interface.h"
+#include "api/rtp_parameters.h"
+#include "api/rtp_transceiver_direction.h"
+#include "api/rtp_transceiver_interface.h"
+#include "api/scoped_refptr.h"
+#include "api/task_queue/default_task_queue_factory.h"
+#include "api/task_queue/task_queue_factory.h"
+#include "media/base/fake_media_engine.h"
+#include "media/base/media_engine.h"
+#include "p2p/base/fake_port_allocator.h"
+#include "p2p/base/port_allocator.h"
+#include "pc/peer_connection_wrapper.h"
+#include "pc/session_description.h"
+#include "pc/test/mock_peer_connection_observers.h"
+#include "rtc_base/internal/default_socket_server.h"
+#include "rtc_base/rtc_certificate_generator.h"
+#include "rtc_base/strings/string_builder.h"
+#include "rtc_base/thread.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+#include "test/scoped_key_value_config.h"
+
+namespace webrtc {
+
+using ::testing::Combine;
+using ::testing::ElementsAre;
+using ::testing::Field;
+using ::testing::Return;
+using ::testing::Values;
+
+class PeerConnectionHeaderExtensionTest
+ : public ::testing::TestWithParam<
+ std::tuple<cricket::MediaType, SdpSemantics>> {
+ protected:
+ PeerConnectionHeaderExtensionTest()
+ : socket_server_(rtc::CreateDefaultSocketServer()),
+ main_thread_(socket_server_.get()),
+ extensions_(
+ {RtpHeaderExtensionCapability("uri1",
+ 1,
+ RtpTransceiverDirection::kStopped),
+ RtpHeaderExtensionCapability("uri2",
+ 2,
+ RtpTransceiverDirection::kSendOnly),
+ RtpHeaderExtensionCapability("uri3",
+ 3,
+ RtpTransceiverDirection::kRecvOnly),
+ RtpHeaderExtensionCapability(
+ "uri4",
+ 4,
+ RtpTransceiverDirection::kSendRecv)}) {}
+
+ std::unique_ptr<PeerConnectionWrapper> CreatePeerConnection(
+ cricket::MediaType media_type,
+ absl::optional<SdpSemantics> semantics) {
+ auto voice = std::make_unique<cricket::FakeVoiceEngine>();
+ auto video = std::make_unique<cricket::FakeVideoEngine>();
+ if (media_type == cricket::MediaType::MEDIA_TYPE_AUDIO)
+ voice->SetRtpHeaderExtensions(extensions_);
+ else
+ video->SetRtpHeaderExtensions(extensions_);
+ auto media_engine = std::make_unique<cricket::CompositeMediaEngine>(
+ std::move(voice), std::move(video));
+ PeerConnectionFactoryDependencies factory_dependencies;
+ factory_dependencies.network_thread = rtc::Thread::Current();
+ factory_dependencies.worker_thread = rtc::Thread::Current();
+ factory_dependencies.signaling_thread = rtc::Thread::Current();
+ factory_dependencies.task_queue_factory = CreateDefaultTaskQueueFactory();
+ factory_dependencies.media_engine = std::move(media_engine);
+ factory_dependencies.call_factory = CreateCallFactory();
+ factory_dependencies.event_log_factory =
+ std::make_unique<RtcEventLogFactory>(
+ factory_dependencies.task_queue_factory.get());
+
+ auto pc_factory =
+ CreateModularPeerConnectionFactory(std::move(factory_dependencies));
+
+ auto fake_port_allocator = std::make_unique<cricket::FakePortAllocator>(
+ rtc::Thread::Current(),
+ std::make_unique<rtc::BasicPacketSocketFactory>(socket_server_.get()),
+ &field_trials_);
+ auto observer = std::make_unique<MockPeerConnectionObserver>();
+ PeerConnectionInterface::RTCConfiguration config;
+ if (semantics)
+ config.sdp_semantics = *semantics;
+ PeerConnectionDependencies pc_dependencies(observer.get());
+ pc_dependencies.allocator = std::move(fake_port_allocator);
+ auto result = pc_factory->CreatePeerConnectionOrError(
+ config, std::move(pc_dependencies));
+ EXPECT_TRUE(result.ok());
+ observer->SetPeerConnectionInterface(result.value().get());
+ return std::make_unique<PeerConnectionWrapper>(
+ pc_factory, result.MoveValue(), std::move(observer));
+ }
+
+ webrtc::test::ScopedKeyValueConfig field_trials_;
+ std::unique_ptr<rtc::SocketServer> socket_server_;
+ rtc::AutoSocketServerThread main_thread_;
+ std::vector<RtpHeaderExtensionCapability> extensions_;
+};
+
+TEST_P(PeerConnectionHeaderExtensionTest, TransceiverOffersHeaderExtensions) {
+ cricket::MediaType media_type;
+ SdpSemantics semantics;
+ std::tie(media_type, semantics) = GetParam();
+ if (semantics != SdpSemantics::kUnifiedPlan)
+ return;
+ std::unique_ptr<PeerConnectionWrapper> wrapper =
+ CreatePeerConnection(media_type, semantics);
+ auto transceiver = wrapper->AddTransceiver(media_type);
+ EXPECT_EQ(transceiver->GetHeaderExtensionsToNegotiate(), extensions_);
+}
+
+TEST_P(PeerConnectionHeaderExtensionTest,
+ SenderReceiverCapabilitiesReturnNotStoppedExtensions) {
+ cricket::MediaType media_type;
+ SdpSemantics semantics;
+ std::tie(media_type, semantics) = GetParam();
+ std::unique_ptr<PeerConnectionWrapper> wrapper =
+ CreatePeerConnection(media_type, semantics);
+ EXPECT_THAT(wrapper->pc_factory()
+ ->GetRtpSenderCapabilities(media_type)
+ .header_extensions,
+ ElementsAre(Field(&RtpHeaderExtensionCapability::uri, "uri2"),
+ Field(&RtpHeaderExtensionCapability::uri, "uri3"),
+ Field(&RtpHeaderExtensionCapability::uri, "uri4")));
+ EXPECT_EQ(wrapper->pc_factory()
+ ->GetRtpReceiverCapabilities(media_type)
+ .header_extensions,
+ wrapper->pc_factory()
+ ->GetRtpSenderCapabilities(media_type)
+ .header_extensions);
+}
+
+TEST_P(PeerConnectionHeaderExtensionTest, OffersUnstoppedDefaultExtensions) {
+ cricket::MediaType media_type;
+ SdpSemantics semantics;
+ std::tie(media_type, semantics) = GetParam();
+ if (semantics != SdpSemantics::kUnifiedPlan)
+ return;
+ std::unique_ptr<PeerConnectionWrapper> wrapper =
+ CreatePeerConnection(media_type, semantics);
+ auto transceiver = wrapper->AddTransceiver(media_type);
+ auto session_description = wrapper->CreateOffer();
+ EXPECT_THAT(session_description->description()
+ ->contents()[0]
+ .media_description()
+ ->rtp_header_extensions(),
+ ElementsAre(Field(&RtpExtension::uri, "uri2"),
+ Field(&RtpExtension::uri, "uri3"),
+ Field(&RtpExtension::uri, "uri4")));
+}
+
+TEST_P(PeerConnectionHeaderExtensionTest, OffersUnstoppedModifiedExtensions) {
+ cricket::MediaType media_type;
+ SdpSemantics semantics;
+ std::tie(media_type, semantics) = GetParam();
+ if (semantics != SdpSemantics::kUnifiedPlan)
+ return;
+ std::unique_ptr<PeerConnectionWrapper> wrapper =
+ CreatePeerConnection(media_type, semantics);
+ auto transceiver = wrapper->AddTransceiver(media_type);
+ auto modified_extensions = transceiver->GetHeaderExtensionsToNegotiate();
+ modified_extensions[0].direction = RtpTransceiverDirection::kSendRecv;
+ modified_extensions[3].direction = RtpTransceiverDirection::kStopped;
+ EXPECT_TRUE(
+ transceiver->SetHeaderExtensionsToNegotiate(modified_extensions).ok());
+ auto session_description = wrapper->CreateOffer();
+ EXPECT_THAT(session_description->description()
+ ->contents()[0]
+ .media_description()
+ ->rtp_header_extensions(),
+ ElementsAre(Field(&RtpExtension::uri, "uri1"),
+ Field(&RtpExtension::uri, "uri2"),
+ Field(&RtpExtension::uri, "uri3")));
+}
+
+TEST_P(PeerConnectionHeaderExtensionTest, AnswersUnstoppedModifiedExtensions) {
+ cricket::MediaType media_type;
+ SdpSemantics semantics;
+ std::tie(media_type, semantics) = GetParam();
+ if (semantics != SdpSemantics::kUnifiedPlan)
+ return;
+ std::unique_ptr<PeerConnectionWrapper> pc1 =
+ CreatePeerConnection(media_type, semantics);
+ std::unique_ptr<PeerConnectionWrapper> pc2 =
+ CreatePeerConnection(media_type, semantics);
+ auto transceiver1 = pc1->AddTransceiver(media_type);
+
+ auto offer = pc1->CreateOfferAndSetAsLocal(
+ PeerConnectionInterface::RTCOfferAnswerOptions());
+ pc2->SetRemoteDescription(std::move(offer));
+
+ ASSERT_EQ(pc2->pc()->GetTransceivers().size(), 1u);
+ auto transceiver2 = pc2->pc()->GetTransceivers()[0];
+ auto modified_extensions = transceiver2->GetHeaderExtensionsToNegotiate();
+ // Don't offer uri4.
+ modified_extensions[3].direction = RtpTransceiverDirection::kStopped;
+ transceiver2->SetHeaderExtensionsToNegotiate(modified_extensions);
+
+ auto answer = pc2->CreateAnswerAndSetAsLocal(
+ PeerConnectionInterface::RTCOfferAnswerOptions());
+ EXPECT_THAT(answer->description()
+ ->contents()[0]
+ .media_description()
+ ->rtp_header_extensions(),
+ ElementsAre(Field(&RtpExtension::uri, "uri2"),
+ Field(&RtpExtension::uri, "uri3")));
+}
+
+TEST_P(PeerConnectionHeaderExtensionTest, NegotiatedExtensionsAreAccessible) {
+ cricket::MediaType media_type;
+ SdpSemantics semantics;
+ std::tie(media_type, semantics) = GetParam();
+ if (semantics != SdpSemantics::kUnifiedPlan)
+ return;
+ std::unique_ptr<PeerConnectionWrapper> pc1 =
+ CreatePeerConnection(media_type, semantics);
+ auto transceiver1 = pc1->AddTransceiver(media_type);
+ auto modified_extensions = transceiver1->GetHeaderExtensionsToNegotiate();
+ modified_extensions[3].direction = RtpTransceiverDirection::kStopped;
+ transceiver1->SetHeaderExtensionsToNegotiate(modified_extensions);
+ auto offer = pc1->CreateOfferAndSetAsLocal(
+ PeerConnectionInterface::RTCOfferAnswerOptions());
+
+ std::unique_ptr<PeerConnectionWrapper> pc2 =
+ CreatePeerConnection(media_type, semantics);
+ auto transceiver2 = pc2->AddTransceiver(media_type);
+ pc2->SetRemoteDescription(std::move(offer));
+ auto answer = pc2->CreateAnswerAndSetAsLocal(
+ PeerConnectionInterface::RTCOfferAnswerOptions());
+ pc1->SetRemoteDescription(std::move(answer));
+
+ // PC1 has exts 2-4 unstopped and PC2 has exts 1-3 unstopped -> ext 2, 3
+ // survives.
+ EXPECT_THAT(transceiver1->GetNegotiatedHeaderExtensions(),
+ ElementsAre(Field(&RtpHeaderExtensionCapability::direction,
+ RtpTransceiverDirection::kStopped),
+ Field(&RtpHeaderExtensionCapability::direction,
+ RtpTransceiverDirection::kSendRecv),
+ Field(&RtpHeaderExtensionCapability::direction,
+ RtpTransceiverDirection::kSendRecv),
+ Field(&RtpHeaderExtensionCapability::direction,
+ RtpTransceiverDirection::kStopped)));
+}
+
+TEST_P(PeerConnectionHeaderExtensionTest, OfferedExtensionsArePerTransceiver) {
+ cricket::MediaType media_type;
+ SdpSemantics semantics;
+ std::tie(media_type, semantics) = GetParam();
+ if (semantics != SdpSemantics::kUnifiedPlan)
+ return;
+ std::unique_ptr<PeerConnectionWrapper> pc1 =
+ CreatePeerConnection(media_type, semantics);
+ auto transceiver1 = pc1->AddTransceiver(media_type);
+ auto modified_extensions = transceiver1->GetHeaderExtensionsToNegotiate();
+ modified_extensions[3].direction = RtpTransceiverDirection::kStopped;
+ transceiver1->SetHeaderExtensionsToNegotiate(modified_extensions);
+ auto transceiver2 = pc1->AddTransceiver(media_type);
+
+ auto session_description = pc1->CreateOffer();
+ EXPECT_THAT(session_description->description()
+ ->contents()[0]
+ .media_description()
+ ->rtp_header_extensions(),
+ ElementsAre(Field(&RtpExtension::uri, "uri2"),
+ Field(&RtpExtension::uri, "uri3")));
+ EXPECT_THAT(session_description->description()
+ ->contents()[1]
+ .media_description()
+ ->rtp_header_extensions(),
+ ElementsAre(Field(&RtpExtension::uri, "uri2"),
+ Field(&RtpExtension::uri, "uri3"),
+ Field(&RtpExtension::uri, "uri4")));
+}
+
+TEST_P(PeerConnectionHeaderExtensionTest, RemovalAfterRenegotiation) {
+ cricket::MediaType media_type;
+ SdpSemantics semantics;
+ std::tie(media_type, semantics) = GetParam();
+ if (semantics != SdpSemantics::kUnifiedPlan)
+ return;
+ std::unique_ptr<PeerConnectionWrapper> pc1 =
+ CreatePeerConnection(media_type, semantics);
+ std::unique_ptr<PeerConnectionWrapper> pc2 =
+ CreatePeerConnection(media_type, semantics);
+ auto transceiver1 = pc1->AddTransceiver(media_type);
+
+ auto offer = pc1->CreateOfferAndSetAsLocal(
+ PeerConnectionInterface::RTCOfferAnswerOptions());
+ pc2->SetRemoteDescription(std::move(offer));
+ auto answer = pc2->CreateAnswerAndSetAsLocal(
+ PeerConnectionInterface::RTCOfferAnswerOptions());
+ pc1->SetRemoteDescription(std::move(answer));
+
+ auto modified_extensions = transceiver1->GetHeaderExtensionsToNegotiate();
+ modified_extensions[3].direction = RtpTransceiverDirection::kStopped;
+ transceiver1->SetHeaderExtensionsToNegotiate(modified_extensions);
+ auto session_description = pc1->CreateOffer();
+ EXPECT_THAT(session_description->description()
+ ->contents()[0]
+ .media_description()
+ ->rtp_header_extensions(),
+ ElementsAre(Field(&RtpExtension::uri, "uri2"),
+ Field(&RtpExtension::uri, "uri3")));
+}
+
+TEST_P(PeerConnectionHeaderExtensionTest,
+ StoppedByDefaultExtensionCanBeActivatedByRemoteSdp) {
+ cricket::MediaType media_type;
+ SdpSemantics semantics;
+ std::tie(media_type, semantics) = GetParam();
+ if (semantics != SdpSemantics::kUnifiedPlan)
+ return;
+ std::unique_ptr<PeerConnectionWrapper> pc1 =
+ CreatePeerConnection(media_type, semantics);
+ std::unique_ptr<PeerConnectionWrapper> pc2 =
+ CreatePeerConnection(media_type, semantics);
+ auto transceiver1 = pc1->AddTransceiver(media_type);
+
+ auto offer = pc1->CreateOfferAndSetAsLocal(
+ PeerConnectionInterface::RTCOfferAnswerOptions());
+ pc2->SetRemoteDescription(std::move(offer));
+ auto answer = pc2->CreateAnswerAndSetAsLocal(
+ PeerConnectionInterface::RTCOfferAnswerOptions());
+ std::string sdp;
+ ASSERT_TRUE(answer->ToString(&sdp));
+ // We support uri1 but it is stopped by default. Let the remote reactivate it.
+ sdp += "a=extmap:15 uri1\r\n";
+ auto modified_answer = CreateSessionDescription(SdpType::kAnswer, sdp);
+ pc1->SetRemoteDescription(std::move(modified_answer));
+ EXPECT_THAT(transceiver1->GetNegotiatedHeaderExtensions(),
+ ElementsAre(Field(&RtpHeaderExtensionCapability::direction,
+ RtpTransceiverDirection::kSendRecv),
+ Field(&RtpHeaderExtensionCapability::direction,
+ RtpTransceiverDirection::kSendRecv),
+ Field(&RtpHeaderExtensionCapability::direction,
+ RtpTransceiverDirection::kSendRecv),
+ Field(&RtpHeaderExtensionCapability::direction,
+ RtpTransceiverDirection::kSendRecv)));
+}
+
+TEST_P(PeerConnectionHeaderExtensionTest,
+ UnknownExtensionInRemoteOfferDoesNotShowUp) {
+ cricket::MediaType media_type;
+ SdpSemantics semantics;
+ std::tie(media_type, semantics) = GetParam();
+ if (semantics != SdpSemantics::kUnifiedPlan)
+ return;
+ std::unique_ptr<PeerConnectionWrapper> pc =
+ CreatePeerConnection(media_type, semantics);
+ std::string sdp =
+ "v=0\r\n"
+ "o=- 0 3 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=fingerprint:sha-256 "
+ "A7:24:72:CA:6E:02:55:39:BA:66:DF:6E:CC:4C:D8:B0:1A:BF:1A:56:65:7D:F4:03:"
+ "AD:7E:77:43:2A:29:EC:93\r\n"
+ "a=ice-ufrag:6HHHdzzeIhkE0CKj\r\n"
+ "a=ice-pwd:XYDGVpfvklQIEnZ6YnyLsAew\r\n";
+ if (media_type == cricket::MEDIA_TYPE_AUDIO) {
+ sdp +=
+ "m=audio 9 RTP/AVPF 111\r\n"
+ "a=rtpmap:111 fake_audio_codec/8000\r\n";
+ } else {
+ sdp +=
+ "m=video 9 RTP/AVPF 111\r\n"
+ "a=rtpmap:111 fake_video_codec/90000\r\n";
+ }
+ sdp +=
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtcp-mux\r\n"
+ "a=sendonly\r\n"
+ "a=mid:audio\r\n"
+ "a=setup:actpass\r\n"
+ "a=extmap:1 urn:bogus\r\n";
+ auto offer = CreateSessionDescription(SdpType::kOffer, sdp);
+ pc->SetRemoteDescription(std::move(offer));
+ pc->CreateAnswerAndSetAsLocal(
+ PeerConnectionInterface::RTCOfferAnswerOptions());
+ ASSERT_GT(pc->pc()->GetTransceivers().size(), 0u);
+ auto transceiver = pc->pc()->GetTransceivers()[0];
+ auto negotiated = transceiver->GetNegotiatedHeaderExtensions();
+ EXPECT_EQ(negotiated.size(),
+ transceiver->GetHeaderExtensionsToNegotiate().size());
+ // All extensions are stopped, the "bogus" one does not show up.
+ for (const auto& extension : negotiated) {
+ EXPECT_EQ(extension.direction, RtpTransceiverDirection::kStopped);
+ EXPECT_NE(extension.uri, "urn:bogus");
+ }
+}
+
+// These tests are regression tests for behavior that the API
+// enables in a proper way. It conflicts with the behavior
+// of the API to only offer non-stopped extensions.
+TEST_P(PeerConnectionHeaderExtensionTest,
+ SdpMungingAnswerWithoutApiUsageEnablesExtensions) {
+ cricket::MediaType media_type;
+ SdpSemantics semantics;
+ std::tie(media_type, semantics) = GetParam();
+ if (semantics != SdpSemantics::kUnifiedPlan)
+ return;
+ std::unique_ptr<PeerConnectionWrapper> pc =
+ CreatePeerConnection(media_type, semantics);
+ std::string sdp =
+ "v=0\r\n"
+ "o=- 0 3 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=fingerprint:sha-256 "
+ "A7:24:72:CA:6E:02:55:39:BA:66:DF:6E:CC:4C:D8:B0:1A:BF:1A:56:65:7D:F4:03:"
+ "AD:7E:77:43:2A:29:EC:93\r\n"
+ "a=ice-ufrag:6HHHdzzeIhkE0CKj\r\n"
+ "a=ice-pwd:XYDGVpfvklQIEnZ6YnyLsAew\r\n";
+ if (media_type == cricket::MEDIA_TYPE_AUDIO) {
+ sdp +=
+ "m=audio 9 RTP/AVPF 111\r\n"
+ "a=rtpmap:111 fake_audio_codec/8000\r\n";
+ } else {
+ sdp +=
+ "m=video 9 RTP/AVPF 111\r\n"
+ "a=rtpmap:111 fake_video_codec/90000\r\n";
+ }
+ sdp +=
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtcp-mux\r\n"
+ "a=sendrecv\r\n"
+ "a=mid:audio\r\n"
+ "a=setup:actpass\r\n"
+ "a=extmap:1 uri1\r\n";
+ auto offer = CreateSessionDescription(SdpType::kOffer, sdp);
+ pc->SetRemoteDescription(std::move(offer));
+ auto answer =
+ pc->CreateAnswer(PeerConnectionInterface::RTCOfferAnswerOptions());
+ std::string modified_sdp;
+ ASSERT_TRUE(answer->ToString(&modified_sdp));
+ modified_sdp += "a=extmap:1 uri1\r\n";
+ auto modified_answer =
+ CreateSessionDescription(SdpType::kAnswer, modified_sdp);
+ ASSERT_TRUE(pc->SetLocalDescription(std::move(modified_answer)));
+
+ auto session_description = pc->CreateOffer();
+ EXPECT_THAT(session_description->description()
+ ->contents()[0]
+ .media_description()
+ ->rtp_header_extensions(),
+ ElementsAre(Field(&RtpExtension::uri, "uri1"),
+ Field(&RtpExtension::uri, "uri2"),
+ Field(&RtpExtension::uri, "uri3"),
+ Field(&RtpExtension::uri, "uri4")));
+}
+
+TEST_P(PeerConnectionHeaderExtensionTest,
+ SdpMungingOfferWithoutApiUsageEnablesExtensions) {
+ cricket::MediaType media_type;
+ SdpSemantics semantics;
+ std::tie(media_type, semantics) = GetParam();
+ if (semantics != SdpSemantics::kUnifiedPlan)
+ return;
+ std::unique_ptr<PeerConnectionWrapper> pc =
+ CreatePeerConnection(media_type, semantics);
+ pc->AddTransceiver(media_type);
+
+ auto offer =
+ pc->CreateOffer(PeerConnectionInterface::RTCOfferAnswerOptions());
+ std::string modified_sdp;
+ ASSERT_TRUE(offer->ToString(&modified_sdp));
+ modified_sdp += "a=extmap:1 uri1\r\n";
+ auto modified_offer = CreateSessionDescription(SdpType::kOffer, modified_sdp);
+ ASSERT_TRUE(pc->SetLocalDescription(std::move(modified_offer)));
+
+ auto offer2 =
+ pc->CreateOffer(PeerConnectionInterface::RTCOfferAnswerOptions());
+ EXPECT_THAT(offer2->description()
+ ->contents()[0]
+ .media_description()
+ ->rtp_header_extensions(),
+ ElementsAre(Field(&RtpExtension::uri, "uri2"),
+ Field(&RtpExtension::uri, "uri3"),
+ Field(&RtpExtension::uri, "uri4"),
+ Field(&RtpExtension::uri, "uri1")));
+}
+
+TEST_P(PeerConnectionHeaderExtensionTest, EnablingExtensionsAfterRemoteOffer) {
+ cricket::MediaType media_type;
+ SdpSemantics semantics;
+ std::tie(media_type, semantics) = GetParam();
+ if (semantics != SdpSemantics::kUnifiedPlan)
+ return;
+ std::unique_ptr<PeerConnectionWrapper> pc =
+ CreatePeerConnection(media_type, semantics);
+ std::string sdp =
+ "v=0\r\n"
+ "o=- 0 3 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=fingerprint:sha-256 "
+ "A7:24:72:CA:6E:02:55:39:BA:66:DF:6E:CC:4C:D8:B0:1A:BF:1A:56:65:7D:F4:03:"
+ "AD:7E:77:43:2A:29:EC:93\r\n"
+ "a=ice-ufrag:6HHHdzzeIhkE0CKj\r\n"
+ "a=ice-pwd:XYDGVpfvklQIEnZ6YnyLsAew\r\n";
+ if (media_type == cricket::MEDIA_TYPE_AUDIO) {
+ sdp +=
+ "m=audio 9 RTP/AVPF 111\r\n"
+ "a=rtpmap:111 fake_audio_codec/8000\r\n";
+ } else {
+ sdp +=
+ "m=video 9 RTP/AVPF 111\r\n"
+ "a=rtpmap:111 fake_video_codec/90000\r\n";
+ }
+ sdp +=
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtcp-mux\r\n"
+ "a=sendrecv\r\n"
+ "a=mid:audio\r\n"
+ "a=setup:actpass\r\n"
+ "a=extmap:5 uri1\r\n";
+ auto offer = CreateSessionDescription(SdpType::kOffer, sdp);
+ pc->SetRemoteDescription(std::move(offer));
+
+ ASSERT_GT(pc->pc()->GetTransceivers().size(), 0u);
+ auto transceiver = pc->pc()->GetTransceivers()[0];
+ auto modified_extensions = transceiver->GetHeaderExtensionsToNegotiate();
+ modified_extensions[0].direction = RtpTransceiverDirection::kSendRecv;
+ transceiver->SetHeaderExtensionsToNegotiate(modified_extensions);
+
+ pc->CreateAnswerAndSetAsLocal(
+ PeerConnectionInterface::RTCOfferAnswerOptions());
+
+ auto session_description = pc->CreateOffer();
+ auto extensions = session_description->description()
+ ->contents()[0]
+ .media_description()
+ ->rtp_header_extensions();
+ EXPECT_THAT(extensions, ElementsAre(Field(&RtpExtension::uri, "uri1"),
+ Field(&RtpExtension::uri, "uri2"),
+ Field(&RtpExtension::uri, "uri3"),
+ Field(&RtpExtension::uri, "uri4")));
+ // Check uri1's id still matches the remote id.
+ EXPECT_EQ(extensions[0].id, 5);
+}
+
+INSTANTIATE_TEST_SUITE_P(
+ ,
+ PeerConnectionHeaderExtensionTest,
+ Combine(Values(SdpSemantics::kPlanB_DEPRECATED, SdpSemantics::kUnifiedPlan),
+ Values(cricket::MediaType::MEDIA_TYPE_AUDIO,
+ cricket::MediaType::MEDIA_TYPE_VIDEO)),
+ [](const testing::TestParamInfo<
+ PeerConnectionHeaderExtensionTest::ParamType>& info) {
+ cricket::MediaType media_type;
+ SdpSemantics semantics;
+ std::tie(media_type, semantics) = info.param;
+ return (rtc::StringBuilder("With")
+ << (semantics == SdpSemantics::kPlanB_DEPRECATED ? "PlanB"
+ : "UnifiedPlan")
+ << "And"
+ << (media_type == cricket::MediaType::MEDIA_TYPE_AUDIO ? "Voice"
+ : "Video")
+ << "Engine")
+ .str();
+ });
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/peer_connection_histogram_unittest.cc b/third_party/libwebrtc/pc/peer_connection_histogram_unittest.cc
new file mode 100644
index 0000000000..68a4dbc361
--- /dev/null
+++ b/third_party/libwebrtc/pc/peer_connection_histogram_unittest.cc
@@ -0,0 +1,790 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+#include <set>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/async_resolver_factory.h"
+#include "api/call/call_factory_interface.h"
+#include "api/jsep.h"
+#include "api/jsep_session_description.h"
+#include "api/peer_connection_interface.h"
+#include "api/rtc_error.h"
+#include "api/scoped_refptr.h"
+#include "api/task_queue/default_task_queue_factory.h"
+#include "api/task_queue/task_queue_factory.h"
+#include "api/test/mock_async_dns_resolver.h"
+#include "media/base/fake_media_engine.h"
+#include "media/base/media_engine.h"
+#include "p2p/base/mock_async_resolver.h"
+#include "p2p/base/port_allocator.h"
+#include "p2p/client/basic_port_allocator.h"
+#include "pc/peer_connection.h"
+#include "pc/peer_connection_factory.h"
+#include "pc/peer_connection_proxy.h"
+#include "pc/peer_connection_wrapper.h"
+#include "pc/sdp_utils.h"
+#include "pc/test/mock_peer_connection_observers.h"
+#include "pc/usage_pattern.h"
+#include "pc/webrtc_sdp.h"
+#include "rtc_base/arraysize.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/fake_mdns_responder.h"
+#include "rtc_base/fake_network.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/mdns_responder_interface.h"
+#include "rtc_base/socket_address.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/virtual_socket_server.h"
+#include "system_wrappers/include/metrics.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+
+using RTCConfiguration = PeerConnectionInterface::RTCConfiguration;
+using RTCOfferAnswerOptions = PeerConnectionInterface::RTCOfferAnswerOptions;
+using ::testing::NiceMock;
+using ::testing::Values;
+
+static const char kUsagePatternMetric[] = "WebRTC.PeerConnection.UsagePattern";
+static constexpr int kDefaultTimeout = 10000;
+static const rtc::SocketAddress kLocalAddrs[2] = {
+ rtc::SocketAddress("1.1.1.1", 0), rtc::SocketAddress("2.2.2.2", 0)};
+static const rtc::SocketAddress kPrivateLocalAddress("10.1.1.1", 0);
+static const rtc::SocketAddress kPrivateIpv6LocalAddress("fd12:3456:789a:1::1",
+ 0);
+
+int MakeUsageFingerprint(std::set<UsageEvent> events) {
+ int signature = 0;
+ for (const auto it : events) {
+ signature |= static_cast<int>(it);
+ }
+ return signature;
+}
+
+class PeerConnectionFactoryForUsageHistogramTest
+ : public PeerConnectionFactory {
+ public:
+ PeerConnectionFactoryForUsageHistogramTest()
+ : PeerConnectionFactory([] {
+ PeerConnectionFactoryDependencies dependencies;
+ dependencies.network_thread = rtc::Thread::Current();
+ dependencies.worker_thread = rtc::Thread::Current();
+ dependencies.signaling_thread = rtc::Thread::Current();
+ dependencies.task_queue_factory = CreateDefaultTaskQueueFactory();
+ dependencies.media_engine =
+ std::make_unique<cricket::FakeMediaEngine>();
+ dependencies.call_factory = CreateCallFactory();
+ return dependencies;
+ }()) {}
+};
+
+class PeerConnectionWrapperForUsageHistogramTest;
+
+typedef PeerConnectionWrapperForUsageHistogramTest* RawWrapperPtr;
+
+class ObserverForUsageHistogramTest : public MockPeerConnectionObserver {
+ public:
+ void OnIceCandidate(const webrtc::IceCandidateInterface* candidate) override;
+
+ void OnInterestingUsage(int usage_pattern) override {
+ interesting_usage_detected_ = usage_pattern;
+ }
+
+ void PrepareToExchangeCandidates(RawWrapperPtr other) {
+ candidate_target_ = other;
+ }
+
+ bool HaveDataChannel() { return last_datachannel_ != nullptr; }
+
+ absl::optional<int> interesting_usage_detected() {
+ return interesting_usage_detected_;
+ }
+
+ void ClearInterestingUsageDetector() {
+ interesting_usage_detected_ = absl::optional<int>();
+ }
+
+ bool candidate_gathered() const { return candidate_gathered_; }
+
+ private:
+ absl::optional<int> interesting_usage_detected_;
+ bool candidate_gathered_ = false;
+ RawWrapperPtr candidate_target_; // Note: Not thread-safe against deletions.
+};
+
+class PeerConnectionWrapperForUsageHistogramTest
+ : public PeerConnectionWrapper {
+ public:
+ using PeerConnectionWrapper::PeerConnectionWrapper;
+
+ PeerConnection* GetInternalPeerConnection() {
+ auto* pci =
+ static_cast<PeerConnectionProxyWithInternal<PeerConnectionInterface>*>(
+ pc());
+ return static_cast<PeerConnection*>(pci->internal());
+ }
+
+ // Override with different return type
+ ObserverForUsageHistogramTest* observer() {
+ return static_cast<ObserverForUsageHistogramTest*>(
+ PeerConnectionWrapper::observer());
+ }
+
+ void PrepareToExchangeCandidates(
+ PeerConnectionWrapperForUsageHistogramTest* other) {
+ observer()->PrepareToExchangeCandidates(other);
+ other->observer()->PrepareToExchangeCandidates(this);
+ }
+
+ bool IsConnected() {
+ return pc()->ice_connection_state() ==
+ PeerConnectionInterface::kIceConnectionConnected ||
+ pc()->ice_connection_state() ==
+ PeerConnectionInterface::kIceConnectionCompleted;
+ }
+
+ bool HaveDataChannel() {
+ return static_cast<ObserverForUsageHistogramTest*>(observer())
+ ->HaveDataChannel();
+ }
+ void BufferIceCandidate(const webrtc::IceCandidateInterface* candidate) {
+ std::string sdp;
+ EXPECT_TRUE(candidate->ToString(&sdp));
+ std::unique_ptr<webrtc::IceCandidateInterface> candidate_copy(
+ CreateIceCandidate(candidate->sdp_mid(), candidate->sdp_mline_index(),
+ sdp, nullptr));
+ buffered_candidates_.push_back(std::move(candidate_copy));
+ }
+
+ void AddBufferedIceCandidates() {
+ for (const auto& candidate : buffered_candidates_) {
+ EXPECT_TRUE(pc()->AddIceCandidate(candidate.get()));
+ }
+ buffered_candidates_.clear();
+ }
+
+ // This method performs the following actions in sequence:
+ // 1. Exchange Offer and Answer.
+ // 2. Exchange ICE candidates after both caller and callee complete
+ // gathering.
+ // 3. Wait for ICE to connect.
+ //
+ // This guarantees a deterministic sequence of events and also rules out the
+ // occurrence of prflx candidates if the offer/answer signaling and the
+ // candidate trickling race in order. In case prflx candidates need to be
+ // simulated, see the approach used by tests below for that.
+ bool ConnectTo(PeerConnectionWrapperForUsageHistogramTest* callee) {
+ PrepareToExchangeCandidates(callee);
+ if (!ExchangeOfferAnswerWith(callee)) {
+ return false;
+ }
+ // Wait until the gathering completes before we signal the candidate.
+ WAIT(observer()->ice_gathering_complete_, kDefaultTimeout);
+ WAIT(callee->observer()->ice_gathering_complete_, kDefaultTimeout);
+ AddBufferedIceCandidates();
+ callee->AddBufferedIceCandidates();
+ WAIT(IsConnected(), kDefaultTimeout);
+ WAIT(callee->IsConnected(), kDefaultTimeout);
+ return IsConnected() && callee->IsConnected();
+ }
+
+ bool GenerateOfferAndCollectCandidates() {
+ auto offer = CreateOffer(RTCOfferAnswerOptions());
+ if (!offer) {
+ return false;
+ }
+ bool set_local_offer =
+ SetLocalDescription(CloneSessionDescription(offer.get()));
+ EXPECT_TRUE(set_local_offer);
+ if (!set_local_offer) {
+ return false;
+ }
+ EXPECT_TRUE_WAIT(observer()->ice_gathering_complete_, kDefaultTimeout);
+ return true;
+ }
+
+ webrtc::PeerConnectionInterface::IceGatheringState ice_gathering_state() {
+ return pc()->ice_gathering_state();
+ }
+
+ private:
+ // Candidates that have been sent but not yet configured
+ std::vector<std::unique_ptr<webrtc::IceCandidateInterface>>
+ buffered_candidates_;
+};
+
+// Buffers candidates until we add them via AddBufferedIceCandidates.
+void ObserverForUsageHistogramTest::OnIceCandidate(
+ const webrtc::IceCandidateInterface* candidate) {
+ // If target is not set, ignore. This happens in one-ended unit tests.
+ if (candidate_target_) {
+ this->candidate_target_->BufferIceCandidate(candidate);
+ }
+ candidate_gathered_ = true;
+}
+
+class PeerConnectionUsageHistogramTest : public ::testing::Test {
+ protected:
+ typedef std::unique_ptr<PeerConnectionWrapperForUsageHistogramTest>
+ WrapperPtr;
+
+ PeerConnectionUsageHistogramTest()
+ : vss_(new rtc::VirtualSocketServer()),
+ socket_factory_(new rtc::BasicPacketSocketFactory(vss_.get())),
+ main_(vss_.get()) {
+ webrtc::metrics::Reset();
+ }
+
+ WrapperPtr CreatePeerConnection() {
+ RTCConfiguration config;
+ config.sdp_semantics = webrtc::SdpSemantics::kUnifiedPlan;
+ return CreatePeerConnection(
+ config, PeerConnectionFactoryInterface::Options(), nullptr);
+ }
+
+ WrapperPtr CreatePeerConnection(const RTCConfiguration& config) {
+ return CreatePeerConnection(
+ config, PeerConnectionFactoryInterface::Options(), nullptr);
+ }
+
+ WrapperPtr CreatePeerConnectionWithMdns(const RTCConfiguration& config) {
+ auto resolver_factory =
+ std::make_unique<NiceMock<webrtc::MockAsyncDnsResolverFactory>>();
+
+ webrtc::PeerConnectionDependencies deps(nullptr /* observer_in */);
+
+ auto fake_network = NewFakeNetwork();
+ fake_network->set_mdns_responder(
+ std::make_unique<webrtc::FakeMdnsResponder>(rtc::Thread::Current()));
+ fake_network->AddInterface(NextLocalAddress());
+
+ std::unique_ptr<cricket::BasicPortAllocator> port_allocator(
+ new cricket::BasicPortAllocator(fake_network, socket_factory_.get()));
+
+ deps.async_dns_resolver_factory = std::move(resolver_factory);
+ deps.allocator = std::move(port_allocator);
+
+ return CreatePeerConnection(
+ config, PeerConnectionFactoryInterface::Options(), std::move(deps));
+ }
+
+ WrapperPtr CreatePeerConnectionWithImmediateReport() {
+ RTCConfiguration configuration;
+ configuration.sdp_semantics = webrtc::SdpSemantics::kUnifiedPlan;
+ configuration.report_usage_pattern_delay_ms = 0;
+ return CreatePeerConnection(
+ configuration, PeerConnectionFactoryInterface::Options(), nullptr);
+ }
+
+ WrapperPtr CreatePeerConnectionWithPrivateLocalAddresses() {
+ auto* fake_network = NewFakeNetwork();
+ fake_network->AddInterface(NextLocalAddress());
+ fake_network->AddInterface(kPrivateLocalAddress);
+
+ auto port_allocator = std::make_unique<cricket::BasicPortAllocator>(
+ fake_network, socket_factory_.get());
+ RTCConfiguration config;
+ config.sdp_semantics = SdpSemantics::kUnifiedPlan;
+ return CreatePeerConnection(config,
+ PeerConnectionFactoryInterface::Options(),
+ std::move(port_allocator));
+ }
+
+ WrapperPtr CreatePeerConnectionWithPrivateIpv6LocalAddresses() {
+ auto* fake_network = NewFakeNetwork();
+ fake_network->AddInterface(NextLocalAddress());
+ fake_network->AddInterface(kPrivateIpv6LocalAddress);
+
+ auto port_allocator = std::make_unique<cricket::BasicPortAllocator>(
+ fake_network, socket_factory_.get());
+
+ RTCConfiguration config;
+ config.sdp_semantics = SdpSemantics::kUnifiedPlan;
+ return CreatePeerConnection(config,
+ PeerConnectionFactoryInterface::Options(),
+ std::move(port_allocator));
+ }
+
+ WrapperPtr CreatePeerConnection(
+ const RTCConfiguration& config,
+ const PeerConnectionFactoryInterface::Options factory_options,
+ std::unique_ptr<cricket::PortAllocator> allocator) {
+ PeerConnectionDependencies deps(nullptr);
+ deps.allocator = std::move(allocator);
+
+ return CreatePeerConnection(config, factory_options, std::move(deps));
+ }
+
+ WrapperPtr CreatePeerConnection(
+ const RTCConfiguration& config,
+ const PeerConnectionFactoryInterface::Options factory_options,
+ PeerConnectionDependencies deps) {
+ auto pc_factory =
+ rtc::make_ref_counted<PeerConnectionFactoryForUsageHistogramTest>();
+ pc_factory->SetOptions(factory_options);
+
+ // If no allocator is provided, one will be created using a network manager
+ // that uses the host network. This doesn't work on all trybots.
+ if (!deps.allocator) {
+ auto fake_network = NewFakeNetwork();
+ fake_network->AddInterface(NextLocalAddress());
+ deps.allocator = std::make_unique<cricket::BasicPortAllocator>(
+ fake_network, socket_factory_.get());
+ }
+
+ auto observer = std::make_unique<ObserverForUsageHistogramTest>();
+ deps.observer = observer.get();
+
+ auto result =
+ pc_factory->CreatePeerConnectionOrError(config, std::move(deps));
+ if (!result.ok()) {
+ return nullptr;
+ }
+
+ observer->SetPeerConnectionInterface(result.value().get());
+ auto wrapper = std::make_unique<PeerConnectionWrapperForUsageHistogramTest>(
+ pc_factory, result.MoveValue(), std::move(observer));
+ return wrapper;
+ }
+
+ int ObservedFingerprint() {
+ // This works correctly only if there is only one sample value
+ // that has been counted.
+ // Returns -1 for "not found".
+ return webrtc::metrics::MinSample(kUsagePatternMetric);
+ }
+
+ // The PeerConnection's port allocator is tied to the PeerConnection's
+ // lifetime and expects the underlying NetworkManager to outlive it. That
+ // prevents us from having the PeerConnectionWrapper own the fake network.
+ // Therefore, the test fixture will own all the fake networks even though
+ // tests should access the fake network through the PeerConnectionWrapper.
+ rtc::FakeNetworkManager* NewFakeNetwork() {
+ fake_networks_.emplace_back(std::make_unique<rtc::FakeNetworkManager>());
+ return fake_networks_.back().get();
+ }
+
+ rtc::SocketAddress NextLocalAddress() {
+ RTC_DCHECK(next_local_address_ < (int)arraysize(kLocalAddrs));
+ return kLocalAddrs[next_local_address_++];
+ }
+
+ std::vector<std::unique_ptr<rtc::FakeNetworkManager>> fake_networks_;
+ int next_local_address_ = 0;
+ std::unique_ptr<rtc::VirtualSocketServer> vss_;
+ std::unique_ptr<rtc::BasicPacketSocketFactory> socket_factory_;
+ rtc::AutoSocketServerThread main_;
+};
+
+TEST_F(PeerConnectionUsageHistogramTest, UsageFingerprintHistogramFromTimeout) {
+ auto pc = CreatePeerConnectionWithImmediateReport();
+
+ int expected_fingerprint = MakeUsageFingerprint({});
+ EXPECT_METRIC_EQ_WAIT(1, webrtc::metrics::NumSamples(kUsagePatternMetric),
+ kDefaultTimeout);
+ EXPECT_METRIC_EQ(
+ 1, webrtc::metrics::NumEvents(kUsagePatternMetric, expected_fingerprint));
+}
+
+#ifndef WEBRTC_ANDROID
+// These tests do not work on Android. Why is unclear.
+// https://bugs.webrtc.org/9461
+
+// Test getting the usage fingerprint for an audio/video connection.
+TEST_F(PeerConnectionUsageHistogramTest, FingerprintAudioVideo) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+ caller->AddAudioTrack("audio");
+ caller->AddVideoTrack("video");
+ ASSERT_TRUE(caller->ConnectTo(callee.get()));
+ caller->pc()->Close();
+ callee->pc()->Close();
+ int expected_fingerprint = MakeUsageFingerprint(
+ {UsageEvent::AUDIO_ADDED, UsageEvent::VIDEO_ADDED,
+ UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED,
+ UsageEvent::SET_REMOTE_DESCRIPTION_SUCCEEDED,
+ UsageEvent::CANDIDATE_COLLECTED, UsageEvent::ADD_ICE_CANDIDATE_SUCCEEDED,
+ UsageEvent::ICE_STATE_CONNECTED, UsageEvent::REMOTE_CANDIDATE_ADDED,
+ UsageEvent::DIRECT_CONNECTION_SELECTED, UsageEvent::CLOSE_CALLED});
+ // In this case, we may or may not have PRIVATE_CANDIDATE_COLLECTED,
+ // depending on the machine configuration.
+ EXPECT_METRIC_EQ(2, webrtc::metrics::NumSamples(kUsagePatternMetric));
+ EXPECT_METRIC_TRUE(
+ webrtc::metrics::NumEvents(kUsagePatternMetric, expected_fingerprint) ==
+ 2 ||
+ webrtc::metrics::NumEvents(
+ kUsagePatternMetric,
+ expected_fingerprint |
+ static_cast<int>(UsageEvent::PRIVATE_CANDIDATE_COLLECTED)) == 2);
+}
+
+// Test getting the usage fingerprint when the caller collects an mDNS
+// candidate.
+TEST_F(PeerConnectionUsageHistogramTest, FingerprintWithMdnsCaller) {
+ RTCConfiguration config;
+ config.sdp_semantics = SdpSemantics::kUnifiedPlan;
+
+ // Enable hostname candidates with mDNS names.
+ auto caller = CreatePeerConnectionWithMdns(config);
+ auto callee = CreatePeerConnection(config);
+
+ caller->AddAudioTrack("audio");
+ caller->AddVideoTrack("video");
+ ASSERT_TRUE(caller->ConnectTo(callee.get()));
+ caller->pc()->Close();
+ callee->pc()->Close();
+
+ int expected_fingerprint_caller = MakeUsageFingerprint(
+ {UsageEvent::AUDIO_ADDED, UsageEvent::VIDEO_ADDED,
+ UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED,
+ UsageEvent::SET_REMOTE_DESCRIPTION_SUCCEEDED,
+ UsageEvent::CANDIDATE_COLLECTED, UsageEvent::MDNS_CANDIDATE_COLLECTED,
+ UsageEvent::ADD_ICE_CANDIDATE_SUCCEEDED, UsageEvent::ICE_STATE_CONNECTED,
+ UsageEvent::REMOTE_CANDIDATE_ADDED,
+ UsageEvent::DIRECT_CONNECTION_SELECTED, UsageEvent::CLOSE_CALLED});
+
+ // Without a resolver, the callee cannot resolve the received mDNS candidate
+ // but can still connect with the caller via a prflx candidate. As a result,
+ // the bit for the direct connection should not be logged.
+ int expected_fingerprint_callee = MakeUsageFingerprint(
+ {UsageEvent::AUDIO_ADDED, UsageEvent::VIDEO_ADDED,
+ UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED,
+ UsageEvent::SET_REMOTE_DESCRIPTION_SUCCEEDED,
+ UsageEvent::CANDIDATE_COLLECTED, UsageEvent::ADD_ICE_CANDIDATE_SUCCEEDED,
+ UsageEvent::REMOTE_MDNS_CANDIDATE_ADDED, UsageEvent::ICE_STATE_CONNECTED,
+ UsageEvent::REMOTE_CANDIDATE_ADDED, UsageEvent::CLOSE_CALLED});
+ EXPECT_METRIC_EQ(2, webrtc::metrics::NumSamples(kUsagePatternMetric));
+ EXPECT_METRIC_EQ(1, webrtc::metrics::NumEvents(kUsagePatternMetric,
+ expected_fingerprint_caller));
+ EXPECT_METRIC_EQ(1, webrtc::metrics::NumEvents(kUsagePatternMetric,
+ expected_fingerprint_callee));
+}
+
+// Test getting the usage fingerprint when the callee collects an mDNS
+// candidate.
+TEST_F(PeerConnectionUsageHistogramTest, FingerprintWithMdnsCallee) {
+ RTCConfiguration config;
+ config.sdp_semantics = SdpSemantics::kUnifiedPlan;
+
+ // Enable hostname candidates with mDNS names.
+ auto caller = CreatePeerConnection(config);
+ auto callee = CreatePeerConnectionWithMdns(config);
+
+ caller->AddAudioTrack("audio");
+ caller->AddVideoTrack("video");
+ ASSERT_TRUE(caller->ConnectTo(callee.get()));
+ caller->pc()->Close();
+ callee->pc()->Close();
+
+ // Similar to the test above, the caller connects with the callee via a prflx
+ // candidate.
+ int expected_fingerprint_caller = MakeUsageFingerprint(
+ {UsageEvent::AUDIO_ADDED, UsageEvent::VIDEO_ADDED,
+ UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED,
+ UsageEvent::SET_REMOTE_DESCRIPTION_SUCCEEDED,
+ UsageEvent::CANDIDATE_COLLECTED, UsageEvent::ADD_ICE_CANDIDATE_SUCCEEDED,
+ UsageEvent::REMOTE_MDNS_CANDIDATE_ADDED, UsageEvent::ICE_STATE_CONNECTED,
+ UsageEvent::REMOTE_CANDIDATE_ADDED, UsageEvent::CLOSE_CALLED});
+
+ int expected_fingerprint_callee = MakeUsageFingerprint(
+ {UsageEvent::AUDIO_ADDED, UsageEvent::VIDEO_ADDED,
+ UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED,
+ UsageEvent::SET_REMOTE_DESCRIPTION_SUCCEEDED,
+ UsageEvent::CANDIDATE_COLLECTED, UsageEvent::MDNS_CANDIDATE_COLLECTED,
+ UsageEvent::ADD_ICE_CANDIDATE_SUCCEEDED, UsageEvent::ICE_STATE_CONNECTED,
+ UsageEvent::REMOTE_CANDIDATE_ADDED,
+ UsageEvent::DIRECT_CONNECTION_SELECTED, UsageEvent::CLOSE_CALLED});
+ EXPECT_METRIC_EQ(2, webrtc::metrics::NumSamples(kUsagePatternMetric));
+ EXPECT_METRIC_EQ(1, webrtc::metrics::NumEvents(kUsagePatternMetric,
+ expected_fingerprint_caller));
+ EXPECT_METRIC_EQ(1, webrtc::metrics::NumEvents(kUsagePatternMetric,
+ expected_fingerprint_callee));
+}
+
+#ifdef WEBRTC_HAVE_SCTP
+TEST_F(PeerConnectionUsageHistogramTest, FingerprintDataOnly) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+ caller->CreateDataChannel("foodata");
+ ASSERT_TRUE(caller->ConnectTo(callee.get()));
+ ASSERT_TRUE_WAIT(callee->HaveDataChannel(), kDefaultTimeout);
+ caller->pc()->Close();
+ callee->pc()->Close();
+ int expected_fingerprint = MakeUsageFingerprint(
+ {UsageEvent::DATA_ADDED, UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED,
+ UsageEvent::SET_REMOTE_DESCRIPTION_SUCCEEDED,
+ UsageEvent::CANDIDATE_COLLECTED, UsageEvent::ADD_ICE_CANDIDATE_SUCCEEDED,
+ UsageEvent::ICE_STATE_CONNECTED, UsageEvent::REMOTE_CANDIDATE_ADDED,
+ UsageEvent::DIRECT_CONNECTION_SELECTED, UsageEvent::CLOSE_CALLED});
+ EXPECT_METRIC_EQ(2, webrtc::metrics::NumSamples(kUsagePatternMetric));
+ EXPECT_METRIC_TRUE(
+ webrtc::metrics::NumEvents(kUsagePatternMetric, expected_fingerprint) ==
+ 2 ||
+ webrtc::metrics::NumEvents(
+ kUsagePatternMetric,
+ expected_fingerprint |
+ static_cast<int>(UsageEvent::PRIVATE_CANDIDATE_COLLECTED)) == 2);
+}
+#endif // WEBRTC_HAVE_SCTP
+#endif // WEBRTC_ANDROID
+
+TEST_F(PeerConnectionUsageHistogramTest, FingerprintStunTurn) {
+ RTCConfiguration configuration;
+ configuration.sdp_semantics = SdpSemantics::kUnifiedPlan;
+ PeerConnection::IceServer server;
+ server.urls = {"stun:dummy.stun.server"};
+ configuration.servers.push_back(server);
+ server.urls = {"turn:dummy.turn.server"};
+ server.username = "username";
+ server.password = "password";
+ configuration.servers.push_back(server);
+ auto caller = CreatePeerConnection(configuration);
+ ASSERT_TRUE(caller);
+ caller->pc()->Close();
+ int expected_fingerprint = MakeUsageFingerprint(
+ {UsageEvent::STUN_SERVER_ADDED, UsageEvent::TURN_SERVER_ADDED,
+ UsageEvent::CLOSE_CALLED});
+ EXPECT_METRIC_EQ(1, webrtc::metrics::NumSamples(kUsagePatternMetric));
+ EXPECT_METRIC_EQ(
+ 1, webrtc::metrics::NumEvents(kUsagePatternMetric, expected_fingerprint));
+}
+
+TEST_F(PeerConnectionUsageHistogramTest, FingerprintStunTurnInReconfiguration) {
+ RTCConfiguration configuration;
+ configuration.sdp_semantics = SdpSemantics::kUnifiedPlan;
+ PeerConnection::IceServer server;
+ server.urls = {"stun:dummy.stun.server"};
+ configuration.servers.push_back(server);
+ server.urls = {"turn:dummy.turn.server"};
+ server.username = "username";
+ server.password = "password";
+ configuration.servers.push_back(server);
+ auto caller = CreatePeerConnection();
+ ASSERT_TRUE(caller);
+ ASSERT_TRUE(caller->pc()->SetConfiguration(configuration).ok());
+ caller->pc()->Close();
+ int expected_fingerprint = MakeUsageFingerprint(
+ {UsageEvent::STUN_SERVER_ADDED, UsageEvent::TURN_SERVER_ADDED,
+ UsageEvent::CLOSE_CALLED});
+ EXPECT_METRIC_EQ(1, webrtc::metrics::NumSamples(kUsagePatternMetric));
+ EXPECT_METRIC_EQ(
+ 1, webrtc::metrics::NumEvents(kUsagePatternMetric, expected_fingerprint));
+}
+
+TEST_F(PeerConnectionUsageHistogramTest, FingerprintWithPrivateIPCaller) {
+ auto caller = CreatePeerConnectionWithPrivateLocalAddresses();
+ auto callee = CreatePeerConnection();
+ caller->AddAudioTrack("audio");
+ ASSERT_TRUE(caller->ConnectTo(callee.get()));
+ caller->pc()->Close();
+ callee->pc()->Close();
+
+ int expected_fingerprint_caller = MakeUsageFingerprint(
+ {UsageEvent::AUDIO_ADDED, UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED,
+ UsageEvent::SET_REMOTE_DESCRIPTION_SUCCEEDED,
+ UsageEvent::CANDIDATE_COLLECTED, UsageEvent::PRIVATE_CANDIDATE_COLLECTED,
+ UsageEvent::ADD_ICE_CANDIDATE_SUCCEEDED, UsageEvent::ICE_STATE_CONNECTED,
+ UsageEvent::REMOTE_CANDIDATE_ADDED,
+ UsageEvent::DIRECT_CONNECTION_SELECTED, UsageEvent::CLOSE_CALLED});
+
+ int expected_fingerprint_callee = MakeUsageFingerprint(
+ {UsageEvent::AUDIO_ADDED, UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED,
+ UsageEvent::SET_REMOTE_DESCRIPTION_SUCCEEDED,
+ UsageEvent::CANDIDATE_COLLECTED, UsageEvent::ADD_ICE_CANDIDATE_SUCCEEDED,
+ UsageEvent::REMOTE_PRIVATE_CANDIDATE_ADDED,
+ UsageEvent::ICE_STATE_CONNECTED, UsageEvent::REMOTE_CANDIDATE_ADDED,
+ UsageEvent::DIRECT_CONNECTION_SELECTED, UsageEvent::CLOSE_CALLED});
+ EXPECT_METRIC_EQ(2, webrtc::metrics::NumSamples(kUsagePatternMetric));
+ EXPECT_METRIC_EQ(1, webrtc::metrics::NumEvents(kUsagePatternMetric,
+ expected_fingerprint_caller));
+ EXPECT_METRIC_EQ(1, webrtc::metrics::NumEvents(kUsagePatternMetric,
+ expected_fingerprint_callee));
+}
+
+TEST_F(PeerConnectionUsageHistogramTest, FingerprintWithPrivateIpv6Callee) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnectionWithPrivateIpv6LocalAddresses();
+ caller->AddAudioTrack("audio");
+ ASSERT_TRUE(caller->ConnectTo(callee.get()));
+ caller->pc()->Close();
+ callee->pc()->Close();
+
+ int expected_fingerprint_caller = MakeUsageFingerprint(
+ {UsageEvent::AUDIO_ADDED, UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED,
+ UsageEvent::SET_REMOTE_DESCRIPTION_SUCCEEDED,
+ UsageEvent::CANDIDATE_COLLECTED, UsageEvent::ADD_ICE_CANDIDATE_SUCCEEDED,
+ UsageEvent::REMOTE_PRIVATE_CANDIDATE_ADDED,
+ UsageEvent::ICE_STATE_CONNECTED, UsageEvent::REMOTE_CANDIDATE_ADDED,
+ UsageEvent::REMOTE_IPV6_CANDIDATE_ADDED,
+ UsageEvent::DIRECT_CONNECTION_SELECTED, UsageEvent::CLOSE_CALLED});
+
+ int expected_fingerprint_callee = MakeUsageFingerprint(
+ {UsageEvent::AUDIO_ADDED, UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED,
+ UsageEvent::SET_REMOTE_DESCRIPTION_SUCCEEDED,
+ UsageEvent::CANDIDATE_COLLECTED, UsageEvent::PRIVATE_CANDIDATE_COLLECTED,
+ UsageEvent::IPV6_CANDIDATE_COLLECTED,
+ UsageEvent::ADD_ICE_CANDIDATE_SUCCEEDED,
+ UsageEvent::REMOTE_CANDIDATE_ADDED, UsageEvent::ICE_STATE_CONNECTED,
+ UsageEvent::DIRECT_CONNECTION_SELECTED, UsageEvent::CLOSE_CALLED});
+ EXPECT_METRIC_EQ(2, webrtc::metrics::NumSamples(kUsagePatternMetric));
+ EXPECT_METRIC_EQ(1, webrtc::metrics::NumEvents(kUsagePatternMetric,
+ expected_fingerprint_caller));
+ EXPECT_METRIC_EQ(1, webrtc::metrics::NumEvents(kUsagePatternMetric,
+ expected_fingerprint_callee));
+}
+
+#ifndef WEBRTC_ANDROID
+#ifdef WEBRTC_HAVE_SCTP
+// Test that the usage pattern bits for adding remote (private IPv6) candidates
+// are set when the remote candidates are retrieved from the Offer SDP instead
+// of trickled ICE messages.
+TEST_F(PeerConnectionUsageHistogramTest,
+ AddRemoteCandidatesFromRemoteDescription) {
+ // We construct the following data-channel-only scenario. The caller collects
+ // IPv6 private local candidates and appends them in the Offer as in
+ // non-trickled sessions. The callee collects mDNS candidates that are not
+ // contained in the Answer as in Trickle ICE. Only the Offer and Answer are
+ // signaled and we expect a connection with prflx remote candidates at the
+ // caller side.
+ auto caller = CreatePeerConnectionWithPrivateIpv6LocalAddresses();
+ RTCConfiguration config;
+ config.sdp_semantics = SdpSemantics::kUnifiedPlan;
+ auto callee = CreatePeerConnectionWithMdns(config);
+ caller->CreateDataChannel("test_channel");
+ ASSERT_TRUE(caller->SetLocalDescription(caller->CreateOffer()));
+ // Wait until the gathering completes so that the session description would
+ // have contained ICE candidates.
+ EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceGatheringComplete,
+ caller->ice_gathering_state(), kDefaultTimeout);
+ EXPECT_TRUE(caller->observer()->candidate_gathered());
+ // Get the current offer that contains candidates and pass it to the callee.
+ //
+ // Note that we cannot use CloneSessionDescription on `cur_offer` to obtain an
+ // SDP with candidates. The method above does not strictly copy everything, in
+ // particular, not copying the ICE candidates.
+ // TODO(qingsi): Technically, this is a bug. Fix it.
+ auto cur_offer = caller->pc()->local_description();
+ ASSERT_TRUE(cur_offer);
+ std::string sdp_with_candidates_str;
+ cur_offer->ToString(&sdp_with_candidates_str);
+ auto offer = std::make_unique<JsepSessionDescription>(SdpType::kOffer);
+ ASSERT_TRUE(SdpDeserialize(sdp_with_candidates_str, offer.get(),
+ nullptr /* error */));
+ ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer)));
+
+ // By default, the Answer created does not contain ICE candidates.
+ auto answer = callee->CreateAnswer();
+ callee->SetLocalDescription(CloneSessionDescription(answer.get()));
+ caller->SetRemoteDescription(std::move(answer));
+ EXPECT_TRUE_WAIT(caller->IsConnected(), kDefaultTimeout);
+ EXPECT_TRUE_WAIT(callee->IsConnected(), kDefaultTimeout);
+ // The callee needs to process the open message to have the data channel open.
+ EXPECT_TRUE_WAIT(callee->observer()->last_datachannel_ != nullptr,
+ kDefaultTimeout);
+ caller->pc()->Close();
+ callee->pc()->Close();
+
+ // The caller should not have added any remote candidate either via
+ // AddIceCandidate or from the remote description. Also, the caller connects
+ // with the callee via a prflx candidate and hence no direct connection bit
+ // should be set.
+ int expected_fingerprint_caller = MakeUsageFingerprint(
+ {UsageEvent::DATA_ADDED, UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED,
+ UsageEvent::SET_REMOTE_DESCRIPTION_SUCCEEDED,
+ UsageEvent::CANDIDATE_COLLECTED, UsageEvent::PRIVATE_CANDIDATE_COLLECTED,
+ UsageEvent::IPV6_CANDIDATE_COLLECTED, UsageEvent::ICE_STATE_CONNECTED,
+ UsageEvent::CLOSE_CALLED});
+
+ int expected_fingerprint_callee = MakeUsageFingerprint(
+ {UsageEvent::DATA_ADDED, UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED,
+ UsageEvent::SET_REMOTE_DESCRIPTION_SUCCEEDED,
+ UsageEvent::CANDIDATE_COLLECTED, UsageEvent::MDNS_CANDIDATE_COLLECTED,
+ UsageEvent::REMOTE_CANDIDATE_ADDED,
+ UsageEvent::REMOTE_PRIVATE_CANDIDATE_ADDED,
+ UsageEvent::REMOTE_IPV6_CANDIDATE_ADDED, UsageEvent::ICE_STATE_CONNECTED,
+ UsageEvent::DIRECT_CONNECTION_SELECTED, UsageEvent::CLOSE_CALLED});
+ EXPECT_METRIC_EQ(2, webrtc::metrics::NumSamples(kUsagePatternMetric));
+ EXPECT_METRIC_EQ(1, webrtc::metrics::NumEvents(kUsagePatternMetric,
+ expected_fingerprint_caller));
+ EXPECT_METRIC_EQ(1, webrtc::metrics::NumEvents(kUsagePatternMetric,
+ expected_fingerprint_callee));
+}
+
+TEST_F(PeerConnectionUsageHistogramTest, NotableUsageNoted) {
+ auto caller = CreatePeerConnection();
+ caller->CreateDataChannel("foo");
+ caller->GenerateOfferAndCollectCandidates();
+ caller->pc()->Close();
+ int expected_fingerprint = MakeUsageFingerprint(
+ {UsageEvent::DATA_ADDED, UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED,
+ UsageEvent::CANDIDATE_COLLECTED, UsageEvent::CLOSE_CALLED});
+ EXPECT_METRIC_EQ(1, webrtc::metrics::NumSamples(kUsagePatternMetric));
+ EXPECT_METRIC_TRUE(
+ expected_fingerprint == ObservedFingerprint() ||
+ (expected_fingerprint |
+ static_cast<int>(UsageEvent::PRIVATE_CANDIDATE_COLLECTED)) ==
+ ObservedFingerprint());
+ EXPECT_METRIC_EQ(absl::make_optional(ObservedFingerprint()),
+ caller->observer()->interesting_usage_detected());
+}
+
+TEST_F(PeerConnectionUsageHistogramTest, NotableUsageOnEventFiring) {
+ auto caller = CreatePeerConnection();
+ caller->CreateDataChannel("foo");
+ caller->GenerateOfferAndCollectCandidates();
+ int expected_fingerprint = MakeUsageFingerprint(
+ {UsageEvent::DATA_ADDED, UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED,
+ UsageEvent::CANDIDATE_COLLECTED});
+ EXPECT_METRIC_EQ(0, webrtc::metrics::NumSamples(kUsagePatternMetric));
+ caller->GetInternalPeerConnection()->RequestUsagePatternReportForTesting();
+ EXPECT_METRIC_EQ_WAIT(1, webrtc::metrics::NumSamples(kUsagePatternMetric),
+ kDefaultTimeout);
+ EXPECT_METRIC_TRUE(
+ expected_fingerprint == ObservedFingerprint() ||
+ (expected_fingerprint |
+ static_cast<int>(UsageEvent::PRIVATE_CANDIDATE_COLLECTED)) ==
+ ObservedFingerprint());
+ EXPECT_METRIC_EQ(absl::make_optional(ObservedFingerprint()),
+ caller->observer()->interesting_usage_detected());
+}
+
+TEST_F(PeerConnectionUsageHistogramTest,
+ NoNotableUsageOnEventFiringAfterClose) {
+ auto caller = CreatePeerConnection();
+ caller->CreateDataChannel("foo");
+ caller->GenerateOfferAndCollectCandidates();
+ int expected_fingerprint = MakeUsageFingerprint(
+ {UsageEvent::DATA_ADDED, UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED,
+ UsageEvent::CANDIDATE_COLLECTED, UsageEvent::CLOSE_CALLED});
+ EXPECT_METRIC_EQ(0, webrtc::metrics::NumSamples(kUsagePatternMetric));
+ caller->pc()->Close();
+ EXPECT_METRIC_EQ(1, webrtc::metrics::NumSamples(kUsagePatternMetric));
+ caller->GetInternalPeerConnection()->RequestUsagePatternReportForTesting();
+ caller->observer()->ClearInterestingUsageDetector();
+ EXPECT_METRIC_EQ_WAIT(2, webrtc::metrics::NumSamples(kUsagePatternMetric),
+ kDefaultTimeout);
+ EXPECT_METRIC_TRUE(
+ expected_fingerprint == ObservedFingerprint() ||
+ (expected_fingerprint |
+ static_cast<int>(UsageEvent::PRIVATE_CANDIDATE_COLLECTED)) ==
+ ObservedFingerprint());
+ // After close, the usage-detection callback should NOT have been called.
+ EXPECT_METRIC_FALSE(caller->observer()->interesting_usage_detected());
+}
+#endif
+#endif
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/peer_connection_ice_unittest.cc b/third_party/libwebrtc/pc/peer_connection_ice_unittest.cc
new file mode 100644
index 0000000000..532583f307
--- /dev/null
+++ b/third_party/libwebrtc/pc/peer_connection_ice_unittest.cc
@@ -0,0 +1,1589 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <memory>
+#include <string>
+#include <tuple>
+#include <type_traits>
+#include <utility>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/audio/audio_mixer.h"
+#include "api/candidate.h"
+#include "api/ice_transport_interface.h"
+#include "api/jsep.h"
+#include "api/media_types.h"
+#include "api/peer_connection_interface.h"
+#include "api/rtc_error.h"
+#include "api/scoped_refptr.h"
+#include "modules/audio_device/include/audio_device.h"
+#include "modules/audio_processing/include/audio_processing.h"
+#include "p2p/base/fake_port_allocator.h"
+#include "p2p/base/ice_transport_internal.h"
+#include "p2p/base/p2p_constants.h"
+#include "p2p/base/port.h"
+#include "p2p/base/port_allocator.h"
+#include "p2p/base/transport_description.h"
+#include "p2p/base/transport_info.h"
+#include "p2p/client/basic_port_allocator.h"
+#include "pc/channel_interface.h"
+#include "pc/dtls_transport.h"
+#include "pc/media_session.h"
+#include "pc/peer_connection.h"
+#include "pc/peer_connection_wrapper.h"
+#include "pc/rtp_transceiver.h"
+#include "pc/sdp_utils.h"
+#include "pc/session_description.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/internal/default_socket_server.h"
+#include "rtc_base/ip_address.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/net_helper.h"
+#include "rtc_base/rtc_certificate_generator.h"
+#include "rtc_base/socket_address.h"
+#include "rtc_base/thread.h"
+#include "test/gtest.h"
+#include "test/scoped_key_value_config.h"
+#ifdef WEBRTC_ANDROID
+#include "pc/test/android_test_initializer.h"
+#endif
+#include "api/audio_codecs/builtin_audio_decoder_factory.h"
+#include "api/audio_codecs/builtin_audio_encoder_factory.h"
+#include "api/create_peerconnection_factory.h"
+#include "api/uma_metrics.h"
+#include "api/video_codecs/video_decoder_factory_template.h"
+#include "api/video_codecs/video_decoder_factory_template_dav1d_adapter.h"
+#include "api/video_codecs/video_decoder_factory_template_libvpx_vp8_adapter.h"
+#include "api/video_codecs/video_decoder_factory_template_libvpx_vp9_adapter.h"
+#include "api/video_codecs/video_decoder_factory_template_open_h264_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template.h"
+#include "api/video_codecs/video_encoder_factory_template_libaom_av1_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template_libvpx_vp8_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template_libvpx_vp9_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template_open_h264_adapter.h"
+#include "pc/peer_connection_proxy.h"
+#include "pc/test/fake_audio_capture_module.h"
+#include "pc/test/mock_peer_connection_observers.h"
+#include "rtc_base/fake_network.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/strings/string_builder.h"
+#include "rtc_base/virtual_socket_server.h"
+#include "system_wrappers/include/metrics.h"
+#include "test/gmock.h"
+
+namespace webrtc {
+
+using RTCConfiguration = PeerConnectionInterface::RTCConfiguration;
+using RTCOfferAnswerOptions = PeerConnectionInterface::RTCOfferAnswerOptions;
+using rtc::SocketAddress;
+using ::testing::Combine;
+using ::testing::ElementsAre;
+using ::testing::Pair;
+using ::testing::Values;
+
+constexpr int kIceCandidatesTimeout = 10000;
+constexpr int64_t kWaitTimeout = 10000;
+constexpr uint64_t kTiebreakerDefault = 44444;
+
+class PeerConnectionWrapperForIceTest : public PeerConnectionWrapper {
+ public:
+ using PeerConnectionWrapper::PeerConnectionWrapper;
+
+ std::unique_ptr<IceCandidateInterface> CreateJsepCandidateForFirstTransport(
+ cricket::Candidate* candidate) {
+ RTC_DCHECK(pc()->remote_description());
+ const auto* desc = pc()->remote_description()->description();
+ RTC_DCHECK(desc->contents().size() > 0);
+ const auto& first_content = desc->contents()[0];
+ candidate->set_transport_name(first_content.name);
+ return CreateIceCandidate(first_content.name, -1, *candidate);
+ }
+
+ // Adds a new ICE candidate to the first transport.
+ bool AddIceCandidate(cricket::Candidate* candidate) {
+ return pc()->AddIceCandidate(
+ CreateJsepCandidateForFirstTransport(candidate).get());
+ }
+
+ // Returns ICE candidates from the remote session description.
+ std::vector<const IceCandidateInterface*>
+ GetIceCandidatesFromRemoteDescription() {
+ const SessionDescriptionInterface* sdesc = pc()->remote_description();
+ RTC_DCHECK(sdesc);
+ std::vector<const IceCandidateInterface*> candidates;
+ for (size_t mline_index = 0; mline_index < sdesc->number_of_mediasections();
+ mline_index++) {
+ const auto* candidate_collection = sdesc->candidates(mline_index);
+ for (size_t i = 0; i < candidate_collection->count(); i++) {
+ candidates.push_back(candidate_collection->at(i));
+ }
+ }
+ return candidates;
+ }
+
+ rtc::FakeNetworkManager* network() { return network_; }
+
+ void set_network(rtc::FakeNetworkManager* network) { network_ = network; }
+
+ // The port allocator used by this PC.
+ cricket::PortAllocator* port_allocator_;
+
+ private:
+ rtc::FakeNetworkManager* network_;
+};
+
+class PeerConnectionIceBaseTest : public ::testing::Test {
+ protected:
+ typedef std::unique_ptr<PeerConnectionWrapperForIceTest> WrapperPtr;
+
+ explicit PeerConnectionIceBaseTest(SdpSemantics sdp_semantics)
+ : vss_(new rtc::VirtualSocketServer()),
+ socket_factory_(new rtc::BasicPacketSocketFactory(vss_.get())),
+ main_(vss_.get()),
+ sdp_semantics_(sdp_semantics) {
+#ifdef WEBRTC_ANDROID
+ InitializeAndroidObjects();
+#endif
+ pc_factory_ = CreatePeerConnectionFactory(
+ rtc::Thread::Current(), rtc::Thread::Current(), rtc::Thread::Current(),
+ rtc::scoped_refptr<AudioDeviceModule>(FakeAudioCaptureModule::Create()),
+ CreateBuiltinAudioEncoderFactory(), CreateBuiltinAudioDecoderFactory(),
+ std::make_unique<VideoEncoderFactoryTemplate<
+ LibvpxVp8EncoderTemplateAdapter, LibvpxVp9EncoderTemplateAdapter,
+ OpenH264EncoderTemplateAdapter, LibaomAv1EncoderTemplateAdapter>>(),
+ std::make_unique<VideoDecoderFactoryTemplate<
+ LibvpxVp8DecoderTemplateAdapter, LibvpxVp9DecoderTemplateAdapter,
+ OpenH264DecoderTemplateAdapter, Dav1dDecoderTemplateAdapter>>(),
+ nullptr /* audio_mixer */, nullptr /* audio_processing */);
+ }
+
+ WrapperPtr CreatePeerConnection() {
+ return CreatePeerConnection(RTCConfiguration());
+ }
+
+ WrapperPtr CreatePeerConnection(const RTCConfiguration& config) {
+ auto* fake_network = NewFakeNetwork();
+ auto port_allocator = std::make_unique<cricket::BasicPortAllocator>(
+ fake_network, socket_factory_.get());
+ port_allocator->set_flags(cricket::PORTALLOCATOR_DISABLE_TCP |
+ cricket::PORTALLOCATOR_DISABLE_RELAY);
+ port_allocator->set_step_delay(cricket::kMinimumStepDelay);
+ RTCConfiguration modified_config = config;
+ modified_config.sdp_semantics = sdp_semantics_;
+ auto observer = std::make_unique<MockPeerConnectionObserver>();
+ auto port_allocator_copy = port_allocator.get();
+ PeerConnectionDependencies pc_dependencies(observer.get());
+ pc_dependencies.allocator = std::move(port_allocator);
+ auto result = pc_factory_->CreatePeerConnectionOrError(
+ modified_config, std::move(pc_dependencies));
+ if (!result.ok()) {
+ return nullptr;
+ }
+
+ observer->SetPeerConnectionInterface(result.value().get());
+ auto wrapper = std::make_unique<PeerConnectionWrapperForIceTest>(
+ pc_factory_, result.MoveValue(), std::move(observer));
+ wrapper->set_network(fake_network);
+ wrapper->port_allocator_ = port_allocator_copy;
+ return wrapper;
+ }
+
+ // Accepts the same arguments as CreatePeerConnection and adds default audio
+ // and video tracks.
+ template <typename... Args>
+ WrapperPtr CreatePeerConnectionWithAudioVideo(Args&&... args) {
+ auto wrapper = CreatePeerConnection(std::forward<Args>(args)...);
+ if (!wrapper) {
+ return nullptr;
+ }
+ wrapper->AddAudioTrack("a");
+ wrapper->AddVideoTrack("v");
+ return wrapper;
+ }
+
+ cricket::Candidate CreateLocalUdpCandidate(
+ const rtc::SocketAddress& address) {
+ cricket::Candidate candidate;
+ candidate.set_component(cricket::ICE_CANDIDATE_COMPONENT_DEFAULT);
+ candidate.set_protocol(cricket::UDP_PROTOCOL_NAME);
+ candidate.set_address(address);
+ candidate.set_type(cricket::LOCAL_PORT_TYPE);
+ return candidate;
+ }
+
+ // Remove all ICE ufrag/pwd lines from the given session description.
+ void RemoveIceUfragPwd(SessionDescriptionInterface* sdesc) {
+ SetIceUfragPwd(sdesc, "", "");
+ }
+
+ // Sets all ICE ufrag/pwds on the given session description.
+ void SetIceUfragPwd(SessionDescriptionInterface* sdesc,
+ const std::string& ufrag,
+ const std::string& pwd) {
+ auto* desc = sdesc->description();
+ for (const auto& content : desc->contents()) {
+ auto* transport_info = desc->GetTransportInfoByName(content.name);
+ transport_info->description.ice_ufrag = ufrag;
+ transport_info->description.ice_pwd = pwd;
+ }
+ }
+
+ // Set ICE mode on the given session description.
+ void SetIceMode(SessionDescriptionInterface* sdesc,
+ const cricket::IceMode ice_mode) {
+ auto* desc = sdesc->description();
+ for (const auto& content : desc->contents()) {
+ auto* transport_info = desc->GetTransportInfoByName(content.name);
+ transport_info->description.ice_mode = ice_mode;
+ }
+ }
+
+ cricket::TransportDescription* GetFirstTransportDescription(
+ SessionDescriptionInterface* sdesc) {
+ auto* desc = sdesc->description();
+ RTC_DCHECK(desc->contents().size() > 0);
+ auto* transport_info =
+ desc->GetTransportInfoByName(desc->contents()[0].name);
+ RTC_DCHECK(transport_info);
+ return &transport_info->description;
+ }
+
+ const cricket::TransportDescription* GetFirstTransportDescription(
+ const SessionDescriptionInterface* sdesc) {
+ auto* desc = sdesc->description();
+ RTC_DCHECK(desc->contents().size() > 0);
+ auto* transport_info =
+ desc->GetTransportInfoByName(desc->contents()[0].name);
+ RTC_DCHECK(transport_info);
+ return &transport_info->description;
+ }
+
+ // TODO(qingsi): Rewrite this method in terms of the standard IceTransport
+ // after it is implemented.
+ cricket::IceRole GetIceRole(const WrapperPtr& pc_wrapper_ptr) {
+ auto* pc_proxy =
+ static_cast<PeerConnectionProxyWithInternal<PeerConnectionInterface>*>(
+ pc_wrapper_ptr->pc());
+ PeerConnection* pc = static_cast<PeerConnection*>(pc_proxy->internal());
+ for (const auto& transceiver : pc->GetTransceiversInternal()) {
+ if (transceiver->media_type() == cricket::MEDIA_TYPE_AUDIO) {
+ auto dtls_transport = pc->LookupDtlsTransportByMidInternal(
+ transceiver->internal()->channel()->mid());
+ return dtls_transport->ice_transport()->internal()->GetIceRole();
+ }
+ }
+ RTC_DCHECK_NOTREACHED();
+ return cricket::ICEROLE_UNKNOWN;
+ }
+
+ // Returns a list of (ufrag, pwd) pairs in the order that they appear in
+ // `description`, or the empty list if `description` is null.
+ std::vector<std::pair<std::string, std::string>> GetIceCredentials(
+ const SessionDescriptionInterface* description) {
+ std::vector<std::pair<std::string, std::string>> ice_credentials;
+ if (!description)
+ return ice_credentials;
+ const auto* desc = description->description();
+ for (const auto& content_info : desc->contents()) {
+ const auto* transport_info =
+ desc->GetTransportInfoByName(content_info.name);
+ if (transport_info) {
+ ice_credentials.push_back(
+ std::make_pair(transport_info->description.ice_ufrag,
+ transport_info->description.ice_pwd));
+ }
+ }
+ return ice_credentials;
+ }
+
+ bool AddCandidateToFirstTransport(cricket::Candidate* candidate,
+ SessionDescriptionInterface* sdesc) {
+ auto* desc = sdesc->description();
+ RTC_DCHECK(desc->contents().size() > 0);
+ const auto& first_content = desc->contents()[0];
+ candidate->set_transport_name(first_content.name);
+ std::unique_ptr<IceCandidateInterface> jsep_candidate =
+ CreateIceCandidate(first_content.name, 0, *candidate);
+ return sdesc->AddCandidate(jsep_candidate.get());
+ }
+
+ rtc::FakeNetworkManager* NewFakeNetwork() {
+ // The PeerConnection's port allocator is tied to the PeerConnection's
+ // lifetime and expects the underlying NetworkManager to outlive it. That
+ // prevents us from having the PeerConnectionWrapper own the fake network.
+ // Therefore, the test fixture will own all the fake networks even though
+ // tests should access the fake network through the PeerConnectionWrapper.
+ auto* fake_network = new rtc::FakeNetworkManager();
+ fake_networks_.emplace_back(fake_network);
+ return fake_network;
+ }
+
+ std::unique_ptr<rtc::VirtualSocketServer> vss_;
+ std::unique_ptr<rtc::BasicPacketSocketFactory> socket_factory_;
+ rtc::AutoSocketServerThread main_;
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> pc_factory_;
+ std::vector<std::unique_ptr<rtc::FakeNetworkManager>> fake_networks_;
+ const SdpSemantics sdp_semantics_;
+};
+
+class PeerConnectionIceTest
+ : public PeerConnectionIceBaseTest,
+ public ::testing::WithParamInterface<SdpSemantics> {
+ protected:
+ PeerConnectionIceTest() : PeerConnectionIceBaseTest(GetParam()) {
+ webrtc::metrics::Reset();
+ }
+};
+
+::testing::AssertionResult AssertCandidatesEqual(const char* a_expr,
+ const char* b_expr,
+ const cricket::Candidate& a,
+ const cricket::Candidate& b) {
+ rtc::StringBuilder failure_info;
+ if (a.component() != b.component()) {
+ failure_info << "\ncomponent: " << a.component() << " != " << b.component();
+ }
+ if (a.protocol() != b.protocol()) {
+ failure_info << "\nprotocol: " << a.protocol() << " != " << b.protocol();
+ }
+ if (a.address() != b.address()) {
+ failure_info << "\naddress: " << a.address().ToString()
+ << " != " << b.address().ToString();
+ }
+ if (a.type() != b.type()) {
+ failure_info << "\ntype: " << a.type() << " != " << b.type();
+ }
+ std::string failure_info_str = failure_info.str();
+ if (failure_info_str.empty()) {
+ return ::testing::AssertionSuccess();
+ } else {
+ return ::testing::AssertionFailure()
+ << a_expr << " and " << b_expr << " are not equal"
+ << failure_info_str;
+ }
+}
+
+TEST_P(PeerConnectionIceTest, OfferContainsGatheredCandidates) {
+ const SocketAddress kLocalAddress("1.1.1.1", 0);
+
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ caller->network()->AddInterface(kLocalAddress);
+
+ // Start ICE candidate gathering by setting the local offer.
+ ASSERT_TRUE(caller->SetLocalDescription(caller->CreateOffer()));
+
+ EXPECT_TRUE_WAIT(caller->IsIceGatheringDone(), kIceCandidatesTimeout);
+
+ auto offer = caller->CreateOffer();
+ EXPECT_LT(0u, caller->observer()->GetCandidatesByMline(0).size());
+ EXPECT_EQ(caller->observer()->GetCandidatesByMline(0).size(),
+ offer->candidates(0)->count());
+ EXPECT_LT(0u, caller->observer()->GetCandidatesByMline(1).size());
+ EXPECT_EQ(caller->observer()->GetCandidatesByMline(1).size(),
+ offer->candidates(1)->count());
+}
+
+TEST_P(PeerConnectionIceTest, AnswerContainsGatheredCandidates) {
+ const SocketAddress kCallerAddress("1.1.1.1", 0);
+
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnectionWithAudioVideo();
+ caller->network()->AddInterface(kCallerAddress);
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ ASSERT_TRUE(callee->SetLocalDescription(callee->CreateAnswer()));
+
+ EXPECT_TRUE_WAIT(callee->IsIceGatheringDone(), kIceCandidatesTimeout);
+
+ auto* answer = callee->pc()->local_description();
+ EXPECT_LT(0u, caller->observer()->GetCandidatesByMline(0).size());
+ EXPECT_EQ(callee->observer()->GetCandidatesByMline(0).size(),
+ answer->candidates(0)->count());
+ EXPECT_LT(0u, caller->observer()->GetCandidatesByMline(1).size());
+ EXPECT_EQ(callee->observer()->GetCandidatesByMline(1).size(),
+ answer->candidates(1)->count());
+}
+
+TEST_P(PeerConnectionIceTest,
+ CanSetRemoteSessionDescriptionWithRemoteCandidates) {
+ const SocketAddress kCallerAddress("1.1.1.1", 1111);
+
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnectionWithAudioVideo();
+
+ auto offer = caller->CreateOfferAndSetAsLocal();
+ cricket::Candidate candidate = CreateLocalUdpCandidate(kCallerAddress);
+ AddCandidateToFirstTransport(&candidate, offer.get());
+
+ ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer)));
+ auto remote_candidates = callee->GetIceCandidatesFromRemoteDescription();
+ ASSERT_EQ(1u, remote_candidates.size());
+ EXPECT_PRED_FORMAT2(AssertCandidatesEqual, candidate,
+ remote_candidates[0]->candidate());
+}
+
+TEST_P(PeerConnectionIceTest, SetLocalDescriptionFailsIfNoIceCredentials) {
+ auto caller = CreatePeerConnectionWithAudioVideo();
+
+ auto offer = caller->CreateOffer();
+ RemoveIceUfragPwd(offer.get());
+
+ EXPECT_FALSE(caller->SetLocalDescription(std::move(offer)));
+}
+
+TEST_P(PeerConnectionIceTest, SetRemoteDescriptionFailsIfNoIceCredentials) {
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnectionWithAudioVideo();
+
+ auto offer = caller->CreateOfferAndSetAsLocal();
+ RemoveIceUfragPwd(offer.get());
+
+ EXPECT_FALSE(callee->SetRemoteDescription(std::move(offer)));
+}
+
+// Test that doing an offer/answer exchange with no transport (i.e., no data
+// channel or media) results in the ICE connection state staying at New.
+TEST_P(PeerConnectionIceTest,
+ OfferAnswerWithNoTransportsDoesNotChangeIceConnectionState) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+
+ EXPECT_EQ(PeerConnectionInterface::kIceConnectionNew,
+ caller->pc()->ice_connection_state());
+ EXPECT_EQ(PeerConnectionInterface::kIceConnectionNew,
+ callee->pc()->ice_connection_state());
+}
+
+// The following group tests that ICE candidates are not generated before
+// SetLocalDescription is called on a PeerConnection.
+
+TEST_P(PeerConnectionIceTest, NoIceCandidatesBeforeSetLocalDescription) {
+ const SocketAddress kLocalAddress("1.1.1.1", 0);
+
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ caller->network()->AddInterface(kLocalAddress);
+
+ // Pump for 1 second and verify that no candidates are generated.
+ rtc::Thread::Current()->ProcessMessages(1000);
+
+ EXPECT_EQ(0u, caller->observer()->candidates_.size());
+}
+TEST_P(PeerConnectionIceTest,
+ NoIceCandidatesBeforeAnswerSetAsLocalDescription) {
+ const SocketAddress kCallerAddress("1.1.1.1", 1111);
+
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnectionWithAudioVideo();
+ caller->network()->AddInterface(kCallerAddress);
+
+ auto offer = caller->CreateOfferAndSetAsLocal();
+ cricket::Candidate candidate = CreateLocalUdpCandidate(kCallerAddress);
+ AddCandidateToFirstTransport(&candidate, offer.get());
+ ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer)));
+
+ // Pump for 1 second and verify that no candidates are generated.
+ rtc::Thread::Current()->ProcessMessages(1000);
+
+ EXPECT_EQ(0u, callee->observer()->candidates_.size());
+}
+
+TEST_P(PeerConnectionIceTest, CannotAddCandidateWhenRemoteDescriptionNotSet) {
+ const SocketAddress kCalleeAddress("1.1.1.1", 1111);
+
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ cricket::Candidate candidate = CreateLocalUdpCandidate(kCalleeAddress);
+ std::unique_ptr<IceCandidateInterface> jsep_candidate =
+ CreateIceCandidate(cricket::CN_AUDIO, 0, candidate);
+
+ EXPECT_FALSE(caller->pc()->AddIceCandidate(jsep_candidate.get()));
+
+ caller->CreateOfferAndSetAsLocal();
+
+ EXPECT_FALSE(caller->pc()->AddIceCandidate(jsep_candidate.get()));
+ EXPECT_METRIC_THAT(
+ webrtc::metrics::Samples("WebRTC.PeerConnection.AddIceCandidate"),
+ ElementsAre(Pair(kAddIceCandidateFailNoRemoteDescription, 2)));
+}
+
+TEST_P(PeerConnectionIceTest, CannotAddCandidateWhenPeerConnectionClosed) {
+ const SocketAddress kCalleeAddress("1.1.1.1", 1111);
+
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnectionWithAudioVideo();
+
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+
+ cricket::Candidate candidate = CreateLocalUdpCandidate(kCalleeAddress);
+ auto* audio_content = cricket::GetFirstAudioContent(
+ caller->pc()->local_description()->description());
+ std::unique_ptr<IceCandidateInterface> jsep_candidate =
+ CreateIceCandidate(audio_content->name, 0, candidate);
+
+ caller->pc()->Close();
+
+ EXPECT_FALSE(caller->pc()->AddIceCandidate(jsep_candidate.get()));
+}
+
+TEST_P(PeerConnectionIceTest, DuplicateIceCandidateIgnoredWhenAdded) {
+ const SocketAddress kCalleeAddress("1.1.1.1", 1111);
+
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnectionWithAudioVideo();
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ ASSERT_TRUE(
+ caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal()));
+
+ cricket::Candidate candidate = CreateLocalUdpCandidate(kCalleeAddress);
+ caller->AddIceCandidate(&candidate);
+ EXPECT_TRUE(caller->AddIceCandidate(&candidate));
+ EXPECT_EQ(1u, caller->GetIceCandidatesFromRemoteDescription().size());
+}
+
+// TODO(tommi): Re-enable after updating RTCPeerConnection-blockedPorts.html in
+// Chromium (the test needs setRemoteDescription to succeed for an invalid
+// candidate).
+TEST_P(PeerConnectionIceTest, DISABLED_ErrorOnInvalidRemoteIceCandidateAdded) {
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnectionWithAudioVideo();
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ // Add a candidate to the remote description with a candidate that has an
+ // invalid address (port number == 2).
+ auto answer = callee->CreateAnswerAndSetAsLocal();
+ cricket::Candidate bad_candidate =
+ CreateLocalUdpCandidate(SocketAddress("2.2.2.2", 2));
+ RTC_LOG(LS_INFO) << "Bad candidate: " << bad_candidate.ToString();
+ AddCandidateToFirstTransport(&bad_candidate, answer.get());
+ // Now the call to SetRemoteDescription should fail.
+ EXPECT_FALSE(caller->SetRemoteDescription(std::move(answer)));
+}
+
+TEST_P(PeerConnectionIceTest,
+ CannotRemoveIceCandidatesWhenPeerConnectionClosed) {
+ const SocketAddress kCalleeAddress("1.1.1.1", 1111);
+
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnectionWithAudioVideo();
+
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+
+ cricket::Candidate candidate = CreateLocalUdpCandidate(kCalleeAddress);
+ auto* audio_content = cricket::GetFirstAudioContent(
+ caller->pc()->local_description()->description());
+ std::unique_ptr<IceCandidateInterface> ice_candidate =
+ CreateIceCandidate(audio_content->name, 0, candidate);
+
+ ASSERT_TRUE(caller->pc()->AddIceCandidate(ice_candidate.get()));
+
+ caller->pc()->Close();
+
+ EXPECT_FALSE(caller->pc()->RemoveIceCandidates({candidate}));
+}
+
+TEST_P(PeerConnectionIceTest,
+ AddRemoveCandidateWithEmptyTransportDoesNotCrash) {
+ const SocketAddress kCalleeAddress("1.1.1.1", 1111);
+
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnectionWithAudioVideo();
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ ASSERT_TRUE(
+ caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal()));
+
+ // `candidate.transport_name()` is empty.
+ cricket::Candidate candidate = CreateLocalUdpCandidate(kCalleeAddress);
+ auto* audio_content = cricket::GetFirstAudioContent(
+ caller->pc()->local_description()->description());
+ std::unique_ptr<IceCandidateInterface> ice_candidate =
+ CreateIceCandidate(audio_content->name, 0, candidate);
+ EXPECT_TRUE(caller->pc()->AddIceCandidate(ice_candidate.get()));
+ EXPECT_TRUE(caller->pc()->RemoveIceCandidates({candidate}));
+}
+
+TEST_P(PeerConnectionIceTest, RemoveCandidateRemovesFromRemoteDescription) {
+ const SocketAddress kCalleeAddress("1.1.1.1", 1111);
+
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnectionWithAudioVideo();
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ ASSERT_TRUE(
+ caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal()));
+
+ cricket::Candidate candidate = CreateLocalUdpCandidate(kCalleeAddress);
+ ASSERT_TRUE(caller->AddIceCandidate(&candidate));
+ EXPECT_TRUE(caller->pc()->RemoveIceCandidates({candidate}));
+ EXPECT_EQ(0u, caller->GetIceCandidatesFromRemoteDescription().size());
+}
+
+// Test that if a candidate is added via AddIceCandidate and via an updated
+// remote description, then both candidates appear in the stored remote
+// description.
+TEST_P(PeerConnectionIceTest,
+ CandidateInSubsequentOfferIsAddedToRemoteDescription) {
+ const SocketAddress kCallerAddress1("1.1.1.1", 1111);
+ const SocketAddress kCallerAddress2("2.2.2.2", 2222);
+
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnectionWithAudioVideo();
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ ASSERT_TRUE(
+ caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal()));
+
+ // Add one candidate via `AddIceCandidate`.
+ cricket::Candidate candidate1 = CreateLocalUdpCandidate(kCallerAddress1);
+ ASSERT_TRUE(callee->AddIceCandidate(&candidate1));
+
+ // Add the second candidate via a reoffer.
+ auto offer = caller->CreateOffer();
+ cricket::Candidate candidate2 = CreateLocalUdpCandidate(kCallerAddress2);
+ AddCandidateToFirstTransport(&candidate2, offer.get());
+
+ // Expect both candidates to appear in the callee's remote description.
+ ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer)));
+ EXPECT_EQ(2u, callee->GetIceCandidatesFromRemoteDescription().size());
+}
+
+// The follow test verifies that SetLocal/RemoteDescription fails when an offer
+// has either ICE ufrag/pwd too short or too long and succeeds otherwise.
+// The standard (https://tools.ietf.org/html/rfc5245#section-15.4) says that
+// pwd must be 22-256 characters and ufrag must be 4-256 characters.
+TEST_P(PeerConnectionIceTest, VerifyUfragPwdLength) {
+ auto set_local_description_with_ufrag_pwd_length = [this](int ufrag_len,
+ int pwd_len) {
+ auto pc = CreatePeerConnectionWithAudioVideo();
+ auto offer = pc->CreateOffer();
+ SetIceUfragPwd(offer.get(), std::string(ufrag_len, 'x'),
+ std::string(pwd_len, 'x'));
+ return pc->SetLocalDescription(std::move(offer));
+ };
+
+ auto set_remote_description_with_ufrag_pwd_length = [this](int ufrag_len,
+ int pwd_len) {
+ auto pc = CreatePeerConnectionWithAudioVideo();
+ auto offer = pc->CreateOffer();
+ SetIceUfragPwd(offer.get(), std::string(ufrag_len, 'x'),
+ std::string(pwd_len, 'x'));
+ return pc->SetRemoteDescription(std::move(offer));
+ };
+
+ EXPECT_FALSE(set_local_description_with_ufrag_pwd_length(3, 22));
+ EXPECT_FALSE(set_remote_description_with_ufrag_pwd_length(3, 22));
+ EXPECT_FALSE(set_local_description_with_ufrag_pwd_length(257, 22));
+ EXPECT_FALSE(set_remote_description_with_ufrag_pwd_length(257, 22));
+ EXPECT_FALSE(set_local_description_with_ufrag_pwd_length(4, 21));
+ EXPECT_FALSE(set_remote_description_with_ufrag_pwd_length(4, 21));
+ EXPECT_FALSE(set_local_description_with_ufrag_pwd_length(4, 257));
+ EXPECT_FALSE(set_remote_description_with_ufrag_pwd_length(4, 257));
+ EXPECT_TRUE(set_local_description_with_ufrag_pwd_length(4, 22));
+ EXPECT_TRUE(set_remote_description_with_ufrag_pwd_length(4, 22));
+ EXPECT_TRUE(set_local_description_with_ufrag_pwd_length(256, 256));
+ EXPECT_TRUE(set_remote_description_with_ufrag_pwd_length(256, 256));
+}
+
+::testing::AssertionResult AssertIpInCandidates(
+ const char* address_expr,
+ const char* candidates_expr,
+ const SocketAddress& address,
+ const std::vector<IceCandidateInterface*> candidates) {
+ rtc::StringBuilder candidate_hosts;
+ for (const auto* candidate : candidates) {
+ const auto& candidate_ip = candidate->candidate().address().ipaddr();
+ if (candidate_ip == address.ipaddr()) {
+ return ::testing::AssertionSuccess();
+ }
+ candidate_hosts << "\n" << candidate_ip.ToString();
+ }
+ return ::testing::AssertionFailure()
+ << address_expr << " (host " << address.HostAsURIString()
+ << ") not in " << candidates_expr
+ << " which have the following address hosts:" << candidate_hosts.str();
+}
+
+TEST_P(PeerConnectionIceTest, CandidatesGeneratedForEachLocalInterface) {
+ const SocketAddress kLocalAddress1("1.1.1.1", 0);
+ const SocketAddress kLocalAddress2("2.2.2.2", 0);
+
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ caller->network()->AddInterface(kLocalAddress1);
+ caller->network()->AddInterface(kLocalAddress2);
+
+ caller->CreateOfferAndSetAsLocal();
+ EXPECT_TRUE_WAIT(caller->IsIceGatheringDone(), kIceCandidatesTimeout);
+
+ auto candidates = caller->observer()->GetCandidatesByMline(0);
+ EXPECT_PRED_FORMAT2(AssertIpInCandidates, kLocalAddress1, candidates);
+ EXPECT_PRED_FORMAT2(AssertIpInCandidates, kLocalAddress2, candidates);
+}
+
+TEST_P(PeerConnectionIceTest, TrickledSingleCandidateAddedToRemoteDescription) {
+ const SocketAddress kCallerAddress("1.1.1.1", 1111);
+
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnectionWithAudioVideo();
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+
+ cricket::Candidate candidate = CreateLocalUdpCandidate(kCallerAddress);
+ callee->AddIceCandidate(&candidate);
+ auto candidates = callee->GetIceCandidatesFromRemoteDescription();
+ ASSERT_EQ(1u, candidates.size());
+ EXPECT_PRED_FORMAT2(AssertCandidatesEqual, candidate,
+ candidates[0]->candidate());
+}
+
+TEST_P(PeerConnectionIceTest, TwoTrickledCandidatesAddedToRemoteDescription) {
+ const SocketAddress kCalleeAddress1("1.1.1.1", 1111);
+ const SocketAddress kCalleeAddress2("2.2.2.2", 2222);
+
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnectionWithAudioVideo();
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ ASSERT_TRUE(
+ caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal()));
+
+ cricket::Candidate candidate1 = CreateLocalUdpCandidate(kCalleeAddress1);
+ caller->AddIceCandidate(&candidate1);
+
+ cricket::Candidate candidate2 = CreateLocalUdpCandidate(kCalleeAddress2);
+ caller->AddIceCandidate(&candidate2);
+
+ auto candidates = caller->GetIceCandidatesFromRemoteDescription();
+ ASSERT_EQ(2u, candidates.size());
+ EXPECT_PRED_FORMAT2(AssertCandidatesEqual, candidate1,
+ candidates[0]->candidate());
+ EXPECT_PRED_FORMAT2(AssertCandidatesEqual, candidate2,
+ candidates[1]->candidate());
+}
+
+TEST_P(PeerConnectionIceTest, AsyncAddIceCandidateIsAddedToRemoteDescription) {
+ auto candidate = CreateLocalUdpCandidate(SocketAddress("1.1.1.1", 1111));
+
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnectionWithAudioVideo();
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+
+ auto jsep_candidate =
+ callee->CreateJsepCandidateForFirstTransport(&candidate);
+ bool operation_completed = false;
+ callee->pc()->AddIceCandidate(std::move(jsep_candidate),
+ [&operation_completed](RTCError result) {
+ EXPECT_TRUE(result.ok());
+ operation_completed = true;
+ });
+ EXPECT_TRUE_WAIT(operation_completed, kWaitTimeout);
+
+ auto candidates = callee->GetIceCandidatesFromRemoteDescription();
+ ASSERT_EQ(1u, candidates.size());
+ EXPECT_PRED_FORMAT2(AssertCandidatesEqual, candidate,
+ candidates[0]->candidate());
+}
+
+TEST_P(PeerConnectionIceTest,
+ AsyncAddIceCandidateCompletesImmediatelyIfNoPendingOperation) {
+ auto candidate = CreateLocalUdpCandidate(SocketAddress("1.1.1.1", 1111));
+
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnectionWithAudioVideo();
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+
+ auto jsep_candidate =
+ callee->CreateJsepCandidateForFirstTransport(&candidate);
+ bool operation_completed = false;
+ callee->pc()->AddIceCandidate(
+ std::move(jsep_candidate),
+ [&operation_completed](RTCError result) { operation_completed = true; });
+ EXPECT_TRUE(operation_completed);
+}
+
+TEST_P(PeerConnectionIceTest,
+ AsyncAddIceCandidateCompletesWhenPendingOperationCompletes) {
+ auto candidate = CreateLocalUdpCandidate(SocketAddress("1.1.1.1", 1111));
+
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnectionWithAudioVideo();
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+
+ // Chain an operation that will block AddIceCandidate() from executing.
+ auto answer_observer =
+ rtc::make_ref_counted<MockCreateSessionDescriptionObserver>();
+ callee->pc()->CreateAnswer(answer_observer.get(), RTCOfferAnswerOptions());
+
+ auto jsep_candidate =
+ callee->CreateJsepCandidateForFirstTransport(&candidate);
+ bool operation_completed = false;
+ callee->pc()->AddIceCandidate(
+ std::move(jsep_candidate),
+ [&operation_completed](RTCError result) { operation_completed = true; });
+  // The operation will not be able to complete until we EXPECT_TRUE_WAIT(),
+  // allowing CreateAnswer() to complete.
+ EXPECT_FALSE(operation_completed);
+ EXPECT_TRUE_WAIT(answer_observer->called(), kWaitTimeout);
+ // As soon as it does, AddIceCandidate() will execute without delay, so it
+ // must also have completed.
+ EXPECT_TRUE(operation_completed);
+}
+
+TEST_P(PeerConnectionIceTest,
+ AsyncAddIceCandidateFailsBeforeSetRemoteDescription) {
+ auto candidate = CreateLocalUdpCandidate(SocketAddress("1.1.1.1", 1111));
+
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ std::unique_ptr<IceCandidateInterface> jsep_candidate =
+ CreateIceCandidate(cricket::CN_AUDIO, 0, candidate);
+
+ bool operation_completed = false;
+ caller->pc()->AddIceCandidate(
+ std::move(jsep_candidate), [&operation_completed](RTCError result) {
+ EXPECT_FALSE(result.ok());
+ EXPECT_EQ(result.message(),
+ std::string("The remote description was null"));
+ operation_completed = true;
+ });
+ EXPECT_TRUE_WAIT(operation_completed, kWaitTimeout);
+}
+
+TEST_P(PeerConnectionIceTest,
+ AsyncAddIceCandidateFailsIfPeerConnectionDestroyed) {
+ auto candidate = CreateLocalUdpCandidate(SocketAddress("1.1.1.1", 1111));
+
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnectionWithAudioVideo();
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+
+ // Chain an operation that will block AddIceCandidate() from executing.
+ auto answer_observer =
+ rtc::make_ref_counted<MockCreateSessionDescriptionObserver>();
+ callee->pc()->CreateAnswer(answer_observer.get(), RTCOfferAnswerOptions());
+
+ auto jsep_candidate =
+ callee->CreateJsepCandidateForFirstTransport(&candidate);
+ bool operation_completed = false;
+ callee->pc()->AddIceCandidate(
+ std::move(jsep_candidate), [&operation_completed](RTCError result) {
+ EXPECT_FALSE(result.ok());
+ EXPECT_EQ(
+ result.message(),
+ std::string(
+ "AddIceCandidate failed because the session was shut down"));
+ operation_completed = true;
+ });
+ // The operation will not be able to run until EXPECT_TRUE_WAIT(), giving us
+ // time to remove all references to the PeerConnection.
+ EXPECT_FALSE(operation_completed);
+ // This should delete the callee PC.
+ callee = nullptr;
+ EXPECT_TRUE_WAIT(operation_completed, kWaitTimeout);
+}
+
+TEST_P(PeerConnectionIceTest, LocalDescriptionUpdatedWhenContinualGathering) {
+ const SocketAddress kLocalAddress("1.1.1.1", 0);
+
+ RTCConfiguration config;
+ config.sdp_semantics = SdpSemantics::kUnifiedPlan;
+ config.continual_gathering_policy =
+ PeerConnectionInterface::GATHER_CONTINUALLY;
+ auto caller = CreatePeerConnectionWithAudioVideo(config);
+ caller->network()->AddInterface(kLocalAddress);
+
+ // Start ICE candidate gathering by setting the local offer.
+ ASSERT_TRUE(caller->SetLocalDescription(caller->CreateOffer()));
+
+ // Since we're using continual gathering, we won't get "gathering done".
+ EXPECT_TRUE_WAIT(
+ caller->pc()->local_description()->candidates(0)->count() > 0,
+ kIceCandidatesTimeout);
+}
+
+// Test that when continual gathering is enabled, and a network interface goes
+// down, the candidate is signaled as removed and removed from the local
+// description.
+TEST_P(PeerConnectionIceTest,
+ LocalCandidatesRemovedWhenNetworkDownIfGatheringContinually) {
+ const SocketAddress kLocalAddress("1.1.1.1", 0);
+
+ RTCConfiguration config;
+ config.sdp_semantics = SdpSemantics::kUnifiedPlan;
+ config.continual_gathering_policy =
+ PeerConnectionInterface::GATHER_CONTINUALLY;
+ auto caller = CreatePeerConnectionWithAudioVideo(config);
+ caller->network()->AddInterface(kLocalAddress);
+
+ // Start ICE candidate gathering by setting the local offer.
+ ASSERT_TRUE(caller->SetLocalDescription(caller->CreateOffer()));
+
+ EXPECT_TRUE_WAIT(
+ caller->pc()->local_description()->candidates(0)->count() > 0,
+ kIceCandidatesTimeout);
+
+ // Remove the only network interface, causing the PeerConnection to signal
+ // the removal of all candidates derived from this interface.
+ caller->network()->RemoveInterface(kLocalAddress);
+
+ EXPECT_EQ_WAIT(0u, caller->pc()->local_description()->candidates(0)->count(),
+ kIceCandidatesTimeout);
+ EXPECT_LT(0, caller->observer()->num_candidates_removed_);
+}
+
+TEST_P(PeerConnectionIceTest,
+ LocalCandidatesNotRemovedWhenNetworkDownIfGatheringOnce) {
+ const SocketAddress kLocalAddress("1.1.1.1", 0);
+
+ RTCConfiguration config;
+ config.sdp_semantics = SdpSemantics::kUnifiedPlan;
+ config.continual_gathering_policy = PeerConnectionInterface::GATHER_ONCE;
+ auto caller = CreatePeerConnectionWithAudioVideo(config);
+ caller->network()->AddInterface(kLocalAddress);
+
+ // Start ICE candidate gathering by setting the local offer.
+ ASSERT_TRUE(caller->SetLocalDescription(caller->CreateOffer()));
+
+ EXPECT_TRUE_WAIT(caller->IsIceGatheringDone(), kIceCandidatesTimeout);
+
+ caller->network()->RemoveInterface(kLocalAddress);
+
+  // Verify that the local candidates are not removed.
+ rtc::Thread::Current()->ProcessMessages(1000);
+ EXPECT_EQ(0, caller->observer()->num_candidates_removed_);
+}
+
+// The following group tests that when an offer includes a new ufrag or pwd
+// (indicating an ICE restart) the old candidates are removed and new candidates
+// added to the remote description.
+
+TEST_P(PeerConnectionIceTest, IceRestartOfferClearsExistingCandidate) {
+ const SocketAddress kCallerAddress("1.1.1.1", 1111);
+
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnectionWithAudioVideo();
+
+ auto offer = caller->CreateOfferAndSetAsLocal();
+ cricket::Candidate candidate = CreateLocalUdpCandidate(kCallerAddress);
+ AddCandidateToFirstTransport(&candidate, offer.get());
+
+ ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer)));
+
+ RTCOfferAnswerOptions options;
+ options.ice_restart = true;
+ ASSERT_TRUE(
+ callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal(options)));
+
+ EXPECT_EQ(0u, callee->GetIceCandidatesFromRemoteDescription().size());
+}
+TEST_P(PeerConnectionIceTest,
+ IceRestartOfferCandidateReplacesExistingCandidate) {
+ const SocketAddress kFirstCallerAddress("1.1.1.1", 1111);
+ const SocketAddress kRestartedCallerAddress("2.2.2.2", 2222);
+
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnectionWithAudioVideo();
+
+ auto offer = caller->CreateOfferAndSetAsLocal();
+ cricket::Candidate old_candidate =
+ CreateLocalUdpCandidate(kFirstCallerAddress);
+ AddCandidateToFirstTransport(&old_candidate, offer.get());
+
+ ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer)));
+
+ RTCOfferAnswerOptions options;
+ options.ice_restart = true;
+ auto restart_offer = caller->CreateOfferAndSetAsLocal(options);
+ cricket::Candidate new_candidate =
+ CreateLocalUdpCandidate(kRestartedCallerAddress);
+ AddCandidateToFirstTransport(&new_candidate, restart_offer.get());
+
+ ASSERT_TRUE(callee->SetRemoteDescription(std::move(restart_offer)));
+
+ auto remote_candidates = callee->GetIceCandidatesFromRemoteDescription();
+ ASSERT_EQ(1u, remote_candidates.size());
+ EXPECT_PRED_FORMAT2(AssertCandidatesEqual, new_candidate,
+ remote_candidates[0]->candidate());
+}
+
+// Test that if there is not an ICE restart (i.e., nothing changes), then the
+// answer to a later offer should have the same ufrag/pwd as the first answer.
+TEST_P(PeerConnectionIceTest, LaterAnswerHasSameIceCredentialsIfNoIceRestart) {
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnectionWithAudioVideo();
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ ASSERT_TRUE(
+ caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal()));
+
+ // Re-offer.
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+
+ auto answer = callee->CreateAnswer();
+ auto* answer_transport_desc = GetFirstTransportDescription(answer.get());
+ auto* local_transport_desc =
+ GetFirstTransportDescription(callee->pc()->local_description());
+
+ EXPECT_EQ(answer_transport_desc->ice_ufrag, local_transport_desc->ice_ufrag);
+ EXPECT_EQ(answer_transport_desc->ice_pwd, local_transport_desc->ice_pwd);
+}
+
+TEST_P(PeerConnectionIceTest, RestartIceGeneratesNewCredentials) {
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnectionWithAudioVideo();
+
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+ auto initial_ice_credentials =
+ GetIceCredentials(caller->pc()->local_description());
+ caller->pc()->RestartIce();
+ ASSERT_TRUE(caller->CreateOfferAndSetAsLocal());
+ auto restarted_ice_credentials =
+ GetIceCredentials(caller->pc()->local_description());
+ EXPECT_NE(initial_ice_credentials, restarted_ice_credentials);
+}
+
+TEST_P(PeerConnectionIceTest,
+ RestartIceWhileLocalOfferIsPendingGeneratesNewCredentialsInNextOffer) {
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnectionWithAudioVideo();
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ auto initial_ice_credentials =
+ GetIceCredentials(caller->pc()->local_description());
+ // ICE restart becomes needed while an O/A is pending and `caller` is the
+ // offerer.
+ caller->pc()->RestartIce();
+ ASSERT_TRUE(
+ caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal()));
+ ASSERT_TRUE(caller->CreateOfferAndSetAsLocal());
+ auto restarted_ice_credentials =
+ GetIceCredentials(caller->pc()->local_description());
+ EXPECT_NE(initial_ice_credentials, restarted_ice_credentials);
+}
+
+TEST_P(PeerConnectionIceTest,
+ RestartIceWhileRemoteOfferIsPendingGeneratesNewCredentialsInNextOffer) {
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnectionWithAudioVideo();
+
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+ auto initial_ice_credentials =
+ GetIceCredentials(caller->pc()->local_description());
+ ASSERT_TRUE(caller->SetRemoteDescription(callee->CreateOfferAndSetAsLocal()));
+ // ICE restart becomes needed while an O/A is pending and `caller` is the
+ // answerer.
+ caller->pc()->RestartIce();
+ ASSERT_TRUE(
+ callee->SetRemoteDescription(caller->CreateAnswerAndSetAsLocal()));
+ ASSERT_TRUE(caller->CreateOfferAndSetAsLocal());
+ auto restarted_ice_credentials =
+ GetIceCredentials(caller->pc()->local_description());
+ EXPECT_NE(initial_ice_credentials, restarted_ice_credentials);
+}
+
+TEST_P(PeerConnectionIceTest, RestartIceTriggeredByRemoteSide) {
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnectionWithAudioVideo();
+
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+ auto initial_ice_credentials =
+ GetIceCredentials(caller->pc()->local_description());
+
+ // Remote restart and O/A exchange with `caller` as the answerer should
+ // restart ICE locally as well.
+ callee->pc()->RestartIce();
+ ASSERT_TRUE(callee->ExchangeOfferAnswerWith(caller.get()));
+
+ auto restarted_ice_credentials =
+ GetIceCredentials(caller->pc()->local_description());
+ EXPECT_NE(initial_ice_credentials, restarted_ice_credentials);
+}
+
+TEST_P(PeerConnectionIceTest, RestartIceCausesNegotiationNeeded) {
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnectionWithAudioVideo();
+
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+ caller->observer()->clear_legacy_renegotiation_needed();
+ caller->observer()->clear_latest_negotiation_needed_event();
+ caller->pc()->RestartIce();
+ EXPECT_TRUE(caller->observer()->legacy_renegotiation_needed());
+ EXPECT_TRUE(caller->observer()->has_negotiation_needed_event());
+}
+
+// In Unified Plan, "onnegotiationneeded" is spec-compliant, including not
+// firing multiple times in a row, or firing when returning to the stable
+// signaling state if negotiation is still needed. In Plan B it fires any time
+// something changes. As such, some tests are SdpSemantics-specific.
+class PeerConnectionIceTestUnifiedPlan : public PeerConnectionIceBaseTest {
+ protected:
+ PeerConnectionIceTestUnifiedPlan()
+ : PeerConnectionIceBaseTest(SdpSemantics::kUnifiedPlan) {}
+};
+
+TEST_F(PeerConnectionIceTestUnifiedPlan,
+ RestartIceWhileLocalOfferIsPendingCausesNegotiationNeededWhenStable) {
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnectionWithAudioVideo();
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ // ICE restart becomes needed while an O/A is pending and `caller` is the
+ // offerer.
+ caller->observer()->clear_legacy_renegotiation_needed();
+ caller->observer()->clear_latest_negotiation_needed_event();
+ caller->pc()->RestartIce();
+ // In Unified Plan, the event should not fire until we are back in the stable
+ // signaling state.
+ EXPECT_FALSE(caller->observer()->legacy_renegotiation_needed());
+ EXPECT_FALSE(caller->observer()->has_negotiation_needed_event());
+ ASSERT_TRUE(
+ caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal()));
+ EXPECT_TRUE(caller->observer()->legacy_renegotiation_needed());
+ EXPECT_TRUE(caller->observer()->has_negotiation_needed_event());
+}
+
+TEST_F(PeerConnectionIceTestUnifiedPlan,
+ RestartIceWhileRemoteOfferIsPendingCausesNegotiationNeededWhenStable) {
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnectionWithAudioVideo();
+
+ // Establish initial credentials as the caller.
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+ ASSERT_TRUE(caller->SetRemoteDescription(callee->CreateOfferAndSetAsLocal()));
+ // ICE restart becomes needed while an O/A is pending and `caller` is the
+ // answerer.
+ caller->observer()->clear_legacy_renegotiation_needed();
+ caller->observer()->clear_latest_negotiation_needed_event();
+ caller->pc()->RestartIce();
+ // In Unified Plan, the event should not fire until we are back in the stable
+ // signaling state.
+ EXPECT_FALSE(caller->observer()->legacy_renegotiation_needed());
+ EXPECT_FALSE(caller->observer()->has_negotiation_needed_event());
+ ASSERT_TRUE(
+ callee->SetRemoteDescription(caller->CreateAnswerAndSetAsLocal()));
+ EXPECT_TRUE(caller->observer()->legacy_renegotiation_needed());
+ EXPECT_TRUE(caller->observer()->has_negotiation_needed_event());
+}
+
+TEST_F(PeerConnectionIceTestUnifiedPlan,
+ RestartIceTriggeredByRemoteSideCauseNegotiationNotNeeded) {
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnectionWithAudioVideo();
+
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+ // Local restart.
+ caller->pc()->RestartIce();
+ caller->observer()->clear_legacy_renegotiation_needed();
+ caller->observer()->clear_latest_negotiation_needed_event();
+ // Remote restart and O/A exchange with `caller` as the answerer should
+ // restart ICE locally as well.
+ callee->pc()->RestartIce();
+ ASSERT_TRUE(callee->ExchangeOfferAnswerWith(caller.get()));
+ // Having restarted ICE by the remote offer, we do not need to renegotiate ICE
+ // credentials when back in the stable signaling state.
+ EXPECT_FALSE(caller->observer()->legacy_renegotiation_needed());
+ EXPECT_FALSE(caller->observer()->has_negotiation_needed_event());
+}
+
+TEST_F(PeerConnectionIceTestUnifiedPlan,
+ RestartIceTwiceDoesNotFireNegotiationNeededTwice) {
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnectionWithAudioVideo();
+
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+ caller->pc()->RestartIce();
+ EXPECT_TRUE(caller->observer()->legacy_renegotiation_needed());
+ EXPECT_TRUE(caller->observer()->has_negotiation_needed_event());
+ caller->observer()->clear_legacy_renegotiation_needed();
+ caller->observer()->clear_latest_negotiation_needed_event();
+ caller->pc()->RestartIce();
+ EXPECT_FALSE(caller->observer()->legacy_renegotiation_needed());
+ EXPECT_FALSE(caller->observer()->has_negotiation_needed_event());
+}
+
+// In Plan B, "onnegotiationneeded" is not spec-compliant, firing based on
+// whether something changed rather than whether negotiation is needed. In
+// Unified Plan it fires per spec. As such, some tests are SdpSemantics-specific.
+class PeerConnectionIceTestPlanB : public PeerConnectionIceBaseTest {
+ protected:
+ PeerConnectionIceTestPlanB()
+ : PeerConnectionIceBaseTest(SdpSemantics::kPlanB_DEPRECATED) {}
+};
+
+TEST_F(PeerConnectionIceTestPlanB,
+ RestartIceWhileOfferIsPendingCausesNegotiationNeededImmediately) {
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnectionWithAudioVideo();
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ caller->observer()->clear_legacy_renegotiation_needed();
+ caller->observer()->clear_latest_negotiation_needed_event();
+ caller->pc()->RestartIce();
+ EXPECT_TRUE(caller->observer()->legacy_renegotiation_needed());
+ EXPECT_TRUE(caller->observer()->has_negotiation_needed_event());
+ caller->observer()->clear_legacy_renegotiation_needed();
+ caller->observer()->clear_latest_negotiation_needed_event();
+ ASSERT_TRUE(
+ caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal()));
+ // In Plan B, the event fired early so we don't expect it to fire now. This is
+ // not spec-compliant but follows the pattern of existing Plan B behavior.
+ EXPECT_FALSE(caller->observer()->legacy_renegotiation_needed());
+ EXPECT_FALSE(caller->observer()->has_negotiation_needed_event());
+}
+
+TEST_F(PeerConnectionIceTestPlanB,
+ RestartIceTwiceDoesFireNegotiationNeededTwice) {
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnectionWithAudioVideo();
+
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+ caller->observer()->clear_legacy_renegotiation_needed();
+ caller->observer()->clear_latest_negotiation_needed_event();
+ caller->pc()->RestartIce();
+ EXPECT_TRUE(caller->observer()->legacy_renegotiation_needed());
+ EXPECT_TRUE(caller->observer()->has_negotiation_needed_event());
+ caller->observer()->clear_legacy_renegotiation_needed();
+ caller->observer()->clear_latest_negotiation_needed_event();
+ caller->pc()->RestartIce();
+ // In Plan B, the event fires every time something changed, even if we have
+ // already fired the event. This is not spec-compliant but follows the same
+ // pattern of existing Plan B behavior.
+ EXPECT_TRUE(caller->observer()->legacy_renegotiation_needed());
+ EXPECT_TRUE(caller->observer()->has_negotiation_needed_event());
+}
+
+// The following parameterized test verifies that if an offer is sent with a
+// modified ICE ufrag and/or ICE pwd, then the answer should identify that the
+// other side has initiated an ICE restart and generate a new ufrag and pwd.
+// RFC 5245 says: "If the offer contained a change in the a=ice-ufrag or
+// a=ice-pwd attributes compared to the previous SDP from the peer, it
+// indicates that ICE is restarting for this media stream."
+
+class PeerConnectionIceUfragPwdAnswerTest
+ : public PeerConnectionIceBaseTest,
+ public ::testing::WithParamInterface<
+ std::tuple<SdpSemantics, std::tuple<bool, bool>>> {
+ protected:
+ PeerConnectionIceUfragPwdAnswerTest()
+ : PeerConnectionIceBaseTest(std::get<0>(GetParam())) {
+ auto param = std::get<1>(GetParam());
+ offer_new_ufrag_ = std::get<0>(param);
+ offer_new_pwd_ = std::get<1>(param);
+ }
+
+ bool offer_new_ufrag_;
+ bool offer_new_pwd_;
+};
+
+TEST_P(PeerConnectionIceUfragPwdAnswerTest, TestIncludedInAnswer) {
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnectionWithAudioVideo();
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ ASSERT_TRUE(
+ caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal()));
+
+ auto offer = caller->CreateOffer();
+ auto* offer_transport_desc = GetFirstTransportDescription(offer.get());
+ if (offer_new_ufrag_) {
+ offer_transport_desc->ice_ufrag += "+new";
+ }
+ if (offer_new_pwd_) {
+ offer_transport_desc->ice_pwd += "+new";
+ }
+
+ ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer)));
+
+ auto answer = callee->CreateAnswer();
+ auto* answer_transport_desc = GetFirstTransportDescription(answer.get());
+ auto* local_transport_desc =
+ GetFirstTransportDescription(callee->pc()->local_description());
+
+ EXPECT_NE(answer_transport_desc->ice_ufrag, local_transport_desc->ice_ufrag);
+ EXPECT_NE(answer_transport_desc->ice_pwd, local_transport_desc->ice_pwd);
+}
+
+INSTANTIATE_TEST_SUITE_P(
+ PeerConnectionIceTest,
+ PeerConnectionIceUfragPwdAnswerTest,
+ Combine(Values(SdpSemantics::kPlanB_DEPRECATED, SdpSemantics::kUnifiedPlan),
+ Values(std::make_pair(true, true), // Both changed.
+ std::make_pair(true, false), // Only ufrag changed.
+ std::make_pair(false, true)))); // Only pwd changed.
+
+// Test that if an ICE restart is offered on one media section, then the answer
+// will only change ICE ufrag/pwd for that section and keep the other sections
+// the same.
+// Note that this only works if we have disabled BUNDLE, otherwise all media
+// sections will share the same transport.
+TEST_P(PeerConnectionIceTest,
+ CreateAnswerHasNewUfragPwdForOnlyMediaSectionWhichRestarted) {
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnectionWithAudioVideo();
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ ASSERT_TRUE(
+ caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal()));
+
+ RTCOfferAnswerOptions disable_bundle_options;
+ disable_bundle_options.use_rtp_mux = false;
+
+ auto offer = caller->CreateOffer(disable_bundle_options);
+
+ // Signal ICE restart on the first media section.
+ auto* offer_transport_desc = GetFirstTransportDescription(offer.get());
+ offer_transport_desc->ice_ufrag += "+new";
+ offer_transport_desc->ice_pwd += "+new";
+
+ ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer)));
+
+ auto answer = callee->CreateAnswer(disable_bundle_options);
+ const auto& answer_transports = answer->description()->transport_infos();
+ const auto& local_transports =
+ callee->pc()->local_description()->description()->transport_infos();
+
+ EXPECT_NE(answer_transports[0].description.ice_ufrag,
+ local_transports[0].description.ice_ufrag);
+ EXPECT_NE(answer_transports[0].description.ice_pwd,
+ local_transports[0].description.ice_pwd);
+ EXPECT_EQ(answer_transports[1].description.ice_ufrag,
+ local_transports[1].description.ice_ufrag);
+ EXPECT_EQ(answer_transports[1].description.ice_pwd,
+ local_transports[1].description.ice_pwd);
+}
+
+// Test that when the initial offerer (caller) uses the lite implementation of
+// ICE and the callee uses the full implementation, the caller takes the
+// CONTROLLED role and the callee takes the CONTROLLING role. This is specified
+// in RFC5245 Section 5.1.1.
+TEST_P(PeerConnectionIceTest,
+ OfferFromLiteIceControlledAndAnswerFromFullIceControlling) {
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnectionWithAudioVideo();
+
+ auto offer = caller->CreateOffer();
+ SetIceMode(offer.get(), cricket::IceMode::ICEMODE_LITE);
+ ASSERT_TRUE(
+ caller->SetLocalDescription(CloneSessionDescription(offer.get())));
+ ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer)));
+
+ auto answer = callee->CreateAnswer();
+ SetIceMode(answer.get(), cricket::IceMode::ICEMODE_FULL);
+ ASSERT_TRUE(
+ callee->SetLocalDescription(CloneSessionDescription(answer.get())));
+ ASSERT_TRUE(caller->SetRemoteDescription(std::move(answer)));
+
+ EXPECT_EQ(cricket::ICEROLE_CONTROLLED, GetIceRole(caller));
+ EXPECT_EQ(cricket::ICEROLE_CONTROLLING, GetIceRole(callee));
+}
+
+// Test that when the caller and the callee both use the lite implementation of
+// ICE, the initial offerer (caller) takes the CONTROLLING role and the callee
+// takes the CONTROLLED role. This is specified in RFC5245 Section 5.1.1.
+TEST_P(PeerConnectionIceTest,
+ OfferFromLiteIceControllingAndAnswerFromLiteIceControlled) {
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnectionWithAudioVideo();
+
+ auto offer = caller->CreateOffer();
+ SetIceMode(offer.get(), cricket::IceMode::ICEMODE_LITE);
+ ASSERT_TRUE(
+ caller->SetLocalDescription(CloneSessionDescription(offer.get())));
+ ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer)));
+
+ auto answer = callee->CreateAnswer();
+ SetIceMode(answer.get(), cricket::IceMode::ICEMODE_LITE);
+ ASSERT_TRUE(
+ callee->SetLocalDescription(CloneSessionDescription(answer.get())));
+ ASSERT_TRUE(caller->SetRemoteDescription(std::move(answer)));
+
+ EXPECT_EQ(cricket::ICEROLE_CONTROLLING, GetIceRole(caller));
+ EXPECT_EQ(cricket::ICEROLE_CONTROLLED, GetIceRole(callee));
+}
+
+INSTANTIATE_TEST_SUITE_P(PeerConnectionIceTest,
+ PeerConnectionIceTest,
+ Values(SdpSemantics::kPlanB_DEPRECATED,
+ SdpSemantics::kUnifiedPlan));
+
+class PeerConnectionIceConfigTest : public ::testing::Test {
+ public:
+ PeerConnectionIceConfigTest()
+ : socket_server_(rtc::CreateDefaultSocketServer()),
+ main_thread_(socket_server_.get()) {}
+
+ protected:
+ void SetUp() override {
+ pc_factory_ = CreatePeerConnectionFactory(
+ rtc::Thread::Current(), rtc::Thread::Current(), rtc::Thread::Current(),
+ FakeAudioCaptureModule::Create(), CreateBuiltinAudioEncoderFactory(),
+ CreateBuiltinAudioDecoderFactory(),
+ std::make_unique<VideoEncoderFactoryTemplate<
+ LibvpxVp8EncoderTemplateAdapter, LibvpxVp9EncoderTemplateAdapter,
+ OpenH264EncoderTemplateAdapter, LibaomAv1EncoderTemplateAdapter>>(),
+ std::make_unique<VideoDecoderFactoryTemplate<
+ LibvpxVp8DecoderTemplateAdapter, LibvpxVp9DecoderTemplateAdapter,
+ OpenH264DecoderTemplateAdapter, Dav1dDecoderTemplateAdapter>>(),
+ nullptr /* audio_mixer */, nullptr /* audio_processing */);
+ }
+ void CreatePeerConnection(const RTCConfiguration& config) {
+ packet_socket_factory_.reset(
+ new rtc::BasicPacketSocketFactory(socket_server_.get()));
+ std::unique_ptr<cricket::FakePortAllocator> port_allocator(
+ new cricket::FakePortAllocator(rtc::Thread::Current(),
+ packet_socket_factory_.get(),
+ &field_trials_));
+ port_allocator_ = port_allocator.get();
+ port_allocator_->SetIceTiebreaker(kTiebreakerDefault);
+ PeerConnectionDependencies pc_dependencies(&observer_);
+ pc_dependencies.allocator = std::move(port_allocator);
+ auto result = pc_factory_->CreatePeerConnectionOrError(
+ config, std::move(pc_dependencies));
+ EXPECT_TRUE(result.ok());
+ pc_ = result.MoveValue();
+ }
+
+ webrtc::test::ScopedKeyValueConfig field_trials_;
+ std::unique_ptr<rtc::SocketServer> socket_server_;
+ rtc::AutoSocketServerThread main_thread_;
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> pc_factory_ = nullptr;
+ rtc::scoped_refptr<PeerConnectionInterface> pc_ = nullptr;
+ std::unique_ptr<rtc::PacketSocketFactory> packet_socket_factory_;
+ cricket::FakePortAllocator* port_allocator_ = nullptr;
+
+ MockPeerConnectionObserver observer_;
+};
+
+TEST_F(PeerConnectionIceConfigTest, SetStunCandidateKeepaliveInterval) {
+ RTCConfiguration config;
+ config.sdp_semantics = SdpSemantics::kUnifiedPlan;
+ config.stun_candidate_keepalive_interval = 123;
+ config.ice_candidate_pool_size = 1;
+ CreatePeerConnection(config);
+ ASSERT_NE(port_allocator_, nullptr);
+ absl::optional<int> actual_stun_keepalive_interval =
+ port_allocator_->stun_candidate_keepalive_interval();
+ EXPECT_EQ(actual_stun_keepalive_interval.value_or(-1), 123);
+ config.stun_candidate_keepalive_interval = 321;
+ ASSERT_TRUE(pc_->SetConfiguration(config).ok());
+ actual_stun_keepalive_interval =
+ port_allocator_->stun_candidate_keepalive_interval();
+ EXPECT_EQ(actual_stun_keepalive_interval.value_or(-1), 321);
+}
+
+TEST_F(PeerConnectionIceConfigTest, SetStableWritableConnectionInterval) {
+ RTCConfiguration config;
+ config.sdp_semantics = SdpSemantics::kUnifiedPlan;
+ config.stable_writable_connection_ping_interval_ms = 3500;
+ CreatePeerConnection(config);
+ EXPECT_TRUE(pc_->SetConfiguration(config).ok());
+ EXPECT_EQ(pc_->GetConfiguration().stable_writable_connection_ping_interval_ms,
+ config.stable_writable_connection_ping_interval_ms);
+}
+
+TEST_F(PeerConnectionIceConfigTest,
+ SetStableWritableConnectionInterval_FailsValidation) {
+ RTCConfiguration config;
+ config.sdp_semantics = SdpSemantics::kUnifiedPlan;
+ CreatePeerConnection(config);
+ ASSERT_TRUE(pc_->SetConfiguration(config).ok());
+ config.stable_writable_connection_ping_interval_ms = 5000;
+ config.ice_check_interval_strong_connectivity = 7500;
+ EXPECT_FALSE(pc_->SetConfiguration(config).ok());
+}
+
+TEST_F(PeerConnectionIceConfigTest,
+ SetStableWritableConnectionInterval_DefaultValue_FailsValidation) {
+ RTCConfiguration config;
+ config.sdp_semantics = SdpSemantics::kUnifiedPlan;
+ CreatePeerConnection(config);
+ ASSERT_TRUE(pc_->SetConfiguration(config).ok());
+ config.ice_check_interval_strong_connectivity = 2500;
+ EXPECT_TRUE(pc_->SetConfiguration(config).ok());
+ config.ice_check_interval_strong_connectivity = 2501;
+ EXPECT_FALSE(pc_->SetConfiguration(config).ok());
+}
+
+TEST_P(PeerConnectionIceTest, IceCredentialsCreateOffer) {
+ RTCConfiguration config;
+ config.sdp_semantics = SdpSemantics::kUnifiedPlan;
+ config.ice_candidate_pool_size = 1;
+ auto pc = CreatePeerConnectionWithAudioVideo(config);
+ ASSERT_NE(pc->port_allocator_, nullptr);
+ auto offer = pc->CreateOffer();
+ auto credentials = pc->port_allocator_->GetPooledIceCredentials();
+ ASSERT_EQ(1u, credentials.size());
+
+ auto* desc = offer->description();
+ for (const auto& content : desc->contents()) {
+ auto* transport_info = desc->GetTransportInfoByName(content.name);
+ EXPECT_EQ(transport_info->description.ice_ufrag, credentials[0].ufrag);
+ EXPECT_EQ(transport_info->description.ice_pwd, credentials[0].pwd);
+ }
+}
+
+TEST_P(PeerConnectionIceTest, IceCredentialsCreateAnswer) {
+ RTCConfiguration config;
+ config.sdp_semantics = SdpSemantics::kUnifiedPlan;
+ config.ice_candidate_pool_size = 1;
+ auto pc = CreatePeerConnectionWithAudioVideo(config);
+ ASSERT_NE(pc->port_allocator_, nullptr);
+ auto offer = pc->CreateOffer();
+ ASSERT_TRUE(pc->SetRemoteDescription(std::move(offer)));
+ auto answer = pc->CreateAnswer();
+
+ auto credentials = pc->port_allocator_->GetPooledIceCredentials();
+ ASSERT_EQ(1u, credentials.size());
+
+ auto* desc = answer->description();
+ for (const auto& content : desc->contents()) {
+ auto* transport_info = desc->GetTransportInfoByName(content.name);
+ EXPECT_EQ(transport_info->description.ice_ufrag, credentials[0].ufrag);
+ EXPECT_EQ(transport_info->description.ice_pwd, credentials[0].pwd);
+ }
+}
+
+// Regression test for https://bugs.chromium.org/p/webrtc/issues/detail?id=4728
+TEST_P(PeerConnectionIceTest, CloseDoesNotTransitionGatheringStateToComplete) {
+ auto pc = CreatePeerConnectionWithAudioVideo();
+ pc->pc()->Close();
+ EXPECT_FALSE(pc->IsIceGatheringDone());
+ EXPECT_EQ(PeerConnectionInterface::kIceGatheringNew,
+ pc->pc()->ice_gathering_state());
+}
+
+TEST_P(PeerConnectionIceTest, PrefersMidOverMLineIndex) {
+ const SocketAddress kCalleeAddress("1.1.1.1", 1111);
+
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnectionWithAudioVideo();
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ ASSERT_TRUE(
+ caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal()));
+
+ // `candidate.transport_name()` is empty.
+ cricket::Candidate candidate = CreateLocalUdpCandidate(kCalleeAddress);
+ auto* audio_content = cricket::GetFirstAudioContent(
+ caller->pc()->local_description()->description());
+ std::unique_ptr<IceCandidateInterface> ice_candidate =
+ CreateIceCandidate(audio_content->name, 65535, candidate);
+ EXPECT_TRUE(caller->pc()->AddIceCandidate(ice_candidate.get()));
+ EXPECT_TRUE(caller->pc()->RemoveIceCandidates({candidate}));
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/peer_connection_integrationtest.cc b/third_party/libwebrtc/pc/peer_connection_integrationtest.cc
new file mode 100644
index 0000000000..d76e5e27d5
--- /dev/null
+++ b/third_party/libwebrtc/pc/peer_connection_integrationtest.cc
@@ -0,0 +1,3853 @@
+/*
+ * Copyright 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Integration tests for PeerConnection.
+// These tests exercise a full stack over a simulated network.
+//
+// NOTE: If your test takes a while (guideline: more than 5 seconds),
+// do NOT add it here, but instead add it to the file
+// slow_peer_connection_integrationtest.cc
+
+#include <stdint.h>
+
+#include <algorithm>
+#include <memory>
+#include <string>
+#include <tuple>
+#include <utility>
+#include <vector>
+
+#include "absl/algorithm/container.h"
+#include "absl/memory/memory.h"
+#include "absl/strings/string_view.h"
+#include "absl/types/optional.h"
+#include "api/async_resolver_factory.h"
+#include "api/candidate.h"
+#include "api/crypto/crypto_options.h"
+#include "api/dtmf_sender_interface.h"
+#include "api/ice_transport_interface.h"
+#include "api/jsep.h"
+#include "api/media_stream_interface.h"
+#include "api/media_types.h"
+#include "api/peer_connection_interface.h"
+#include "api/rtc_error.h"
+#include "api/rtc_event_log/rtc_event.h"
+#include "api/rtc_event_log/rtc_event_log.h"
+#include "api/rtc_event_log_output.h"
+#include "api/rtp_parameters.h"
+#include "api/rtp_receiver_interface.h"
+#include "api/rtp_sender_interface.h"
+#include "api/rtp_transceiver_direction.h"
+#include "api/rtp_transceiver_interface.h"
+#include "api/scoped_refptr.h"
+#include "api/stats/rtc_stats.h"
+#include "api/stats/rtc_stats_report.h"
+#include "api/stats/rtcstats_objects.h"
+#include "api/test/mock_async_dns_resolver.h"
+#include "api/test/mock_encoder_selector.h"
+#include "api/transport/rtp/rtp_source.h"
+#include "api/uma_metrics.h"
+#include "api/units/time_delta.h"
+#include "api/video/video_rotation.h"
+#include "logging/rtc_event_log/fake_rtc_event_log.h"
+#include "logging/rtc_event_log/fake_rtc_event_log_factory.h"
+#include "media/base/codec.h"
+#include "media/base/media_constants.h"
+#include "media/base/stream_params.h"
+#include "p2p/base/mock_async_resolver.h"
+#include "p2p/base/port.h"
+#include "p2p/base/port_allocator.h"
+#include "p2p/base/port_interface.h"
+#include "p2p/base/test_stun_server.h"
+#include "p2p/base/test_turn_customizer.h"
+#include "p2p/base/test_turn_server.h"
+#include "p2p/base/transport_description.h"
+#include "p2p/base/transport_info.h"
+#include "pc/channel.h"
+#include "pc/media_session.h"
+#include "pc/peer_connection.h"
+#include "pc/peer_connection_factory.h"
+#include "pc/rtp_transceiver.h"
+#include "pc/session_description.h"
+#include "pc/test/fake_periodic_video_source.h"
+#include "pc/test/integration_test_helpers.h"
+#include "pc/test/mock_peer_connection_observers.h"
+#include "rtc_base/fake_clock.h"
+#include "rtc_base/fake_mdns_responder.h"
+#include "rtc_base/fake_network.h"
+#include "rtc_base/firewall_socket_server.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/helpers.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/socket_address.h"
+#include "rtc_base/ssl_certificate.h"
+#include "rtc_base/ssl_fingerprint.h"
+#include "rtc_base/ssl_identity.h"
+#include "rtc_base/ssl_stream_adapter.h"
+#include "rtc_base/task_queue_for_test.h"
+#include "rtc_base/test_certificate_verifier.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/time_utils.h"
+#include "rtc_base/virtual_socket_server.h"
+#include "system_wrappers/include/metrics.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+
+namespace {
+
+class PeerConnectionIntegrationTest
+ : public PeerConnectionIntegrationBaseTest,
+ public ::testing::WithParamInterface<SdpSemantics> {
+ protected:
+ PeerConnectionIntegrationTest()
+ : PeerConnectionIntegrationBaseTest(GetParam()) {}
+};
+
+// Fake clock must be set before threads are started to prevent race on
+// Set/GetClockForTesting().
+// To achieve that, multiple inheritance is used as a mixin pattern
+// where order of construction is finely controlled.
+// This also ensures peerconnection is closed before switching back to non-fake
+// clock, avoiding other races and DCHECK failures such as in rtp_sender.cc.
+class FakeClockForTest : public rtc::ScopedFakeClock {
+ protected:
+ FakeClockForTest() {
+ // Some things use a time of "0" as a special value, so we need to start out
+ // the fake clock at a nonzero time.
+ // TODO(deadbeef): Fix this.
+ AdvanceTime(webrtc::TimeDelta::Seconds(1));
+ }
+
+ // Explicit handle.
+ ScopedFakeClock& FakeClock() { return *this; }
+};
+
+// Ensure FakeClockForTest is constructed first (see class for rationale).
+class PeerConnectionIntegrationTestWithFakeClock
+ : public FakeClockForTest,
+ public PeerConnectionIntegrationTest {};
+
+class PeerConnectionIntegrationTestPlanB
+ : public PeerConnectionIntegrationBaseTest {
+ protected:
+ PeerConnectionIntegrationTestPlanB()
+ : PeerConnectionIntegrationBaseTest(SdpSemantics::kPlanB_DEPRECATED) {}
+};
+
+class PeerConnectionIntegrationTestUnifiedPlan
+ : public PeerConnectionIntegrationBaseTest {
+ protected:
+ PeerConnectionIntegrationTestUnifiedPlan()
+ : PeerConnectionIntegrationBaseTest(SdpSemantics::kUnifiedPlan) {}
+};
+
+// Test the OnFirstPacketReceived callback from audio/video RtpReceivers. This
+// includes testing that the callback is invoked if an observer is connected
+// after the first packet has already been received.
+TEST_P(PeerConnectionIntegrationTest,
+ RtpReceiverObserverOnFirstPacketReceived) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+ caller()->AddAudioVideoTracks();
+ callee()->AddAudioVideoTracks();
+ // Start offer/answer exchange and wait for it to complete.
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ // Should be one receiver each for audio/video.
+ EXPECT_EQ(2U, caller()->rtp_receiver_observers().size());
+ EXPECT_EQ(2U, callee()->rtp_receiver_observers().size());
+ // Wait for all "first packet received" callbacks to be fired.
+ EXPECT_TRUE_WAIT(
+ absl::c_all_of(caller()->rtp_receiver_observers(),
+ [](const std::unique_ptr<MockRtpReceiverObserver>& o) {
+ return o->first_packet_received();
+ }),
+ kMaxWaitForFramesMs);
+ EXPECT_TRUE_WAIT(
+ absl::c_all_of(callee()->rtp_receiver_observers(),
+ [](const std::unique_ptr<MockRtpReceiverObserver>& o) {
+ return o->first_packet_received();
+ }),
+ kMaxWaitForFramesMs);
+ // If new observers are set after the first packet was already received, the
+ // callback should still be invoked.
+ caller()->ResetRtpReceiverObservers();
+ callee()->ResetRtpReceiverObservers();
+ EXPECT_EQ(2U, caller()->rtp_receiver_observers().size());
+ EXPECT_EQ(2U, callee()->rtp_receiver_observers().size());
+ EXPECT_TRUE(
+ absl::c_all_of(caller()->rtp_receiver_observers(),
+ [](const std::unique_ptr<MockRtpReceiverObserver>& o) {
+ return o->first_packet_received();
+ }));
+ EXPECT_TRUE(
+ absl::c_all_of(callee()->rtp_receiver_observers(),
+ [](const std::unique_ptr<MockRtpReceiverObserver>& o) {
+ return o->first_packet_received();
+ }));
+}
+
+class DummyDtmfObserver : public DtmfSenderObserverInterface {
+ public:
+ DummyDtmfObserver() : completed_(false) {}
+
+ // Implements DtmfSenderObserverInterface.
+ void OnToneChange(const std::string& tone) override {
+ tones_.push_back(tone);
+ if (tone.empty()) {
+ completed_ = true;
+ }
+ }
+
+ const std::vector<std::string>& tones() const { return tones_; }
+ bool completed() const { return completed_; }
+
+ private:
+ bool completed_;
+ std::vector<std::string> tones_;
+};
+
+// Assumes `sender` already has an audio track added and the offer/answer
+// exchange is done.
+void TestDtmfFromSenderToReceiver(PeerConnectionIntegrationWrapper* sender,
+ PeerConnectionIntegrationWrapper* receiver) {
+ // We should be able to get a DTMF sender from the local sender.
+ rtc::scoped_refptr<DtmfSenderInterface> dtmf_sender =
+ sender->pc()->GetSenders().at(0)->GetDtmfSender();
+ ASSERT_TRUE(dtmf_sender);
+ DummyDtmfObserver observer;
+ dtmf_sender->RegisterObserver(&observer);
+
+ // Test the DtmfSender object just created.
+ EXPECT_TRUE(dtmf_sender->CanInsertDtmf());
+ EXPECT_TRUE(dtmf_sender->InsertDtmf("1a", 100, 50));
+
+ EXPECT_TRUE_WAIT(observer.completed(), kDefaultTimeout);
+ std::vector<std::string> tones = {"1", "a", ""};
+ EXPECT_EQ(tones, observer.tones());
+ dtmf_sender->UnregisterObserver();
+ // TODO(deadbeef): Verify the tones were actually received end-to-end.
+}
+
+// Verifies the DtmfSenderObserver callbacks for a DtmfSender (one in each
+// direction).
+TEST_P(PeerConnectionIntegrationTest, DtmfSenderObserver) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+ // Only need audio for DTMF.
+ caller()->AddAudioTrack();
+ callee()->AddAudioTrack();
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ // DTLS must finish before the DTMF sender can be used reliably.
+ ASSERT_TRUE_WAIT(DtlsConnected(), kDefaultTimeout);
+ TestDtmfFromSenderToReceiver(caller(), callee());
+ TestDtmfFromSenderToReceiver(callee(), caller());
+}
+
+// Basic end-to-end test, verifying media can be encoded/transmitted/decoded
+// between two connections, using DTLS-SRTP.
+TEST_P(PeerConnectionIntegrationTest, EndToEndCallWithDtls) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+
+ // Do normal offer/answer and wait for some frames to be received in each
+ // direction.
+ caller()->AddAudioVideoTracks();
+ callee()->AddAudioVideoTracks();
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ MediaExpectations media_expectations;
+ media_expectations.ExpectBidirectionalAudioAndVideo();
+ ASSERT_TRUE(ExpectNewFrames(media_expectations));
+}
+
+#if defined(WEBRTC_FUCHSIA)
+// Uses SDES instead of DTLS for key agreement.
+TEST_P(PeerConnectionIntegrationTest, EndToEndCallWithSdes) {
+ PeerConnectionInterface::RTCConfiguration sdes_config;
+ sdes_config.enable_dtls_srtp.emplace(false);
+ ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(sdes_config, sdes_config));
+ ConnectFakeSignaling();
+
+ // Do normal offer/answer and wait for some frames to be received in each
+ // direction.
+ caller()->AddAudioVideoTracks();
+ callee()->AddAudioVideoTracks();
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ MediaExpectations media_expectations;
+ media_expectations.ExpectBidirectionalAudioAndVideo();
+ ASSERT_TRUE(ExpectNewFrames(media_expectations));
+}
+#endif
+
+// Basic end-to-end test specifying the `enable_encrypted_rtp_header_extensions`
+// option to offer encrypted versions of all header extensions alongside the
+// unencrypted versions.
+TEST_P(PeerConnectionIntegrationTest,
+ EndToEndCallWithEncryptedRtpHeaderExtensions) {
+ CryptoOptions crypto_options;
+ crypto_options.srtp.enable_encrypted_rtp_header_extensions = true;
+ PeerConnectionInterface::RTCConfiguration config;
+ config.crypto_options = crypto_options;
+ // Note: This allows offering >14 RTP header extensions.
+ config.offer_extmap_allow_mixed = true;
+ ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(config, config));
+ ConnectFakeSignaling();
+
+ // Do normal offer/answer and wait for some frames to be received in each
+ // direction.
+ caller()->AddAudioVideoTracks();
+ callee()->AddAudioVideoTracks();
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ MediaExpectations media_expectations;
+ media_expectations.ExpectBidirectionalAudioAndVideo();
+ ASSERT_TRUE(ExpectNewFrames(media_expectations));
+}
+
+// This test sets up a call between two parties with a source resolution of
+// 1280x720 and verifies that a 16:9 aspect ratio is received.
+TEST_P(PeerConnectionIntegrationTest,
+ Send1280By720ResolutionAndReceive16To9AspectRatio) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+
+ // Add video tracks with 16:9 aspect ratio, size 1280 x 720.
+ webrtc::FakePeriodicVideoSource::Config config;
+ config.width = 1280;
+ config.height = 720;
+ config.timestamp_offset_ms = rtc::TimeMillis();
+ caller()->AddTrack(caller()->CreateLocalVideoTrackWithConfig(config));
+ callee()->AddTrack(callee()->CreateLocalVideoTrackWithConfig(config));
+
+ // Do normal offer/answer and wait for at least one frame to be received in
+ // each direction.
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(caller()->min_video_frames_received_per_track() > 0 &&
+ callee()->min_video_frames_received_per_track() > 0,
+ kMaxWaitForFramesMs);
+
+ // Check rendered aspect ratio.
+ EXPECT_EQ(16.0 / 9, caller()->local_rendered_aspect_ratio());
+ EXPECT_EQ(16.0 / 9, caller()->rendered_aspect_ratio());
+ EXPECT_EQ(16.0 / 9, callee()->local_rendered_aspect_ratio());
+ EXPECT_EQ(16.0 / 9, callee()->rendered_aspect_ratio());
+}
+
+// This test sets up a one-way call, with media only from caller to
+// callee.
+TEST_P(PeerConnectionIntegrationTest, OneWayMediaCall) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+ caller()->AddAudioVideoTracks();
+ caller()->CreateAndSetAndSignalOffer();
+ MediaExpectations media_expectations;
+ media_expectations.CalleeExpectsSomeAudioAndVideo();
+ media_expectations.CallerExpectsNoAudio();
+ media_expectations.CallerExpectsNoVideo();
+ ASSERT_TRUE(ExpectNewFrames(media_expectations));
+}
+
+// Tests that send only works without the caller having a decoder factory and
+// the callee having an encoder factory.
+TEST_P(PeerConnectionIntegrationTest, EndToEndCallWithSendOnlyVideo) {
+ ASSERT_TRUE(
+ CreateOneDirectionalPeerConnectionWrappers(/*caller_to_callee=*/true));
+ ConnectFakeSignaling();
+ // Add one-directional video, from caller to callee.
+ rtc::scoped_refptr<webrtc::VideoTrackInterface> caller_track =
+ caller()->CreateLocalVideoTrack();
+ caller()->AddTrack(caller_track);
+ PeerConnectionInterface::RTCOfferAnswerOptions options;
+ options.offer_to_receive_video = 0;
+ caller()->SetOfferAnswerOptions(options);
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ ASSERT_EQ(callee()->pc()->GetReceivers().size(), 1u);
+
+ // Expect video to be received in one direction.
+ MediaExpectations media_expectations;
+ media_expectations.CallerExpectsNoVideo();
+ media_expectations.CalleeExpectsSomeVideo();
+
+ EXPECT_TRUE(ExpectNewFrames(media_expectations));
+}
+
+// Tests that receive only works without the caller having an encoder factory
+// and the callee having a decoder factory.
+TEST_P(PeerConnectionIntegrationTest, EndToEndCallWithReceiveOnlyVideo) {
+ ASSERT_TRUE(
+ CreateOneDirectionalPeerConnectionWrappers(/*caller_to_callee=*/false));
+ ConnectFakeSignaling();
+ // Add one-directional video, from callee to caller.
+ rtc::scoped_refptr<webrtc::VideoTrackInterface> callee_track =
+ callee()->CreateLocalVideoTrack();
+ callee()->AddTrack(callee_track);
+ PeerConnectionInterface::RTCOfferAnswerOptions options;
+ options.offer_to_receive_video = 1;
+ caller()->SetOfferAnswerOptions(options);
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ ASSERT_EQ(caller()->pc()->GetReceivers().size(), 1u);
+
+ // Expect video to be received in one direction.
+ MediaExpectations media_expectations;
+ media_expectations.CallerExpectsSomeVideo();
+ media_expectations.CalleeExpectsNoVideo();
+
+ EXPECT_TRUE(ExpectNewFrames(media_expectations));
+}
+
+TEST_P(PeerConnectionIntegrationTest,
+ EndToEndCallAddReceiveVideoToSendOnlyCall) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+ // Add one-directional video, from caller to callee.
+ rtc::scoped_refptr<webrtc::VideoTrackInterface> caller_track =
+ caller()->CreateLocalVideoTrack();
+ caller()->AddTrack(caller_track);
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+
+ // Add receive video.
+ rtc::scoped_refptr<webrtc::VideoTrackInterface> callee_track =
+ callee()->CreateLocalVideoTrack();
+ callee()->AddTrack(callee_track);
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+
+ // Ensure that video frames are received end-to-end.
+ MediaExpectations media_expectations;
+ media_expectations.ExpectBidirectionalVideo();
+ ASSERT_TRUE(ExpectNewFrames(media_expectations));
+}
+
+TEST_P(PeerConnectionIntegrationTest,
+ EndToEndCallAddSendVideoToReceiveOnlyCall) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+ // Add one-directional video, from callee to caller.
+ rtc::scoped_refptr<webrtc::VideoTrackInterface> callee_track =
+ callee()->CreateLocalVideoTrack();
+ callee()->AddTrack(callee_track);
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+
+ // Add send video.
+ rtc::scoped_refptr<webrtc::VideoTrackInterface> caller_track =
+ caller()->CreateLocalVideoTrack();
+ caller()->AddTrack(caller_track);
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+
+  // Expect video frames to be received in both directions.
+ MediaExpectations media_expectations;
+ media_expectations.ExpectBidirectionalVideo();
+ ASSERT_TRUE(ExpectNewFrames(media_expectations));
+}
+
+TEST_P(PeerConnectionIntegrationTest,
+ EndToEndCallRemoveReceiveVideoFromSendReceiveCall) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+ // Add send video, from caller to callee.
+ rtc::scoped_refptr<webrtc::VideoTrackInterface> caller_track =
+ caller()->CreateLocalVideoTrack();
+ rtc::scoped_refptr<webrtc::RtpSenderInterface> caller_sender =
+ caller()->AddTrack(caller_track);
+ // Add receive video, from callee to caller.
+ rtc::scoped_refptr<webrtc::VideoTrackInterface> callee_track =
+ callee()->CreateLocalVideoTrack();
+
+ rtc::scoped_refptr<webrtc::RtpSenderInterface> callee_sender =
+ callee()->AddTrack(callee_track);
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+
+ // Remove receive video (i.e., callee sender track).
+ callee()->pc()->RemoveTrackOrError(callee_sender);
+
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+
+ // Expect one-directional video.
+ MediaExpectations media_expectations;
+ media_expectations.CallerExpectsNoVideo();
+ media_expectations.CalleeExpectsSomeVideo();
+
+ ASSERT_TRUE(ExpectNewFrames(media_expectations));
+}
+
+TEST_P(PeerConnectionIntegrationTest,
+ EndToEndCallRemoveSendVideoFromSendReceiveCall) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+ // Add send video, from caller to callee.
+ rtc::scoped_refptr<webrtc::VideoTrackInterface> caller_track =
+ caller()->CreateLocalVideoTrack();
+ rtc::scoped_refptr<webrtc::RtpSenderInterface> caller_sender =
+ caller()->AddTrack(caller_track);
+ // Add receive video, from callee to caller.
+ rtc::scoped_refptr<webrtc::VideoTrackInterface> callee_track =
+ callee()->CreateLocalVideoTrack();
+
+ rtc::scoped_refptr<webrtc::RtpSenderInterface> callee_sender =
+ callee()->AddTrack(callee_track);
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+
+ // Remove send video (i.e., caller sender track).
+ caller()->pc()->RemoveTrackOrError(caller_sender);
+
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+
+ // Expect one-directional video.
+ MediaExpectations media_expectations;
+ media_expectations.CalleeExpectsNoVideo();
+ media_expectations.CallerExpectsSomeVideo();
+
+ ASSERT_TRUE(ExpectNewFrames(media_expectations));
+}
+
+// This test sets up an audio call initially, with the callee rejecting video
+// initially. Then later the callee decides to upgrade to audio/video, and
+// initiates a new offer/answer exchange.
+TEST_P(PeerConnectionIntegrationTest, AudioToVideoUpgrade) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+ // Initially, offer an audio/video stream from the caller, but refuse to
+ // send/receive video on the callee side.
+ caller()->AddAudioVideoTracks();
+ callee()->AddAudioTrack();
+ if (sdp_semantics_ == SdpSemantics::kPlanB_DEPRECATED) {
+ PeerConnectionInterface::RTCOfferAnswerOptions options;
+ options.offer_to_receive_video = 0;
+ callee()->SetOfferAnswerOptions(options);
+ } else {
+ callee()->SetRemoteOfferHandler([this] {
+ callee()
+ ->GetFirstTransceiverOfType(cricket::MEDIA_TYPE_VIDEO)
+ ->StopInternal();
+ });
+ }
+ // Do offer/answer and make sure audio is still received end-to-end.
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ {
+ MediaExpectations media_expectations;
+ media_expectations.ExpectBidirectionalAudio();
+ media_expectations.ExpectNoVideo();
+ ASSERT_TRUE(ExpectNewFrames(media_expectations));
+ }
+ // Sanity check that the callee's description has a rejected video section.
+ ASSERT_NE(nullptr, callee()->pc()->local_description());
+ const ContentInfo* callee_video_content =
+ GetFirstVideoContent(callee()->pc()->local_description()->description());
+ ASSERT_NE(nullptr, callee_video_content);
+ EXPECT_TRUE(callee_video_content->rejected);
+
+ // Now negotiate with video and ensure negotiation succeeds, with video
+ // frames and additional audio frames being received.
+ callee()->AddVideoTrack();
+ if (sdp_semantics_ == SdpSemantics::kPlanB_DEPRECATED) {
+ PeerConnectionInterface::RTCOfferAnswerOptions options;
+ options.offer_to_receive_video = 1;
+ callee()->SetOfferAnswerOptions(options);
+ } else {
+ callee()->SetRemoteOfferHandler(nullptr);
+ caller()->SetRemoteOfferHandler([this] {
+ // The caller creates a new transceiver to receive video on when receiving
+ // the offer, but by default it is send only.
+ auto transceivers = caller()->pc()->GetTransceivers();
+ ASSERT_EQ(2U, transceivers.size());
+ ASSERT_EQ(cricket::MEDIA_TYPE_VIDEO,
+ transceivers[1]->receiver()->media_type());
+ transceivers[1]->sender()->SetTrack(
+ caller()->CreateLocalVideoTrack().get());
+ transceivers[1]->SetDirectionWithError(
+ RtpTransceiverDirection::kSendRecv);
+ });
+ }
+ callee()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ {
+ // Expect additional audio frames to be received after the upgrade.
+ MediaExpectations media_expectations;
+ media_expectations.ExpectBidirectionalAudioAndVideo();
+ ASSERT_TRUE(ExpectNewFrames(media_expectations));
+ }
+}
+
+// Simpler than the above test; just add an audio track to an established
+// video-only connection.
+TEST_P(PeerConnectionIntegrationTest, AddAudioToVideoOnlyCall) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+ // Do initial offer/answer with just a video track.
+ caller()->AddVideoTrack();
+ callee()->AddVideoTrack();
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ // Now add an audio track and do another offer/answer.
+ caller()->AddAudioTrack();
+ callee()->AddAudioTrack();
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ // Ensure both audio and video frames are received end-to-end.
+ MediaExpectations media_expectations;
+ media_expectations.ExpectBidirectionalAudioAndVideo();
+ ASSERT_TRUE(ExpectNewFrames(media_expectations));
+}
+
+// This test sets up a non-bundled call and negotiates bundling at the same
+// time as starting an ICE restart. When bundling is in effect in the restart,
+// the DTLS-SRTP context should be successfully reset.
+TEST_P(PeerConnectionIntegrationTest, BundlingEnabledWhileIceRestartOccurs) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+
+ caller()->AddAudioVideoTracks();
+ callee()->AddAudioVideoTracks();
+ // Remove the bundle group from the SDP received by the callee.
+ callee()->SetReceivedSdpMunger([](cricket::SessionDescription* desc) {
+ desc->RemoveGroupByName("BUNDLE");
+ });
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ {
+ MediaExpectations media_expectations;
+ media_expectations.ExpectBidirectionalAudioAndVideo();
+ ASSERT_TRUE(ExpectNewFrames(media_expectations));
+ }
+ // Now stop removing the BUNDLE group, and trigger an ICE restart.
+ callee()->SetReceivedSdpMunger(nullptr);
+ caller()->SetOfferAnswerOptions(IceRestartOfferAnswerOptions());
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+
+ // Expect additional frames to be received after the ICE restart.
+ {
+ MediaExpectations media_expectations;
+ media_expectations.ExpectBidirectionalAudioAndVideo();
+ ASSERT_TRUE(ExpectNewFrames(media_expectations));
+ }
+}
+
+// Test CVO (Coordination of Video Orientation). If a video source is rotated
+// and both peers support the CVO RTP header extension, the actual video frames
+// don't need to be encoded in different resolutions, since the rotation is
+// communicated through the RTP header extension.
+TEST_P(PeerConnectionIntegrationTest, RotatedVideoWithCVOExtension) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+ // Add rotated video tracks.
+ caller()->AddTrack(
+ caller()->CreateLocalVideoTrackWithRotation(webrtc::kVideoRotation_90));
+ callee()->AddTrack(
+ callee()->CreateLocalVideoTrackWithRotation(webrtc::kVideoRotation_270));
+
+ // Wait for video frames to be received by both sides.
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ ASSERT_TRUE_WAIT(caller()->min_video_frames_received_per_track() > 0 &&
+ callee()->min_video_frames_received_per_track() > 0,
+ kMaxWaitForFramesMs);
+
+ // Ensure that the aspect ratio is unmodified.
+ // TODO(deadbeef): Where does 4:3 come from? Should be explicit in the test,
+ // not just assumed.
+ EXPECT_EQ(4.0 / 3, caller()->local_rendered_aspect_ratio());
+ EXPECT_EQ(4.0 / 3, caller()->rendered_aspect_ratio());
+ EXPECT_EQ(4.0 / 3, callee()->local_rendered_aspect_ratio());
+ EXPECT_EQ(4.0 / 3, callee()->rendered_aspect_ratio());
+ // Ensure that the CVO bits were surfaced to the renderer.
+ EXPECT_EQ(webrtc::kVideoRotation_270, caller()->rendered_rotation());
+ EXPECT_EQ(webrtc::kVideoRotation_90, callee()->rendered_rotation());
+}
+
+// Test that when the CVO extension isn't supported, video is rotated the
+// old-fashioned way, by encoding rotated frames.
+TEST_P(PeerConnectionIntegrationTest, RotatedVideoWithoutCVOExtension) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+ // Add rotated video tracks.
+ caller()->AddTrack(
+ caller()->CreateLocalVideoTrackWithRotation(webrtc::kVideoRotation_90));
+ callee()->AddTrack(
+ callee()->CreateLocalVideoTrackWithRotation(webrtc::kVideoRotation_270));
+
+ // Remove the CVO extension from the offered SDP.
+ callee()->SetReceivedSdpMunger([](cricket::SessionDescription* desc) {
+ cricket::VideoContentDescription* video =
+ GetFirstVideoContentDescription(desc);
+ video->ClearRtpHeaderExtensions();
+ });
+ // Wait for video frames to be received by both sides.
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ ASSERT_TRUE_WAIT(caller()->min_video_frames_received_per_track() > 0 &&
+ callee()->min_video_frames_received_per_track() > 0,
+ kMaxWaitForFramesMs);
+
+  // Expect that the aspect ratio is inverted to account for the 90/270 degree
+ // rotation.
+ // TODO(deadbeef): Where does 4:3 come from? Should be explicit in the test,
+ // not just assumed.
+ EXPECT_EQ(3.0 / 4, caller()->local_rendered_aspect_ratio());
+ EXPECT_EQ(3.0 / 4, caller()->rendered_aspect_ratio());
+ EXPECT_EQ(3.0 / 4, callee()->local_rendered_aspect_ratio());
+ EXPECT_EQ(3.0 / 4, callee()->rendered_aspect_ratio());
+ // Expect that each endpoint is unaware of the rotation of the other endpoint.
+ EXPECT_EQ(webrtc::kVideoRotation_0, caller()->rendered_rotation());
+ EXPECT_EQ(webrtc::kVideoRotation_0, callee()->rendered_rotation());
+}
+
+// Test that if the answerer rejects the audio m= section, no audio is sent or
+// received, but video still can be.
+TEST_P(PeerConnectionIntegrationTest, AnswererRejectsAudioSection) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+ caller()->AddAudioVideoTracks();
+ if (sdp_semantics_ == SdpSemantics::kPlanB_DEPRECATED) {
+ // Only add video track for callee, and set offer_to_receive_audio to 0, so
+ // it will reject the audio m= section completely.
+ PeerConnectionInterface::RTCOfferAnswerOptions options;
+ options.offer_to_receive_audio = 0;
+ callee()->SetOfferAnswerOptions(options);
+ } else {
+ // Stopping the audio RtpTransceiver will cause the media section to be
+ // rejected in the answer.
+ callee()->SetRemoteOfferHandler([this] {
+ callee()
+ ->GetFirstTransceiverOfType(cricket::MEDIA_TYPE_AUDIO)
+ ->StopInternal();
+ });
+ }
+ callee()->AddTrack(callee()->CreateLocalVideoTrack());
+ // Do offer/answer and wait for successful end-to-end video frames.
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ MediaExpectations media_expectations;
+ media_expectations.ExpectBidirectionalVideo();
+ media_expectations.ExpectNoAudio();
+ ASSERT_TRUE(ExpectNewFrames(media_expectations));
+
+ // Sanity check that the callee's description has a rejected audio section.
+ ASSERT_NE(nullptr, callee()->pc()->local_description());
+ const ContentInfo* callee_audio_content =
+ GetFirstAudioContent(callee()->pc()->local_description()->description());
+ ASSERT_NE(nullptr, callee_audio_content);
+ EXPECT_TRUE(callee_audio_content->rejected);
+ if (sdp_semantics_ == SdpSemantics::kUnifiedPlan) {
+ // The caller's transceiver should have stopped after receiving the answer,
+ // and thus no longer listed in transceivers.
+ EXPECT_EQ(nullptr,
+ caller()->GetFirstTransceiverOfType(cricket::MEDIA_TYPE_AUDIO));
+ }
+}
+
+// Test that if the answerer rejects the video m= section, no video is sent or
+// received, but audio still can be.
+TEST_P(PeerConnectionIntegrationTest, AnswererRejectsVideoSection) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+ caller()->AddAudioVideoTracks();
+ if (sdp_semantics_ == SdpSemantics::kPlanB_DEPRECATED) {
+ // Only add audio track for callee, and set offer_to_receive_video to 0, so
+ // it will reject the video m= section completely.
+ PeerConnectionInterface::RTCOfferAnswerOptions options;
+ options.offer_to_receive_video = 0;
+ callee()->SetOfferAnswerOptions(options);
+ } else {
+ // Stopping the video RtpTransceiver will cause the media section to be
+ // rejected in the answer.
+ callee()->SetRemoteOfferHandler([this] {
+ callee()
+ ->GetFirstTransceiverOfType(cricket::MEDIA_TYPE_VIDEO)
+ ->StopInternal();
+ });
+ }
+ callee()->AddTrack(callee()->CreateLocalAudioTrack());
+ // Do offer/answer and wait for successful end-to-end audio frames.
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ MediaExpectations media_expectations;
+ media_expectations.ExpectBidirectionalAudio();
+ media_expectations.ExpectNoVideo();
+ ASSERT_TRUE(ExpectNewFrames(media_expectations));
+
+ // Sanity check that the callee's description has a rejected video section.
+ ASSERT_NE(nullptr, callee()->pc()->local_description());
+ const ContentInfo* callee_video_content =
+ GetFirstVideoContent(callee()->pc()->local_description()->description());
+ ASSERT_NE(nullptr, callee_video_content);
+ EXPECT_TRUE(callee_video_content->rejected);
+ if (sdp_semantics_ == SdpSemantics::kUnifiedPlan) {
+ // The caller's transceiver should have stopped after receiving the answer,
+ // and thus is no longer present.
+ EXPECT_EQ(nullptr,
+ caller()->GetFirstTransceiverOfType(cricket::MEDIA_TYPE_VIDEO));
+ }
+}
+
+// Test that if the answerer rejects both audio and video m= sections, nothing
+// bad happens.
+// TODO(deadbeef): Test that a data channel still works. Currently this doesn't
+// test anything but the fact that negotiation succeeds, which doesn't mean
+// much.
+TEST_P(PeerConnectionIntegrationTest, AnswererRejectsAudioAndVideoSections) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+ caller()->AddAudioVideoTracks();
+ if (sdp_semantics_ == SdpSemantics::kPlanB_DEPRECATED) {
+ // Don't give the callee any tracks, and set offer_to_receive_X to 0, so it
+ // will reject both audio and video m= sections.
+ PeerConnectionInterface::RTCOfferAnswerOptions options;
+ options.offer_to_receive_audio = 0;
+ options.offer_to_receive_video = 0;
+ callee()->SetOfferAnswerOptions(options);
+ } else {
+ callee()->SetRemoteOfferHandler([this] {
+ // Stopping all transceivers will cause all media sections to be rejected.
+ for (const auto& transceiver : callee()->pc()->GetTransceivers()) {
+ transceiver->StopInternal();
+ }
+ });
+ }
+ // Do offer/answer and wait for stable signaling state.
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+
+ // Sanity check that the callee's description has rejected m= sections.
+ ASSERT_NE(nullptr, callee()->pc()->local_description());
+ const ContentInfo* callee_audio_content =
+ GetFirstAudioContent(callee()->pc()->local_description()->description());
+ ASSERT_NE(nullptr, callee_audio_content);
+ EXPECT_TRUE(callee_audio_content->rejected);
+ const ContentInfo* callee_video_content =
+ GetFirstVideoContent(callee()->pc()->local_description()->description());
+ ASSERT_NE(nullptr, callee_video_content);
+ EXPECT_TRUE(callee_video_content->rejected);
+}
+
+// This test sets up an audio and video call between two parties. After the
+// call runs for a while, the caller sends an updated offer with video being
+// rejected. Once the re-negotiation is done, the video flow should stop and
+// the audio flow should continue.
+TEST_P(PeerConnectionIntegrationTest, VideoRejectedInSubsequentOffer) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+ caller()->AddAudioVideoTracks();
+ callee()->AddAudioVideoTracks();
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ {
+ MediaExpectations media_expectations;
+ media_expectations.ExpectBidirectionalAudioAndVideo();
+ ASSERT_TRUE(ExpectNewFrames(media_expectations));
+ }
+ // Renegotiate, rejecting the video m= section.
+ if (sdp_semantics_ == SdpSemantics::kPlanB_DEPRECATED) {
+ caller()->SetGeneratedSdpMunger(
+ [](cricket::SessionDescription* description) {
+ for (cricket::ContentInfo& content : description->contents()) {
+ if (cricket::IsVideoContent(&content)) {
+ content.rejected = true;
+ }
+ }
+ });
+ } else {
+ caller()
+ ->GetFirstTransceiverOfType(cricket::MEDIA_TYPE_VIDEO)
+ ->StopInternal();
+ }
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kMaxWaitForActivationMs);
+
+ // Sanity check that the caller's description has a rejected video section.
+ ASSERT_NE(nullptr, caller()->pc()->local_description());
+ const ContentInfo* caller_video_content =
+ GetFirstVideoContent(caller()->pc()->local_description()->description());
+ ASSERT_NE(nullptr, caller_video_content);
+ EXPECT_TRUE(caller_video_content->rejected);
+ // Wait for some additional audio frames to be received.
+ {
+ MediaExpectations media_expectations;
+ media_expectations.ExpectBidirectionalAudio();
+ media_expectations.ExpectNoVideo();
+ ASSERT_TRUE(ExpectNewFrames(media_expectations));
+ }
+}
+
+// Do one offer/answer with audio, another that disables it (rejecting the m=
+// section), and another that re-enables it. Regression test for:
+// bugs.webrtc.org/6023
+TEST_F(PeerConnectionIntegrationTestPlanB, EnableAudioAfterRejecting) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+
+ // Add audio track, do normal offer/answer.
+ rtc::scoped_refptr<webrtc::AudioTrackInterface> track =
+ caller()->CreateLocalAudioTrack();
+ rtc::scoped_refptr<webrtc::RtpSenderInterface> sender =
+ caller()->pc()->AddTrack(track, {"stream"}).MoveValue();
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+
+ // Remove audio track, and set offer_to_receive_audio to false to cause the
+ // m= section to be completely disabled, not just "recvonly".
+ caller()->pc()->RemoveTrackOrError(sender);
+ PeerConnectionInterface::RTCOfferAnswerOptions options;
+ options.offer_to_receive_audio = 0;
+ caller()->SetOfferAnswerOptions(options);
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+
+ // Add the audio track again, expecting negotiation to succeed and frames to
+ // flow.
+ sender = caller()->pc()->AddTrack(track, {"stream"}).MoveValue();
+ options.offer_to_receive_audio = 1;
+ caller()->SetOfferAnswerOptions(options);
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+
+ MediaExpectations media_expectations;
+ media_expectations.CalleeExpectsSomeAudio();
+ EXPECT_TRUE(ExpectNewFrames(media_expectations));
+}
+
+// Basic end-to-end test, but without SSRC/MSID signaling. This functionality
+// is needed to support legacy endpoints.
+// TODO(deadbeef): When we support the MID extension and demuxing on MID, also
+// add a test for an end-to-end test without MID signaling either (basically,
+// the minimum acceptable SDP).
+TEST_P(PeerConnectionIntegrationTest, EndToEndCallWithoutSsrcOrMsidSignaling) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+ // Add audio and video, testing that packets can be demuxed on payload type.
+ caller()->AddAudioVideoTracks();
+ callee()->AddAudioVideoTracks();
+ // Remove SSRCs and MSIDs from the received offer SDP.
+ callee()->SetReceivedSdpMunger(RemoveSsrcsAndMsids);
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ MediaExpectations media_expectations;
+ media_expectations.ExpectBidirectionalAudioAndVideo();
+ ASSERT_TRUE(ExpectNewFrames(media_expectations));
+}
+
+// Basic end-to-end test, without SSRC signaling. This means that the track
+// was created properly and frames are delivered when the MSIDs are communicated
+// with a=msid lines and no a=ssrc lines.
+TEST_F(PeerConnectionIntegrationTestUnifiedPlan,
+ EndToEndCallWithoutSsrcSignaling) {
+ const char kStreamId[] = "streamId";
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+ // Add just audio tracks.
+ caller()->AddTrack(caller()->CreateLocalAudioTrack(), {kStreamId});
+ callee()->AddAudioTrack();
+
+ // Remove SSRCs from the received offer SDP.
+ callee()->SetReceivedSdpMunger(RemoveSsrcsAndKeepMsids);
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ MediaExpectations media_expectations;
+ media_expectations.ExpectBidirectionalAudio();
+ ASSERT_TRUE(ExpectNewFrames(media_expectations));
+}
+
+TEST_F(PeerConnectionIntegrationTestUnifiedPlan,
+ EndToEndCallAddReceiveVideoToSendOnlyCall) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+ // Add one-directional video, from caller to callee.
+ rtc::scoped_refptr<webrtc::VideoTrackInterface> track =
+ caller()->CreateLocalVideoTrack();
+
+ RtpTransceiverInit video_transceiver_init;
+ video_transceiver_init.stream_ids = {"video1"};
+ video_transceiver_init.direction = RtpTransceiverDirection::kSendOnly;
+ auto video_sender =
+ caller()->pc()->AddTransceiver(track, video_transceiver_init).MoveValue();
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+
+ // Add receive direction.
+ video_sender->SetDirectionWithError(RtpTransceiverDirection::kSendRecv);
+
+ rtc::scoped_refptr<webrtc::VideoTrackInterface> callee_track =
+ callee()->CreateLocalVideoTrack();
+
+ callee()->AddTrack(callee_track);
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ // Ensure that video frames are received end-to-end.
+ MediaExpectations media_expectations;
+ media_expectations.ExpectBidirectionalVideo();
+ ASSERT_TRUE(ExpectNewFrames(media_expectations));
+}
+
+// Tests that video flows between multiple video tracks when SSRCs are not
+// signaled. This exercises the MID RTP header extension which is needed to
+// demux the incoming video tracks.
+TEST_F(PeerConnectionIntegrationTestUnifiedPlan,
+ EndToEndCallWithTwoVideoTracksAndNoSignaledSsrc) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+ caller()->AddVideoTrack();
+ caller()->AddVideoTrack();
+ callee()->AddVideoTrack();
+ callee()->AddVideoTrack();
+
+ caller()->SetReceivedSdpMunger(&RemoveSsrcsAndKeepMsids);
+ callee()->SetReceivedSdpMunger(&RemoveSsrcsAndKeepMsids);
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ ASSERT_EQ(2u, caller()->pc()->GetReceivers().size());
+ ASSERT_EQ(2u, callee()->pc()->GetReceivers().size());
+
+ // Expect video to be received in both directions on both tracks.
+ MediaExpectations media_expectations;
+ media_expectations.ExpectBidirectionalVideo();
+ EXPECT_TRUE(ExpectNewFrames(media_expectations));
+}
+
+// Used for the test below.
+void RemoveBundleGroupSsrcsAndMidExtension(cricket::SessionDescription* desc) {
+ RemoveSsrcsAndKeepMsids(desc);
+ desc->RemoveGroupByName("BUNDLE");
+ for (ContentInfo& content : desc->contents()) {
+ cricket::MediaContentDescription* media = content.media_description();
+ cricket::RtpHeaderExtensions extensions = media->rtp_header_extensions();
+ extensions.erase(std::remove_if(extensions.begin(), extensions.end(),
+ [](const RtpExtension& extension) {
+ return extension.uri ==
+ RtpExtension::kMidUri;
+ }),
+ extensions.end());
+ media->set_rtp_header_extensions(extensions);
+ }
+}
+
+// Tests that video flows between multiple video tracks when BUNDLE is not used,
+// SSRCs are not signaled and the MID RTP header extension is not used. This
+// relies on demuxing by payload type, which normally doesn't work if you have
+// multiple media sections using the same payload type, but which should work as
+// long as the media sections aren't bundled.
+// Regression test for: http://crbug.com/webrtc/12023
+TEST_F(PeerConnectionIntegrationTestUnifiedPlan,
+ EndToEndCallWithTwoVideoTracksNoBundleNoSignaledSsrcAndNoMid) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+ caller()->AddVideoTrack();
+ caller()->AddVideoTrack();
+ callee()->AddVideoTrack();
+ callee()->AddVideoTrack();
+ caller()->SetReceivedSdpMunger(&RemoveBundleGroupSsrcsAndMidExtension);
+ callee()->SetReceivedSdpMunger(&RemoveBundleGroupSsrcsAndMidExtension);
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ ASSERT_EQ(2u, caller()->pc()->GetReceivers().size());
+ ASSERT_EQ(2u, callee()->pc()->GetReceivers().size());
+ // Make sure we are not bundled.
+ ASSERT_NE(caller()->pc()->GetSenders()[0]->dtls_transport(),
+ caller()->pc()->GetSenders()[1]->dtls_transport());
+
+ // Expect video to be received in both directions on both tracks.
+ MediaExpectations media_expectations;
+ media_expectations.ExpectBidirectionalVideo();
+ EXPECT_TRUE(ExpectNewFrames(media_expectations));
+}
+
+// Used for the test below.
+void ModifyPayloadTypesAndRemoveMidExtension(
+ cricket::SessionDescription* desc) {
+ int pt = 96;
+ for (ContentInfo& content : desc->contents()) {
+ cricket::MediaContentDescription* media = content.media_description();
+ cricket::RtpHeaderExtensions extensions = media->rtp_header_extensions();
+ extensions.erase(std::remove_if(extensions.begin(), extensions.end(),
+ [](const RtpExtension& extension) {
+ return extension.uri ==
+ RtpExtension::kMidUri;
+ }),
+ extensions.end());
+ media->set_rtp_header_extensions(extensions);
+ cricket::VideoContentDescription* video = media->as_video();
+ ASSERT_TRUE(video != nullptr);
+ std::vector<cricket::VideoCodec> codecs = {
+ cricket::CreateVideoCodec(pt++, "VP8")};
+ video->set_codecs(codecs);
+ }
+}
+
+// Tests that two video tracks can be demultiplexed by payload type alone, by
+// using different payload types for the same codec in different m= sections.
+// This practice is discouraged but historically has been supported.
+// Regression test for: http://crbug.com/webrtc/12029
+TEST_F(PeerConnectionIntegrationTestUnifiedPlan,
+ EndToEndCallWithTwoVideoTracksDemultiplexedByPayloadType) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+ caller()->AddVideoTrack();
+ caller()->AddVideoTrack();
+ callee()->AddVideoTrack();
+ callee()->AddVideoTrack();
+ caller()->SetGeneratedSdpMunger(&ModifyPayloadTypesAndRemoveMidExtension);
+ callee()->SetGeneratedSdpMunger(&ModifyPayloadTypesAndRemoveMidExtension);
+ // We can't remove SSRCs from the generated SDP because then no send streams
+ // would be created.
+ caller()->SetReceivedSdpMunger(&RemoveSsrcsAndKeepMsids);
+ callee()->SetReceivedSdpMunger(&RemoveSsrcsAndKeepMsids);
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ ASSERT_EQ(2u, caller()->pc()->GetReceivers().size());
+ ASSERT_EQ(2u, callee()->pc()->GetReceivers().size());
+ // Make sure we are bundled.
+ ASSERT_EQ(caller()->pc()->GetSenders()[0]->dtls_transport(),
+ caller()->pc()->GetSenders()[1]->dtls_transport());
+
+ // Expect video to be received in both directions on both tracks.
+ MediaExpectations media_expectations;
+ media_expectations.ExpectBidirectionalVideo();
+ EXPECT_TRUE(ExpectNewFrames(media_expectations));
+}
+
+TEST_F(PeerConnectionIntegrationTestUnifiedPlan, NoStreamsMsidLinePresent) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+ caller()->AddAudioTrack();
+ caller()->AddVideoTrack();
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ auto callee_receivers = callee()->pc()->GetReceivers();
+ ASSERT_EQ(2u, callee_receivers.size());
+ EXPECT_TRUE(callee_receivers[0]->stream_ids().empty());
+ EXPECT_TRUE(callee_receivers[1]->stream_ids().empty());
+}
+
+TEST_F(PeerConnectionIntegrationTestUnifiedPlan, NoStreamsMsidLineMissing) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+ caller()->AddAudioTrack();
+ caller()->AddVideoTrack();
+ callee()->SetReceivedSdpMunger(RemoveSsrcsAndMsids);
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ auto callee_receivers = callee()->pc()->GetReceivers();
+ ASSERT_EQ(2u, callee_receivers.size());
+ ASSERT_EQ(1u, callee_receivers[0]->stream_ids().size());
+ ASSERT_EQ(1u, callee_receivers[1]->stream_ids().size());
+ EXPECT_EQ(callee_receivers[0]->stream_ids()[0],
+ callee_receivers[1]->stream_ids()[0]);
+ EXPECT_EQ(callee_receivers[0]->streams()[0],
+ callee_receivers[1]->streams()[0]);
+}
+
+// Test that if two video tracks are sent (from caller to callee, in this test),
+// they're transmitted correctly end-to-end.
+TEST_P(PeerConnectionIntegrationTest, EndToEndCallWithTwoVideoTracks) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+ // Add one audio/video stream, and one video-only stream.
+ caller()->AddAudioVideoTracks();
+ caller()->AddVideoTrack();
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ ASSERT_EQ(3u, callee()->pc()->GetReceivers().size());
+
+ MediaExpectations media_expectations;
+ media_expectations.CalleeExpectsSomeAudioAndVideo();
+ ASSERT_TRUE(ExpectNewFrames(media_expectations));
+}
+
+static void MakeSpecCompliantMaxBundleOffer(cricket::SessionDescription* desc) {
+ bool first = true;
+ for (cricket::ContentInfo& content : desc->contents()) {
+ if (first) {
+ first = false;
+ continue;
+ }
+ content.bundle_only = true;
+ }
+ first = true;
+ for (cricket::TransportInfo& transport : desc->transport_infos()) {
+ if (first) {
+ first = false;
+ continue;
+ }
+ transport.description.ice_ufrag.clear();
+ transport.description.ice_pwd.clear();
+ transport.description.connection_role = cricket::CONNECTIONROLE_NONE;
+ transport.description.identity_fingerprint.reset(nullptr);
+ }
+}
+
+// Test that if applying a true "max bundle" offer, which uses ports of 0,
+// "a=bundle-only", omitting "a=fingerprint", "a=setup", "a=ice-ufrag" and
+// "a=ice-pwd" for all but the audio "m=" section, negotiation still completes
+// successfully and media flows.
+// TODO(deadbeef): Update this test to also omit "a=rtcp-mux", once that works.
+// TODO(deadbeef): Won't need this test once we start generating actual
+// standards-compliant SDP.
+TEST_P(PeerConnectionIntegrationTest,
+ EndToEndCallWithSpecCompliantMaxBundleOffer) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+ caller()->AddAudioVideoTracks();
+ callee()->AddAudioVideoTracks();
+ // Do the equivalent of setting the port to 0, adding a=bundle-only, and
+ // removing a=ice-ufrag, a=ice-pwd, a=fingerprint and a=setup from all
+ // but the first m= section.
+ callee()->SetReceivedSdpMunger(MakeSpecCompliantMaxBundleOffer);
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ MediaExpectations media_expectations;
+ media_expectations.ExpectBidirectionalAudioAndVideo();
+ ASSERT_TRUE(ExpectNewFrames(media_expectations));
+}
+
+// Test that we can receive the audio output level from a remote audio track.
+// TODO(deadbeef): Use a fake audio source and verify that the output level is
+// exactly what the source on the other side was configured with.
+TEST_P(PeerConnectionIntegrationTest, GetAudioOutputLevelStatsWithOldStatsApi) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+ // Just add an audio track.
+ caller()->AddAudioTrack();
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+
+ // Get the audio output level stats. Note that the level is not available
+ // until an RTCP packet has been received.
+ EXPECT_TRUE_WAIT(callee()->OldGetStats()->AudioOutputLevel() > 0,
+ kMaxWaitForFramesMs);
+}
+
+// Test that an audio input level is reported.
+// TODO(deadbeef): Use a fake audio source and verify that the input level is
+// exactly what the source was configured with.
+TEST_P(PeerConnectionIntegrationTest, GetAudioInputLevelStatsWithOldStatsApi) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+ // Just add an audio track.
+ caller()->AddAudioTrack();
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+
+ // Get the audio input level stats. The level should be available very
+ // soon after the test starts.
+ EXPECT_TRUE_WAIT(caller()->OldGetStats()->AudioInputLevel() > 0,
+ kMaxWaitForStatsMs);
+}
+
+// Test that we can get incoming byte counts from both audio and video tracks.
+TEST_P(PeerConnectionIntegrationTest, GetBytesReceivedStatsWithOldStatsApi) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+ caller()->AddAudioVideoTracks();
+ // Do offer/answer, wait for the callee to receive some frames.
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+
+ MediaExpectations media_expectations;
+ media_expectations.CalleeExpectsSomeAudioAndVideo();
+ ASSERT_TRUE(ExpectNewFrames(media_expectations));
+
+ // Get a handle to the remote tracks created, so they can be used as GetStats
+ // filters.
+ for (const auto& receiver : callee()->pc()->GetReceivers()) {
+ // We received frames, so we definitely should have nonzero "received bytes"
+ // stats at this point.
+ EXPECT_GT(
+ callee()->OldGetStatsForTrack(receiver->track().get())->BytesReceived(),
+ 0);
+ }
+}
+
+// Test that we can get outgoing byte counts from both audio and video tracks.
+TEST_P(PeerConnectionIntegrationTest, GetBytesSentStatsWithOldStatsApi) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+ auto audio_track = caller()->CreateLocalAudioTrack();
+ auto video_track = caller()->CreateLocalVideoTrack();
+ caller()->AddTrack(audio_track);
+ caller()->AddTrack(video_track);
+ // Do offer/answer, wait for the callee to receive some frames.
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ MediaExpectations media_expectations;
+ media_expectations.CalleeExpectsSomeAudioAndVideo();
+ ASSERT_TRUE(ExpectNewFrames(media_expectations));
+
+ // The callee received frames, so we definitely should have nonzero "sent
+ // bytes" stats at this point.
+ EXPECT_GT(caller()->OldGetStatsForTrack(audio_track.get())->BytesSent(), 0);
+ EXPECT_GT(caller()->OldGetStatsForTrack(video_track.get())->BytesSent(), 0);
+}
+
+// Test that the track ID is associated with all local and remote SSRC stats
+// using the old GetStats() and more than 1 audio and more than 1 video track.
+// This is a regression test for crbug.com/906988
+TEST_F(PeerConnectionIntegrationTestUnifiedPlan,
+ OldGetStatsAssociatesTrackIdForManyMediaSections) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+ auto audio_sender_1 = caller()->AddAudioTrack();
+ auto video_sender_1 = caller()->AddVideoTrack();
+ auto audio_sender_2 = caller()->AddAudioTrack();
+ auto video_sender_2 = caller()->AddVideoTrack();
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+
+ MediaExpectations media_expectations;
+ media_expectations.CalleeExpectsSomeAudioAndVideo();
+ ASSERT_TRUE_WAIT(ExpectNewFrames(media_expectations), kDefaultTimeout);
+
+ std::vector<std::string> track_ids = {
+ audio_sender_1->track()->id(), video_sender_1->track()->id(),
+ audio_sender_2->track()->id(), video_sender_2->track()->id()};
+
+ auto caller_stats = caller()->OldGetStats();
+ EXPECT_THAT(caller_stats->TrackIds(), UnorderedElementsAreArray(track_ids));
+ auto callee_stats = callee()->OldGetStats();
+ EXPECT_THAT(callee_stats->TrackIds(), UnorderedElementsAreArray(track_ids));
+}
+
+// Test that the new GetStats() returns stats for all outgoing/incoming streams
+// with the correct track identifiers if there are more than one audio and more
+// than one video senders/receivers.
+TEST_P(PeerConnectionIntegrationTest, NewGetStatsManyAudioAndManyVideoStreams) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+ auto audio_sender_1 = caller()->AddAudioTrack();
+ auto video_sender_1 = caller()->AddVideoTrack();
+ auto audio_sender_2 = caller()->AddAudioTrack();
+ auto video_sender_2 = caller()->AddVideoTrack();
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+
+ MediaExpectations media_expectations;
+ media_expectations.CalleeExpectsSomeAudioAndVideo();
+ ASSERT_TRUE_WAIT(ExpectNewFrames(media_expectations), kDefaultTimeout);
+
+ std::vector<std::string> track_ids = {
+ audio_sender_1->track()->id(), video_sender_1->track()->id(),
+ audio_sender_2->track()->id(), video_sender_2->track()->id()};
+
+ rtc::scoped_refptr<const webrtc::RTCStatsReport> caller_report =
+ caller()->NewGetStats();
+ ASSERT_TRUE(caller_report);
+ auto outbound_stream_stats =
+ caller_report->GetStatsOfType<webrtc::RTCOutboundRtpStreamStats>();
+ ASSERT_EQ(outbound_stream_stats.size(), 4u);
+ std::vector<std::string> outbound_track_ids;
+ for (const auto& stat : outbound_stream_stats) {
+ ASSERT_TRUE(stat->bytes_sent.is_defined());
+ EXPECT_LT(0u, *stat->bytes_sent);
+ if (*stat->kind == "video") {
+ ASSERT_TRUE(stat->key_frames_encoded.is_defined());
+ EXPECT_GT(*stat->key_frames_encoded, 0u);
+ ASSERT_TRUE(stat->frames_encoded.is_defined());
+ EXPECT_GE(*stat->frames_encoded, *stat->key_frames_encoded);
+ }
+ ASSERT_TRUE(stat->media_source_id.is_defined());
+ const RTCMediaSourceStats* media_source =
+ static_cast<const RTCMediaSourceStats*>(
+ caller_report->Get(*stat->media_source_id));
+ ASSERT_TRUE(media_source);
+ outbound_track_ids.push_back(*media_source->track_identifier);
+ }
+ EXPECT_THAT(outbound_track_ids, UnorderedElementsAreArray(track_ids));
+
+ rtc::scoped_refptr<const webrtc::RTCStatsReport> callee_report =
+ callee()->NewGetStats();
+ ASSERT_TRUE(callee_report);
+ auto inbound_stream_stats =
+ callee_report->GetStatsOfType<webrtc::RTCInboundRtpStreamStats>();
+ ASSERT_EQ(4u, inbound_stream_stats.size());
+ std::vector<std::string> inbound_track_ids;
+ for (const auto& stat : inbound_stream_stats) {
+ ASSERT_TRUE(stat->bytes_received.is_defined());
+ EXPECT_LT(0u, *stat->bytes_received);
+ if (*stat->kind == "video") {
+ ASSERT_TRUE(stat->key_frames_decoded.is_defined());
+ EXPECT_GT(*stat->key_frames_decoded, 0u);
+ ASSERT_TRUE(stat->frames_decoded.is_defined());
+ EXPECT_GE(*stat->frames_decoded, *stat->key_frames_decoded);
+ }
+ inbound_track_ids.push_back(*stat->track_identifier);
+ }
+ EXPECT_THAT(inbound_track_ids, UnorderedElementsAreArray(track_ids));
+}
+
+// Test that we can get stats (using the new stats implementation) for
+// unsignaled streams. Meaning when SSRCs/MSIDs aren't signaled explicitly in
+// SDP.
+TEST_P(PeerConnectionIntegrationTest,
+ GetStatsForUnsignaledStreamWithNewStatsApi) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+ caller()->AddAudioTrack();
+ // Remove SSRCs and MSIDs from the received offer SDP.
+ callee()->SetReceivedSdpMunger(RemoveSsrcsAndMsids);
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ MediaExpectations media_expectations;
+ media_expectations.CalleeExpectsSomeAudio(1);
+ ASSERT_TRUE(ExpectNewFrames(media_expectations));
+
+ // We received a frame, so we should have nonzero "bytes received" stats for
+ // the unsignaled stream, if stats are working for it.
+ rtc::scoped_refptr<const webrtc::RTCStatsReport> report =
+ callee()->NewGetStats();
+ ASSERT_NE(nullptr, report);
+ auto inbound_stream_stats =
+ report->GetStatsOfType<webrtc::RTCInboundRtpStreamStats>();
+ ASSERT_EQ(1U, inbound_stream_stats.size());
+ ASSERT_TRUE(inbound_stream_stats[0]->bytes_received.is_defined());
+ ASSERT_GT(*inbound_stream_stats[0]->bytes_received, 0U);
+}
+
+// Same as above but for the legacy stats implementation.
+TEST_P(PeerConnectionIntegrationTest,
+ GetStatsForUnsignaledStreamWithOldStatsApi) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+ caller()->AddAudioTrack();
+ // Remove SSRCs and MSIDs from the received offer SDP.
+ callee()->SetReceivedSdpMunger(RemoveSsrcsAndMsids);
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+
+ // Note that, since the old stats implementation associates SSRCs with tracks
+ // using SDP, when SSRCs aren't signaled in SDP these stats won't have an
+ // associated track ID. So we can't use the track "selector" argument.
+ //
+ // Also, we use "EXPECT_TRUE_WAIT" because the stats collector may decide to
+ // return cached stats if not enough time has passed since the last update.
+ EXPECT_TRUE_WAIT(callee()->OldGetStats()->BytesReceived() > 0,
+ kDefaultTimeout);
+}
+
+// Test that we can successfully get the media related stats (audio level
+// etc.) for the unsignaled stream.
+TEST_P(PeerConnectionIntegrationTest,
+ GetMediaStatsForUnsignaledStreamWithNewStatsApi) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+ caller()->AddAudioVideoTracks();
+ // Remove SSRCs and MSIDs from the received offer SDP.
+ callee()->SetReceivedSdpMunger(RemoveSsrcsAndMsids);
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ MediaExpectations media_expectations;
+ media_expectations.CalleeExpectsSomeAudio(1);
+ media_expectations.CalleeExpectsSomeVideo(1);
+ ASSERT_TRUE(ExpectNewFrames(media_expectations));
+
+ rtc::scoped_refptr<const webrtc::RTCStatsReport> report =
+ callee()->NewGetStats();
+ ASSERT_NE(nullptr, report);
+
+ auto inbound_rtps =
+ report->GetStatsOfType<webrtc::RTCInboundRtpStreamStats>();
+ auto index = FindFirstMediaStatsIndexByKind("audio", inbound_rtps);
+ ASSERT_GE(index, 0);
+ EXPECT_TRUE(inbound_rtps[index]->audio_level.is_defined());
+}
+
+// Test that DTLS 1.0 is used if both sides only support DTLS 1.0.
+TEST_P(PeerConnectionIntegrationTest, EndToEndCallWithDtls10) {
+ PeerConnectionFactory::Options dtls_10_options;
+ dtls_10_options.ssl_max_version = rtc::SSL_PROTOCOL_DTLS_10;
+ ASSERT_TRUE(CreatePeerConnectionWrappersWithOptions(dtls_10_options,
+ dtls_10_options));
+ ConnectFakeSignaling();
+ // Do normal offer/answer and wait for some frames to be received in each
+ // direction.
+ caller()->AddAudioVideoTracks();
+ callee()->AddAudioVideoTracks();
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ MediaExpectations media_expectations;
+ media_expectations.ExpectBidirectionalAudioAndVideo();
+ ASSERT_TRUE(ExpectNewFrames(media_expectations));
+}
+
+// Test getting cipher stats and UMA metrics when DTLS 1.0 is negotiated.
+TEST_P(PeerConnectionIntegrationTest, Dtls10CipherStatsAndUmaMetrics) {
+ PeerConnectionFactory::Options dtls_10_options;
+ dtls_10_options.ssl_max_version = rtc::SSL_PROTOCOL_DTLS_10;
+ ASSERT_TRUE(CreatePeerConnectionWrappersWithOptions(dtls_10_options,
+ dtls_10_options));
+ ConnectFakeSignaling();
+ caller()->AddAudioVideoTracks();
+ callee()->AddAudioVideoTracks();
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(DtlsConnected(), kDefaultTimeout);
+ EXPECT_TRUE_WAIT(rtc::SSLStreamAdapter::IsAcceptableCipher(
+ caller()->OldGetStats()->DtlsCipher(), rtc::KT_DEFAULT),
+ kDefaultTimeout);
+ EXPECT_EQ_WAIT(rtc::SrtpCryptoSuiteToName(kDefaultSrtpCryptoSuite),
+ caller()->OldGetStats()->SrtpCipher(), kDefaultTimeout);
+}
+
+// Test getting cipher stats and UMA metrics when DTLS 1.2 is negotiated.
+TEST_P(PeerConnectionIntegrationTest, Dtls12CipherStatsAndUmaMetrics) {
+ PeerConnectionFactory::Options dtls_12_options;
+ dtls_12_options.ssl_max_version = rtc::SSL_PROTOCOL_DTLS_12;
+ ASSERT_TRUE(CreatePeerConnectionWrappersWithOptions(dtls_12_options,
+ dtls_12_options));
+ ConnectFakeSignaling();
+ caller()->AddAudioVideoTracks();
+ callee()->AddAudioVideoTracks();
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(DtlsConnected(), kDefaultTimeout);
+ EXPECT_TRUE_WAIT(rtc::SSLStreamAdapter::IsAcceptableCipher(
+ caller()->OldGetStats()->DtlsCipher(), rtc::KT_DEFAULT),
+ kDefaultTimeout);
+ EXPECT_EQ_WAIT(rtc::SrtpCryptoSuiteToName(kDefaultSrtpCryptoSuite),
+ caller()->OldGetStats()->SrtpCipher(), kDefaultTimeout);
+}
+
+// Verify that a DTLS 1.2-capable caller can still negotiate with a callee
+// whose maximum version is DTLS 1.0; the session settles on 1.0 and media
+// flows normally.
+TEST_P(PeerConnectionIntegrationTest, CallerDtls12ToCalleeDtls10) {
+  PeerConnectionFactory::Options offerer_options;
+  PeerConnectionFactory::Options answerer_options;
+  offerer_options.ssl_max_version = rtc::SSL_PROTOCOL_DTLS_12;
+  answerer_options.ssl_max_version = rtc::SSL_PROTOCOL_DTLS_10;
+  ASSERT_TRUE(CreatePeerConnectionWrappersWithOptions(offerer_options,
+                                                      answerer_options));
+  ConnectFakeSignaling();
+  // Run a standard offer/answer exchange, then confirm frames arrive in both
+  // directions despite the DTLS version mismatch.
+  caller()->AddAudioVideoTracks();
+  callee()->AddAudioVideoTracks();
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  MediaExpectations expectations;
+  expectations.ExpectBidirectionalAudioAndVideo();
+  ASSERT_TRUE(ExpectNewFrames(expectations));
+}
+
+// Verify the mirror case: a DTLS 1.0-only caller negotiating with a callee
+// that supports up to DTLS 1.2 still gets a working 1.0 session.
+TEST_P(PeerConnectionIntegrationTest, CallerDtls10ToCalleeDtls12) {
+  PeerConnectionFactory::Options offerer_options;
+  PeerConnectionFactory::Options answerer_options;
+  offerer_options.ssl_max_version = rtc::SSL_PROTOCOL_DTLS_10;
+  answerer_options.ssl_max_version = rtc::SSL_PROTOCOL_DTLS_12;
+  ASSERT_TRUE(CreatePeerConnectionWrappersWithOptions(offerer_options,
+                                                      answerer_options));
+  ConnectFakeSignaling();
+  // Run a standard offer/answer exchange, then check that audio and video
+  // frames are received in both directions.
+  caller()->AddAudioVideoTracks();
+  callee()->AddAudioVideoTracks();
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  MediaExpectations expectations;
+  expectations.ExpectBidirectionalAudioAndVideo();
+  ASSERT_TRUE(ExpectNewFrames(expectations));
+}
+
+// The three tests below verify that "enable_aes128_sha1_32_crypto_cipher"
+// behaves as intended: the 32-bit-tag cipher may only be negotiated when
+// both endpoints opt in.
+TEST_P(PeerConnectionIntegrationTest,
+       Aes128Sha1_32_CipherNotUsedWhenOnlyCallerSupported) {
+  PeerConnectionFactory::Options offerer_options;
+  PeerConnectionFactory::Options answerer_options;
+  offerer_options.crypto_options.srtp.enable_aes128_sha1_32_crypto_cipher =
+      true;
+  answerer_options.crypto_options.srtp.enable_aes128_sha1_32_crypto_cipher =
+      false;
+  // With only the caller opting in, negotiation must fall back to the
+  // default 80-bit-tag suite.
+  TestNegotiatedCipherSuite(offerer_options, answerer_options,
+                            rtc::kSrtpAes128CmSha1_80);
+}
+
+// Companion to the test above: opting in on the callee side alone must not
+// select the 32-bit-tag cipher either.
+TEST_P(PeerConnectionIntegrationTest,
+       Aes128Sha1_32_CipherNotUsedWhenOnlyCalleeSupported) {
+  PeerConnectionFactory::Options offerer_options;
+  PeerConnectionFactory::Options answerer_options;
+  offerer_options.crypto_options.srtp.enable_aes128_sha1_32_crypto_cipher =
+      false;
+  answerer_options.crypto_options.srtp.enable_aes128_sha1_32_crypto_cipher =
+      true;
+  // Only the callee opted in, so the default 80-bit-tag suite must win.
+  TestNegotiatedCipherSuite(offerer_options, answerer_options,
+                            rtc::kSrtpAes128CmSha1_80);
+}
+
+// When both endpoints enable the 32-bit-tag cipher it should actually be
+// selected by the negotiation.
+TEST_P(PeerConnectionIntegrationTest, Aes128Sha1_32_CipherUsedWhenSupported) {
+  PeerConnectionFactory::Options offerer_options;
+  PeerConnectionFactory::Options answerer_options;
+  offerer_options.crypto_options.srtp.enable_aes128_sha1_32_crypto_cipher =
+      true;
+  answerer_options.crypto_options.srtp.enable_aes128_sha1_32_crypto_cipher =
+      true;
+  // Both sides opted in, so the 32-bit-tag suite should be negotiated.
+  TestNegotiatedCipherSuite(offerer_options, answerer_options,
+                            rtc::kSrtpAes128CmSha1_32);
+}
+
+// Test that a non-GCM cipher is used if both sides only support non-GCM.
+TEST_P(PeerConnectionIntegrationTest, NonGcmCipherUsedWhenGcmNotSupported) {
+  // GCM is disabled on both ends while AES-CTR remains available, so the
+  // negotiated suite must be the non-GCM default.
+  const bool local_gcm_enabled = false;
+  const bool remote_gcm_enabled = false;
+  const bool aes_ctr_enabled = true;
+  TestGcmNegotiationUsesCipherSuite(local_gcm_enabled, remote_gcm_enabled,
+                                    aes_ctr_enabled, kDefaultSrtpCryptoSuite);
+}
+
+// Test that a GCM cipher is used if both ends support it and non-GCM is
+// disabled.
+TEST_P(PeerConnectionIntegrationTest, GcmCipherUsedWhenOnlyGcmSupported) {
+  // With AES-CTR switched off and GCM enabled on both ends, negotiation has
+  // to land on the default GCM suite.
+  const bool local_gcm_enabled = true;
+  const bool remote_gcm_enabled = true;
+  const bool aes_ctr_enabled = false;
+  TestGcmNegotiationUsesCipherSuite(local_gcm_enabled, remote_gcm_enabled,
+                                    aes_ctr_enabled,
+                                    kDefaultSrtpCryptoSuiteGcm);
+}
+
+// Verify that media can be transmitted end-to-end when GCM crypto suites are
+// enabled. Note that the above tests, such as GcmCipherUsedWhenGcmSupported,
+// only verify that a GCM cipher is negotiated, and not necessarily that SRTP
+// works with it.
+TEST_P(PeerConnectionIntegrationTest, EndToEndCallWithGcmCipher) {
+  // Disabling AES128-SHA1-80 forces the GCM suite to be the one actually
+  // used, not merely offered.
+  PeerConnectionFactory::Options gcm_options;
+  gcm_options.crypto_options.srtp.enable_gcm_crypto_suites = true;
+  gcm_options.crypto_options.srtp.enable_aes128_sha1_80_crypto_cipher = false;
+  ASSERT_TRUE(
+      CreatePeerConnectionWrappersWithOptions(gcm_options, gcm_options));
+  ConnectFakeSignaling();
+  // Do normal offer/answer and wait for some frames to be received in each
+  // direction.
+  caller()->AddAudioVideoTracks();
+  callee()->AddAudioVideoTracks();
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  MediaExpectations media_expectations;
+  media_expectations.ExpectBidirectionalAudioAndVideo();
+  ASSERT_TRUE(ExpectNewFrames(media_expectations));
+}
+
+// Test that the ICE connection and gathering states eventually reach
+// "complete".
+TEST_P(PeerConnectionIntegrationTest, IceStatesReachCompletion) {
+  ASSERT_TRUE(CreatePeerConnectionWrappers());
+  ConnectFakeSignaling();
+  // Do normal offer/answer.
+  caller()->AddAudioVideoTracks();
+  callee()->AddAudioVideoTracks();
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  // Gathering must finish on both sides before the connection checks below.
+  EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceGatheringComplete,
+                 caller()->ice_gathering_state(), kMaxWaitForFramesMs);
+  EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceGatheringComplete,
+                 callee()->ice_gathering_state(), kMaxWaitForFramesMs);
+  // After the best candidate pair is selected and all candidates are signaled,
+  // the ICE connection state should reach "complete".
+  // TODO(deadbeef): Currently, the ICE "controlled" agent (the
+  // answerer/"callee" by default) only reaches "connected". When this is
+  // fixed, this test should be updated.
+  EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionCompleted,
+                 caller()->ice_connection_state(), kDefaultTimeout);
+  EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionConnected,
+                 callee()->ice_connection_state(), kDefaultTimeout);
+}
+
+// Port-allocator flags that restrict candidate gathering to local UDP host
+// candidates only: no STUN, no TURN relay, no TCP.
+constexpr int kOnlyLocalPorts = cricket::PORTALLOCATOR_DISABLE_STUN |
+                                cricket::PORTALLOCATOR_DISABLE_RELAY |
+                                cricket::PORTALLOCATOR_DISABLE_TCP;
+
+// Use a mock resolver to resolve the hostname back to the original IP on both
+// sides and check that the ICE connection connects.
+TEST_P(PeerConnectionIntegrationTest,
+       IceStatesReachCompletionWithRemoteHostname) {
+  auto caller_resolver_factory =
+      std::make_unique<NiceMock<webrtc::MockAsyncDnsResolverFactory>>();
+  auto callee_resolver_factory =
+      std::make_unique<NiceMock<webrtc::MockAsyncDnsResolverFactory>>();
+  auto callee_async_resolver =
+      std::make_unique<NiceMock<MockAsyncDnsResolver>>();
+  auto caller_async_resolver =
+      std::make_unique<NiceMock<MockAsyncDnsResolver>>();
+  // Keep raw pointers to the mock resolvers, for use after init,
+  // where the std::unique_ptr values have been moved away.
+  auto* callee_resolver_ptr = callee_async_resolver.get();
+  auto* caller_resolver_ptr = caller_async_resolver.get();
+
+  // This also verifies that the injected AsyncResolverFactory is used by
+  // P2PTransportChannel.
+  EXPECT_CALL(*caller_resolver_factory, Create())
+      .WillOnce(Return(ByMove(std::move(caller_async_resolver))));
+  webrtc::PeerConnectionDependencies caller_deps(nullptr);
+  caller_deps.async_dns_resolver_factory = std::move(caller_resolver_factory);
+
+  EXPECT_CALL(*callee_resolver_factory, Create())
+      .WillOnce(Return(ByMove(std::move(callee_async_resolver))));
+  webrtc::PeerConnectionDependencies callee_deps(nullptr);
+  callee_deps.async_dns_resolver_factory = std::move(callee_resolver_factory);
+
+  // Max-bundle plus required RTCP muxing keeps the test to one transport.
+  PeerConnectionInterface::RTCConfiguration config;
+  config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxBundle;
+  config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyRequire;
+
+  ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndDeps(
+      config, std::move(caller_deps), config, std::move(callee_deps)));
+
+  // The unique_ptr resolvers were moved into the factory expectations above;
+  // from here on only the raw pointers captured earlier are meaningful.
+  caller()->SetRemoteAsyncResolver(callee_resolver_ptr);
+  callee()->SetRemoteAsyncResolver(caller_resolver_ptr);
+
+  // Enable hostname candidates with mDNS names.
+  caller()->SetMdnsResponder(
+      std::make_unique<webrtc::FakeMdnsResponder>(network_thread()));
+  callee()->SetMdnsResponder(
+      std::make_unique<webrtc::FakeMdnsResponder>(network_thread()));
+
+  SetPortAllocatorFlags(kOnlyLocalPorts, kOnlyLocalPorts);
+
+  ConnectFakeSignaling();
+  caller()->AddAudioVideoTracks();
+  callee()->AddAudioVideoTracks();
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionCompleted,
+                 caller()->ice_connection_state(), kDefaultTimeout);
+  EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionConnected,
+                 callee()->ice_connection_state(), kDefaultTimeout);
+
+  // Part of reporting the stats will occur on the network thread, so flush it
+  // before checking NumEvents.
+  SendTask(network_thread(), [] {});
+
+  EXPECT_METRIC_EQ(1, webrtc::metrics::NumEvents(
+                          "WebRTC.PeerConnection.CandidatePairType_UDP",
+                          webrtc::kIceCandidatePairHostNameHostName));
+  DestroyPeerConnections();
+}
+
+// Test that firewalling the ICE connection causes the clients to identify the
+// disconnected state and then removing the firewall causes them to reconnect.
+class PeerConnectionIntegrationIceStatesTest
+    : public PeerConnectionIntegrationBaseTest,
+      public ::testing::WithParamInterface<
+          std::tuple<SdpSemantics, std::tuple<std::string, uint32_t>>> {
+ protected:
+  // The second tuple element pairs a human-readable label with the
+  // port-allocator flags to use for this test instance.
+  PeerConnectionIntegrationIceStatesTest()
+      : PeerConnectionIntegrationBaseTest(std::get<0>(GetParam())) {
+    port_allocator_flags_ = std::get<1>(std::get<1>(GetParam()));
+  }
+
+  // Starts a test STUN server that is reachable through the test firewall.
+  void StartStunServer(const SocketAddress& server_address) {
+    stun_server_.reset(
+        cricket::TestStunServer::Create(firewall(), server_address));
+  }
+
+  // True when the parameterized flags enable IPv6 candidate gathering.
+  bool TestIPv6() {
+    return (port_allocator_flags_ & cricket::PORTALLOCATOR_ENABLE_IPV6);
+  }
+
+  // Applies the parameterized flags to both the caller and the callee.
+  void SetPortAllocatorFlags() {
+    PeerConnectionIntegrationBaseTest::SetPortAllocatorFlags(
+        port_allocator_flags_, port_allocator_flags_);
+  }
+
+  // Caller-side interface addresses; IPv6 is added only when enabled.
+  std::vector<SocketAddress> CallerAddresses() {
+    std::vector<SocketAddress> addresses;
+    addresses.push_back(SocketAddress("1.1.1.1", 0));
+    if (TestIPv6()) {
+      addresses.push_back(SocketAddress("1111:0:a:b:c:d:e:f", 0));
+    }
+    return addresses;
+  }
+
+  // Callee-side interface addresses; IPv6 is added only when enabled.
+  std::vector<SocketAddress> CalleeAddresses() {
+    std::vector<SocketAddress> addresses;
+    addresses.push_back(SocketAddress("2.2.2.2", 0));
+    if (TestIPv6()) {
+      addresses.push_back(SocketAddress("2222:0:a:b:c:d:e:f", 0));
+    }
+    return addresses;
+  }
+
+  void SetUpNetworkInterfaces() {
+    // Remove the default interfaces added by the test infrastructure.
+    caller()->network_manager()->RemoveInterface(kDefaultLocalAddress);
+    callee()->network_manager()->RemoveInterface(kDefaultLocalAddress);
+
+    // Add network addresses for test.
+    for (const auto& caller_address : CallerAddresses()) {
+      caller()->network_manager()->AddInterface(caller_address);
+    }
+    for (const auto& callee_address : CalleeAddresses()) {
+      callee()->network_manager()->AddInterface(callee_address);
+    }
+  }
+
+ private:
+  uint32_t port_allocator_flags_;
+  std::unique_ptr<cricket::TestStunServer> stun_server_;
+};
+
+// Ensure FakeClockForTest is constructed first (see class for rationale).
+// Combines the ICE-states parameterization above with a fake clock so tests
+// can use the *_SIMULATED_WAIT macros.
+class PeerConnectionIntegrationIceStatesTestWithFakeClock
+    : public FakeClockForTest,
+      public PeerConnectionIntegrationIceStatesTest {};
+
+#if !defined(THREAD_SANITIZER)
+// This test provokes TSAN errors. bugs.webrtc.org/11282
+
+// Tests that if the connection doesn't get set up properly we eventually reach
+// the "failed" iceConnectionState.
+TEST_P(PeerConnectionIntegrationIceStatesTestWithFakeClock,
+       IceStateSetupFailure) {
+  // Block connections to/from the caller and wait for ICE to become
+  // disconnected.
+  for (const auto& caller_address : CallerAddresses()) {
+    firewall()->AddRule(false, rtc::FP_ANY, rtc::FD_ANY, caller_address);
+  }
+
+  ASSERT_TRUE(CreatePeerConnectionWrappers());
+  ConnectFakeSignaling();
+  SetPortAllocatorFlags();
+  SetUpNetworkInterfaces();
+  // Only the caller sends media; the blocked path means no checks succeed.
+  caller()->AddAudioVideoTracks();
+  caller()->CreateAndSetAndSignalOffer();
+
+  // According to RFC7675, if there is no response within 30 seconds then the
+  // peer should consider the other side to have rejected the connection. This
+  // is signaled by the state transitioning to "failed".
+  constexpr int kConsentTimeout = 30000;
+  ASSERT_EQ_SIMULATED_WAIT(PeerConnectionInterface::kIceConnectionFailed,
+                           caller()->standardized_ice_connection_state(),
+                           kConsentTimeout, FakeClock());
+}
+
+#endif  // !defined(THREAD_SANITIZER)
+
+// Tests that the best connection is set to the appropriate IPv4/IPv6 connection
+// and that the statistics in the metric observers are updated correctly.
+// TODO(bugs.webrtc.org/12591): Flaky on Windows.
+#if defined(WEBRTC_WIN)
+#define MAYBE_VerifyBestConnection DISABLED_VerifyBestConnection
+#else
+#define MAYBE_VerifyBestConnection VerifyBestConnection
+#endif
+TEST_P(PeerConnectionIntegrationIceStatesTest, MAYBE_VerifyBestConnection) {
+  ASSERT_TRUE(CreatePeerConnectionWrappers());
+  ConnectFakeSignaling();
+  SetPortAllocatorFlags();
+  SetUpNetworkInterfaces();
+  caller()->AddAudioVideoTracks();
+  callee()->AddAudioVideoTracks();
+  caller()->CreateAndSetAndSignalOffer();
+
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionCompleted,
+                 caller()->ice_connection_state(), kDefaultTimeout);
+  EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionConnected,
+                 callee()->ice_connection_state(), kDefaultTimeout);
+
+  // Part of reporting the stats will occur on the network thread, so flush it
+  // before checking NumEvents.
+  SendTask(network_thread(), [] {});
+
+  // TODO(bugs.webrtc.org/9456): Fix it.
+  const int num_best_ipv4 = webrtc::metrics::NumEvents(
+      "WebRTC.PeerConnection.IPMetrics", webrtc::kBestConnections_IPv4);
+  const int num_best_ipv6 = webrtc::metrics::NumEvents(
+      "WebRTC.PeerConnection.IPMetrics", webrtc::kBestConnections_IPv6);
+  if (TestIPv6()) {
+    // When IPv6 is enabled, we should prefer an IPv6 connection over an IPv4
+    // connection.
+    EXPECT_METRIC_EQ(0, num_best_ipv4);
+    EXPECT_METRIC_EQ(1, num_best_ipv6);
+  } else {
+    EXPECT_METRIC_EQ(1, num_best_ipv4);
+    EXPECT_METRIC_EQ(0, num_best_ipv6);
+  }
+
+  // Both sides use host candidates here, so the UDP candidate-pair metric
+  // should record a host<->host (public) pairing and nothing else.
+  EXPECT_METRIC_EQ(0, webrtc::metrics::NumEvents(
+                          "WebRTC.PeerConnection.CandidatePairType_UDP",
+                          webrtc::kIceCandidatePairHostHost));
+  EXPECT_METRIC_EQ(1, webrtc::metrics::NumEvents(
+                          "WebRTC.PeerConnection.CandidatePairType_UDP",
+                          webrtc::kIceCandidatePairHostPublicHostPublic));
+}
+
+// IPv4-only host candidates; no STUN, relay, or TCP.
+constexpr uint32_t kFlagsIPv4NoStun = cricket::PORTALLOCATOR_DISABLE_TCP |
+                                      cricket::PORTALLOCATOR_DISABLE_STUN |
+                                      cricket::PORTALLOCATOR_DISABLE_RELAY;
+// IPv4+IPv6 host candidates; no STUN, relay, or TCP.
+constexpr uint32_t kFlagsIPv6NoStun =
+    cricket::PORTALLOCATOR_DISABLE_TCP | cricket::PORTALLOCATOR_DISABLE_STUN |
+    cricket::PORTALLOCATOR_ENABLE_IPV6 | cricket::PORTALLOCATOR_DISABLE_RELAY;
+// IPv4 host candidates with STUN enabled; no relay or TCP.
+constexpr uint32_t kFlagsIPv4Stun =
+    cricket::PORTALLOCATOR_DISABLE_TCP | cricket::PORTALLOCATOR_DISABLE_RELAY;
+
+// Run every ICE-states test under both SDP semantics and each of the three
+// port-allocator configurations above.
+INSTANTIATE_TEST_SUITE_P(
+    PeerConnectionIntegrationTest,
+    PeerConnectionIntegrationIceStatesTest,
+    Combine(Values(SdpSemantics::kPlanB_DEPRECATED, SdpSemantics::kUnifiedPlan),
+            Values(std::make_pair("IPv4 no STUN", kFlagsIPv4NoStun),
+                   std::make_pair("IPv6 no STUN", kFlagsIPv6NoStun),
+                   std::make_pair("IPv4 with STUN", kFlagsIPv4Stun))));
+
+// Same matrix for the fake-clock variant of the fixture.
+INSTANTIATE_TEST_SUITE_P(
+    PeerConnectionIntegrationTest,
+    PeerConnectionIntegrationIceStatesTestWithFakeClock,
+    Combine(Values(SdpSemantics::kPlanB_DEPRECATED, SdpSemantics::kUnifiedPlan),
+            Values(std::make_pair("IPv4 no STUN", kFlagsIPv4NoStun),
+                   std::make_pair("IPv6 no STUN", kFlagsIPv6NoStun),
+                   std::make_pair("IPv4 with STUN", kFlagsIPv4Stun))));
+
+// This test sets up a call between two parties with audio and video.
+// During the call, the caller restarts ICE and the test verifies that
+// new ICE candidates are generated and audio and video still can flow, and the
+// ICE state reaches completed again.
+TEST_P(PeerConnectionIntegrationTest, MediaContinuesFlowingAfterIceRestart) {
+  ASSERT_TRUE(CreatePeerConnectionWrappers());
+  ConnectFakeSignaling();
+  // Do normal offer/answer and wait for ICE to complete.
+  caller()->AddAudioVideoTracks();
+  callee()->AddAudioVideoTracks();
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionCompleted,
+                 caller()->ice_connection_state(), kMaxWaitForFramesMs);
+  EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionConnected,
+                 callee()->ice_connection_state(), kMaxWaitForFramesMs);
+
+  // To verify that the ICE restart actually occurs, get
+  // ufrag/password/candidates before and after restart.
+  // Create an SDP string of the first audio candidate for both clients.
+  const webrtc::IceCandidateCollection* audio_candidates_caller =
+      caller()->pc()->local_description()->candidates(0);
+  const webrtc::IceCandidateCollection* audio_candidates_callee =
+      callee()->pc()->local_description()->candidates(0);
+  ASSERT_GT(audio_candidates_caller->count(), 0u);
+  ASSERT_GT(audio_candidates_callee->count(), 0u);
+  std::string caller_candidate_pre_restart;
+  ASSERT_TRUE(
+      audio_candidates_caller->at(0)->ToString(&caller_candidate_pre_restart));
+  std::string callee_candidate_pre_restart;
+  ASSERT_TRUE(
+      audio_candidates_callee->at(0)->ToString(&callee_candidate_pre_restart));
+  const cricket::SessionDescription* desc =
+      caller()->pc()->local_description()->description();
+  std::string caller_ufrag_pre_restart =
+      desc->transport_infos()[0].description.ice_ufrag;
+  desc = callee()->pc()->local_description()->description();
+  std::string callee_ufrag_pre_restart =
+      desc->transport_infos()[0].description.ice_ufrag;
+
+  // The initial negotiation should have produced exactly one selected
+  // candidate-pair change so far.
+  EXPECT_EQ(caller()->ice_candidate_pair_change_history().size(), 1u);
+  // Have the caller initiate an ICE restart.
+  caller()->SetOfferAnswerOptions(IceRestartOfferAnswerOptions());
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionCompleted,
+                 caller()->ice_connection_state(), kMaxWaitForFramesMs);
+  EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionConnected,
+                 callee()->ice_connection_state(), kMaxWaitForFramesMs);
+
+  // Grab the ufrags/candidates again.
+  audio_candidates_caller = caller()->pc()->local_description()->candidates(0);
+  audio_candidates_callee = callee()->pc()->local_description()->candidates(0);
+  ASSERT_GT(audio_candidates_caller->count(), 0u);
+  ASSERT_GT(audio_candidates_callee->count(), 0u);
+  std::string caller_candidate_post_restart;
+  ASSERT_TRUE(
+      audio_candidates_caller->at(0)->ToString(&caller_candidate_post_restart));
+  std::string callee_candidate_post_restart;
+  ASSERT_TRUE(
+      audio_candidates_callee->at(0)->ToString(&callee_candidate_post_restart));
+  desc = caller()->pc()->local_description()->description();
+  std::string caller_ufrag_post_restart =
+      desc->transport_infos()[0].description.ice_ufrag;
+  desc = callee()->pc()->local_description()->description();
+  std::string callee_ufrag_post_restart =
+      desc->transport_infos()[0].description.ice_ufrag;
+  // Sanity check that an ICE restart was actually negotiated in SDP.
+  ASSERT_NE(caller_candidate_pre_restart, caller_candidate_post_restart);
+  ASSERT_NE(callee_candidate_pre_restart, callee_candidate_post_restart);
+  ASSERT_NE(caller_ufrag_pre_restart, caller_ufrag_post_restart);
+  ASSERT_NE(callee_ufrag_pre_restart, callee_ufrag_post_restart);
+  // The restart should have produced at least one more selected-pair change.
+  EXPECT_GT(caller()->ice_candidate_pair_change_history().size(), 1u);
+
+  // Ensure that additional frames are received after the ICE restart.
+  MediaExpectations media_expectations;
+  media_expectations.ExpectBidirectionalAudioAndVideo();
+  ASSERT_TRUE(ExpectNewFrames(media_expectations));
+}
+
+// Verify that audio/video can be received end-to-end when ICE renomination is
+// enabled.
+TEST_P(PeerConnectionIntegrationTest, EndToEndCallWithIceRenomination) {
+  PeerConnectionInterface::RTCConfiguration config;
+  config.enable_ice_renomination = true;
+  ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(config, config));
+  ConnectFakeSignaling();
+  // Do normal offer/answer and wait for some frames to be received in each
+  // direction.
+  caller()->AddAudioVideoTracks();
+  callee()->AddAudioVideoTracks();
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  // Sanity check that ICE renomination was actually negotiated.
+  // Every transport's ice-options must carry the "renomination" token on
+  // both local descriptions.
+  const cricket::SessionDescription* desc =
+      caller()->pc()->local_description()->description();
+  for (const cricket::TransportInfo& info : desc->transport_infos()) {
+    ASSERT_THAT(info.description.transport_options, Contains("renomination"));
+  }
+  desc = callee()->pc()->local_description()->description();
+  for (const cricket::TransportInfo& info : desc->transport_infos()) {
+    ASSERT_THAT(info.description.transport_options, Contains("renomination"));
+  }
+  MediaExpectations media_expectations;
+  media_expectations.ExpectBidirectionalAudioAndVideo();
+  ASSERT_TRUE(ExpectNewFrames(media_expectations));
+}
+
+// With a max bundle policy and RTCP muxing, adding a new media description to
+// the connection should not affect ICE at all because the new media will use
+// the existing connection.
+// TODO(bugs.webrtc.org/12538): Fails on tsan.
+#if defined(THREAD_SANITIZER)
+#define MAYBE_AddMediaToConnectedBundleDoesNotRestartIce \
+  DISABLED_AddMediaToConnectedBundleDoesNotRestartIce
+#else
+#define MAYBE_AddMediaToConnectedBundleDoesNotRestartIce \
+  AddMediaToConnectedBundleDoesNotRestartIce
+#endif
+TEST_P(PeerConnectionIntegrationTest,
+       MAYBE_AddMediaToConnectedBundleDoesNotRestartIce) {
+  PeerConnectionInterface::RTCConfiguration config;
+  config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxBundle;
+  config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyRequire;
+  // Only the caller needs max-bundle; the callee uses the default config.
+  ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(
+      config, PeerConnectionInterface::RTCConfiguration()));
+  ConnectFakeSignaling();
+
+  caller()->AddAudioTrack();
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  ASSERT_EQ_WAIT(PeerConnectionInterface::kIceConnectionCompleted,
+                 caller()->ice_connection_state(), kDefaultTimeout);
+
+  // Reset the history so any ICE state change caused by the renegotiation
+  // below would be detected.
+  caller()->clear_ice_connection_state_history();
+
+  caller()->AddVideoTrack();
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+
+  // Adding media over the bundled transport must not have touched ICE.
+  EXPECT_EQ(0u, caller()->ice_connection_state_history().size());
+}
+
+// This test sets up a call between two parties with audio and video. It then
+// renegotiates setting the video m-line to "port 0", then later renegotiates
+// again, enabling video.
+TEST_P(PeerConnectionIntegrationTest,
+       VideoFlowsAfterMediaSectionIsRejectedAndRecycled) {
+  ASSERT_TRUE(CreatePeerConnectionWrappers());
+  ConnectFakeSignaling();
+
+  // Do initial negotiation, only sending media from the caller. Will result in
+  // video and audio recvonly "m=" sections.
+  caller()->AddAudioVideoTracks();
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+
+  // Negotiate again, disabling the video "m=" section (the callee will set the
+  // port to 0 due to offer_to_receive_video = 0).
+  // The rejection mechanism differs per SDP semantics: Plan B uses
+  // offer/answer options, Unified Plan stops the transceiver.
+  if (sdp_semantics_ == SdpSemantics::kPlanB_DEPRECATED) {
+    PeerConnectionInterface::RTCOfferAnswerOptions options;
+    options.offer_to_receive_video = 0;
+    callee()->SetOfferAnswerOptions(options);
+  } else {
+    callee()->SetRemoteOfferHandler([this] {
+      callee()
+          ->GetFirstTransceiverOfType(cricket::MEDIA_TYPE_VIDEO)
+          ->StopInternal();
+    });
+  }
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  // Sanity check that video "m=" section was actually rejected.
+  const ContentInfo* answer_video_content = cricket::GetFirstVideoContent(
+      callee()->pc()->local_description()->description());
+  ASSERT_NE(nullptr, answer_video_content);
+  ASSERT_TRUE(answer_video_content->rejected);
+
+  // Enable video and do negotiation again, making sure video is received
+  // end-to-end, also adding media stream to callee.
+  if (sdp_semantics_ == SdpSemantics::kPlanB_DEPRECATED) {
+    PeerConnectionInterface::RTCOfferAnswerOptions options;
+    options.offer_to_receive_video = 1;
+    callee()->SetOfferAnswerOptions(options);
+  } else {
+    // The caller's transceiver is stopped, so we need to add another track.
+    auto caller_transceiver =
+        caller()->GetFirstTransceiverOfType(cricket::MEDIA_TYPE_VIDEO);
+    EXPECT_EQ(nullptr, caller_transceiver.get());
+    caller()->AddVideoTrack();
+  }
+  callee()->AddVideoTrack();
+  callee()->SetRemoteOfferHandler(nullptr);
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+
+  // Verify the caller receives frames from the newly added stream, and the
+  // callee receives additional frames from the re-enabled video m= section.
+  MediaExpectations media_expectations;
+  media_expectations.CalleeExpectsSomeAudio();
+  media_expectations.ExpectBidirectionalVideo();
+  ASSERT_TRUE(ExpectNewFrames(media_expectations));
+}
+
+// This tests that if we negotiate after calling CreateSender but before we
+// have a track, then set a track later, frames from the newly-set track are
+// received end-to-end.
+TEST_F(PeerConnectionIntegrationTestPlanB,
+       MediaFlowsAfterEarlyWarmupWithCreateSender) {
+  ASSERT_TRUE(CreatePeerConnectionWrappers());
+  ConnectFakeSignaling();
+  // Create trackless senders on both sides (Plan B "early warmup").
+  auto caller_audio_sender =
+      caller()->pc()->CreateSender("audio", "caller_stream");
+  auto caller_video_sender =
+      caller()->pc()->CreateSender("video", "caller_stream");
+  auto callee_audio_sender =
+      callee()->pc()->CreateSender("audio", "callee_stream");
+  auto callee_video_sender =
+      callee()->pc()->CreateSender("video", "callee_stream");
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kMaxWaitForActivationMs);
+  // Wait for ICE to complete, without any tracks being set.
+  EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionCompleted,
+                 caller()->ice_connection_state(), kMaxWaitForFramesMs);
+  EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionConnected,
+                 callee()->ice_connection_state(), kMaxWaitForFramesMs);
+  // Now set the tracks, and expect frames to immediately start flowing.
+  EXPECT_TRUE(
+      caller_audio_sender->SetTrack(caller()->CreateLocalAudioTrack().get()));
+  EXPECT_TRUE(
+      caller_video_sender->SetTrack(caller()->CreateLocalVideoTrack().get()));
+  EXPECT_TRUE(
+      callee_audio_sender->SetTrack(callee()->CreateLocalAudioTrack().get()));
+  EXPECT_TRUE(
+      callee_video_sender->SetTrack(callee()->CreateLocalVideoTrack().get()));
+  MediaExpectations media_expectations;
+  media_expectations.ExpectBidirectionalAudioAndVideo();
+  ASSERT_TRUE(ExpectNewFrames(media_expectations));
+}
+
+// This tests that if we negotiate after calling AddTransceiver but before we
+// have a track, then set a track later, frames from the newly-set tracks are
+// received end-to-end.
+TEST_F(PeerConnectionIntegrationTestUnifiedPlan,
+       MediaFlowsAfterEarlyWarmupWithAddTransceiver) {
+  ASSERT_TRUE(CreatePeerConnectionWrappers());
+  ConnectFakeSignaling();
+  // Create trackless audio and video transceivers on the caller.
+  auto audio_result = caller()->pc()->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+  ASSERT_EQ(RTCErrorType::NONE, audio_result.error().type());
+  auto caller_audio_sender = audio_result.MoveValue()->sender();
+  auto video_result = caller()->pc()->AddTransceiver(cricket::MEDIA_TYPE_VIDEO);
+  ASSERT_EQ(RTCErrorType::NONE, video_result.error().type());
+  auto caller_video_sender = video_result.MoveValue()->sender();
+  // When the offer arrives, flip both callee transceivers to sendrecv so the
+  // callee will send as well.
+  callee()->SetRemoteOfferHandler([this] {
+    ASSERT_EQ(2u, callee()->pc()->GetTransceivers().size());
+    callee()->pc()->GetTransceivers()[0]->SetDirectionWithError(
+        RtpTransceiverDirection::kSendRecv);
+    callee()->pc()->GetTransceivers()[1]->SetDirectionWithError(
+        RtpTransceiverDirection::kSendRecv);
+  });
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kMaxWaitForActivationMs);
+  // Wait for ICE to complete, without any tracks being set.
+  EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionCompleted,
+                 caller()->ice_connection_state(), kMaxWaitForFramesMs);
+  EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionConnected,
+                 callee()->ice_connection_state(), kMaxWaitForFramesMs);
+  // Now set the tracks, and expect frames to immediately start flowing.
+  auto callee_audio_sender = callee()->pc()->GetSenders()[0];
+  auto callee_video_sender = callee()->pc()->GetSenders()[1];
+  ASSERT_TRUE(
+      caller_audio_sender->SetTrack(caller()->CreateLocalAudioTrack().get()));
+  ASSERT_TRUE(
+      caller_video_sender->SetTrack(caller()->CreateLocalVideoTrack().get()));
+  ASSERT_TRUE(
+      callee_audio_sender->SetTrack(callee()->CreateLocalAudioTrack().get()));
+  ASSERT_TRUE(
+      callee_video_sender->SetTrack(callee()->CreateLocalVideoTrack().get()));
+  MediaExpectations media_expectations;
+  media_expectations.ExpectBidirectionalAudioAndVideo();
+  ASSERT_TRUE(ExpectNewFrames(media_expectations));
+}
+
+// This test verifies that a remote video track can be added via AddStream,
+// and sent end-to-end. For this particular test, it's simply echoed back
+// from the caller to the callee, rather than being forwarded to a third
+// PeerConnection.
+TEST_F(PeerConnectionIntegrationTestPlanB, CanSendRemoteVideoTrack) {
+  ASSERT_TRUE(CreatePeerConnectionWrappers());
+  ConnectFakeSignaling();
+  // Just send a video track from the caller.
+  caller()->AddVideoTrack();
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kMaxWaitForActivationMs);
+  ASSERT_EQ(1U, callee()->remote_streams()->count());
+
+  // Echo the stream back, and do a new offer/answer (initiated by callee this
+  // time).
+  callee()->pc()->AddStream(callee()->remote_streams()->at(0));
+  callee()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kMaxWaitForActivationMs);
+
+  MediaExpectations media_expectations;
+  media_expectations.ExpectBidirectionalVideo();
+  ASSERT_TRUE(ExpectNewFrames(media_expectations));
+}
+
+#if !defined(THREAD_SANITIZER)
+// This test provokes TSAN errors. bugs.webrtc.org/11282
+
+// Test that we achieve the expected end-to-end connection time, using a
+// fake clock and simulated latency on the media and signaling paths.
+// We use a TURN<->TURN connection because this is usually the quickest to
+// set up initially, especially when we're confident the connection will work
+// and can start sending media before we get a STUN response.
+//
+// With various optimizations enabled, here are the network delays we expect to
+// be on the critical path:
+// 1. 2 signaling trips: Signaling offer and offerer's TURN candidate, then
+// signaling answer (with DTLS fingerprint).
+// 2. 9 media hops: Rest of the DTLS handshake. 3 hops in each direction when
+// using TURN<->TURN pair, and DTLS exchange is 4 packets,
+// the first of which should have arrived before the answer.
+TEST_P(PeerConnectionIntegrationTestWithFakeClock,
+ EndToEndConnectionTimeWithTurnTurnPair) {
+ static constexpr int media_hop_delay_ms = 50;
+ static constexpr int signaling_trip_delay_ms = 500;
+ // For explanation of these values, see comment above.
+ static constexpr int required_media_hops = 9;
+ static constexpr int required_signaling_trips = 2;
+ // For internal delays (such as posting an event asychronously).
+ static constexpr int allowed_internal_delay_ms = 20;
+ static constexpr int total_connection_time_ms =
+ media_hop_delay_ms * required_media_hops +
+ signaling_trip_delay_ms * required_signaling_trips +
+ allowed_internal_delay_ms;
+
+ static const rtc::SocketAddress turn_server_1_internal_address{"88.88.88.0",
+ 3478};
+ static const rtc::SocketAddress turn_server_1_external_address{"88.88.88.1",
+ 0};
+ static const rtc::SocketAddress turn_server_2_internal_address{"99.99.99.0",
+ 3478};
+ static const rtc::SocketAddress turn_server_2_external_address{"99.99.99.1",
+ 0};
+ cricket::TestTurnServer* turn_server_1 = CreateTurnServer(
+ turn_server_1_internal_address, turn_server_1_external_address);
+
+ cricket::TestTurnServer* turn_server_2 = CreateTurnServer(
+ turn_server_2_internal_address, turn_server_2_external_address);
+ // Bypass permission check on received packets so media can be sent before
+ // the candidate is signaled.
+ SendTask(network_thread(), [turn_server_1] {
+ turn_server_1->set_enable_permission_checks(false);
+ });
+ SendTask(network_thread(), [turn_server_2] {
+ turn_server_2->set_enable_permission_checks(false);
+ });
+
+ PeerConnectionInterface::RTCConfiguration client_1_config;
+ webrtc::PeerConnectionInterface::IceServer ice_server_1;
+ ice_server_1.urls.push_back("turn:88.88.88.0:3478");
+ ice_server_1.username = "test";
+ ice_server_1.password = "test";
+ client_1_config.servers.push_back(ice_server_1);
+ client_1_config.type = webrtc::PeerConnectionInterface::kRelay;
+ client_1_config.presume_writable_when_fully_relayed = true;
+
+ PeerConnectionInterface::RTCConfiguration client_2_config;
+ webrtc::PeerConnectionInterface::IceServer ice_server_2;
+ ice_server_2.urls.push_back("turn:99.99.99.0:3478");
+ ice_server_2.username = "test";
+ ice_server_2.password = "test";
+ client_2_config.servers.push_back(ice_server_2);
+ client_2_config.type = webrtc::PeerConnectionInterface::kRelay;
+ client_2_config.presume_writable_when_fully_relayed = true;
+
+ ASSERT_TRUE(
+ CreatePeerConnectionWrappersWithConfig(client_1_config, client_2_config));
+ // Set up the simulated delays.
+ SetSignalingDelayMs(signaling_trip_delay_ms);
+ ConnectFakeSignaling();
+ virtual_socket_server()->set_delay_mean(media_hop_delay_ms);
+ virtual_socket_server()->UpdateDelayDistribution();
+
+ // Set "offer to receive audio/video" without adding any tracks, so we just
+ // set up ICE/DTLS with no media.
+ PeerConnectionInterface::RTCOfferAnswerOptions options;
+ options.offer_to_receive_audio = 1;
+ options.offer_to_receive_video = 1;
+ caller()->SetOfferAnswerOptions(options);
+ caller()->CreateAndSetAndSignalOffer();
+ EXPECT_TRUE_SIMULATED_WAIT(DtlsConnected(), total_connection_time_ms,
+ FakeClock());
+ // Closing the PeerConnections destroys the ports before the ScopedFakeClock.
+ // If this is not done a DCHECK can be hit in ports.cc, because a large
+ // negative number is calculated for the rtt due to the global clock changing.
+ ClosePeerConnections();
+}
+
+TEST_P(PeerConnectionIntegrationTestWithFakeClock,
+ OnIceCandidateFlushesGetStatsCache) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+ caller()->AddAudioTrack();
+
+ // Call getStats, assert there are no candidates.
+ rtc::scoped_refptr<const webrtc::RTCStatsReport> first_report =
+ caller()->NewGetStats();
+ ASSERT_TRUE(first_report);
+ auto first_candidate_stats =
+ first_report->GetStatsOfType<webrtc::RTCLocalIceCandidateStats>();
+ ASSERT_EQ(first_candidate_stats.size(), 0u);
+
+ // Create an offer at the caller and set it as remote description on the
+ // callee.
+ caller()->CreateAndSetAndSignalOffer();
+ // Call getStats again, assert there are candidates now.
+ rtc::scoped_refptr<const webrtc::RTCStatsReport> second_report =
+ caller()->NewGetStats();
+ ASSERT_TRUE(second_report);
+ auto second_candidate_stats =
+ second_report->GetStatsOfType<webrtc::RTCLocalIceCandidateStats>();
+ ASSERT_NE(second_candidate_stats.size(), 0u);
+
+ // The fake clock ensures that no time has passed so the cache must have been
+ // explicitly invalidated.
+ EXPECT_EQ(first_report->timestamp(), second_report->timestamp());
+}
+
+TEST_P(PeerConnectionIntegrationTestWithFakeClock,
+ AddIceCandidateFlushesGetStatsCache) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignalingForSdpOnly();
+ caller()->AddAudioTrack();
+
+ // Start candidate gathering and wait for it to complete. Candidates are not
+ // signalled.
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_SIMULATED_WAIT(caller()->IceGatheringStateComplete(),
+ kDefaultTimeout, FakeClock());
+
+ // Call getStats, assert there are no candidates.
+ rtc::scoped_refptr<const webrtc::RTCStatsReport> first_report =
+ caller()->NewGetStats();
+ ASSERT_TRUE(first_report);
+ auto first_candidate_stats =
+ first_report->GetStatsOfType<webrtc::RTCRemoteIceCandidateStats>();
+ ASSERT_EQ(first_candidate_stats.size(), 0u);
+
+ // Add a "fake" candidate.
+ absl::optional<RTCError> result;
+ caller()->pc()->AddIceCandidate(
+ absl::WrapUnique(webrtc::CreateIceCandidate(
+ "", 0,
+ "candidate:2214029314 1 udp 2122260223 127.0.0.1 49152 typ host",
+ nullptr)),
+ [&result](RTCError r) { result = r; });
+ ASSERT_TRUE_WAIT(result.has_value(), kDefaultTimeout);
+ ASSERT_TRUE(result.value().ok());
+
+ // Call getStats again, assert there is a remote candidate now.
+ rtc::scoped_refptr<const webrtc::RTCStatsReport> second_report =
+ caller()->NewGetStats();
+ ASSERT_TRUE(second_report);
+ auto second_candidate_stats =
+ second_report->GetStatsOfType<webrtc::RTCRemoteIceCandidateStats>();
+ ASSERT_EQ(second_candidate_stats.size(), 1u);
+
+ // The fake clock ensures that no time has passed so the cache must have been
+ // explicitly invalidated.
+ EXPECT_EQ(first_report->timestamp(), second_report->timestamp());
+}
+
+#endif // !defined(THREAD_SANITIZER)
+
+// Verify that a TurnCustomizer passed in through RTCConfiguration
+// is actually used by the underlying TURN candidate pair.
+// Note that turnport_unittest.cc contains more detailed, lower-level tests.
+TEST_P(PeerConnectionIntegrationTest, TurnCustomizerUsedForTurnConnections) {
+ static const rtc::SocketAddress turn_server_1_internal_address{"88.88.88.0",
+ 3478};
+ static const rtc::SocketAddress turn_server_1_external_address{"88.88.88.1",
+ 0};
+ static const rtc::SocketAddress turn_server_2_internal_address{"99.99.99.0",
+ 3478};
+ static const rtc::SocketAddress turn_server_2_external_address{"99.99.99.1",
+ 0};
+ CreateTurnServer(turn_server_1_internal_address,
+ turn_server_1_external_address);
+ CreateTurnServer(turn_server_2_internal_address,
+ turn_server_2_external_address);
+
+ PeerConnectionInterface::RTCConfiguration client_1_config;
+ webrtc::PeerConnectionInterface::IceServer ice_server_1;
+ ice_server_1.urls.push_back("turn:88.88.88.0:3478");
+ ice_server_1.username = "test";
+ ice_server_1.password = "test";
+ client_1_config.servers.push_back(ice_server_1);
+ client_1_config.type = webrtc::PeerConnectionInterface::kRelay;
+ auto* customizer1 = CreateTurnCustomizer();
+ client_1_config.turn_customizer = customizer1;
+
+ PeerConnectionInterface::RTCConfiguration client_2_config;
+ webrtc::PeerConnectionInterface::IceServer ice_server_2;
+ ice_server_2.urls.push_back("turn:99.99.99.0:3478");
+ ice_server_2.username = "test";
+ ice_server_2.password = "test";
+ client_2_config.servers.push_back(ice_server_2);
+ client_2_config.type = webrtc::PeerConnectionInterface::kRelay;
+ auto* customizer2 = CreateTurnCustomizer();
+ client_2_config.turn_customizer = customizer2;
+
+ ASSERT_TRUE(
+ CreatePeerConnectionWrappersWithConfig(client_1_config, client_2_config));
+ ConnectFakeSignaling();
+
+ // Set "offer to receive audio/video" without adding any tracks, so we just
+ // set up ICE/DTLS with no media.
+ PeerConnectionInterface::RTCOfferAnswerOptions options;
+ options.offer_to_receive_audio = 1;
+ options.offer_to_receive_video = 1;
+ caller()->SetOfferAnswerOptions(options);
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(DtlsConnected(), kDefaultTimeout);
+
+ ExpectTurnCustomizerCountersIncremented(customizer1);
+ ExpectTurnCustomizerCountersIncremented(customizer2);
+}
+
+// Verifies that you can use TCP instead of UDP to connect to a TURN server and
+// send media between the caller and the callee.
+TEST_P(PeerConnectionIntegrationTest, TCPUsedForTurnConnections) {
+ static const rtc::SocketAddress turn_server_internal_address{"88.88.88.0",
+ 3478};
+ static const rtc::SocketAddress turn_server_external_address{"88.88.88.1", 0};
+
+ // Enable TCP for the fake turn server.
+ CreateTurnServer(turn_server_internal_address, turn_server_external_address,
+ cricket::PROTO_TCP);
+
+ webrtc::PeerConnectionInterface::IceServer ice_server;
+ ice_server.urls.push_back("turn:88.88.88.0:3478?transport=tcp");
+ ice_server.username = "test";
+ ice_server.password = "test";
+
+ PeerConnectionInterface::RTCConfiguration client_1_config;
+ client_1_config.servers.push_back(ice_server);
+ client_1_config.type = webrtc::PeerConnectionInterface::kRelay;
+
+ PeerConnectionInterface::RTCConfiguration client_2_config;
+ client_2_config.servers.push_back(ice_server);
+ client_2_config.type = webrtc::PeerConnectionInterface::kRelay;
+
+ ASSERT_TRUE(
+ CreatePeerConnectionWrappersWithConfig(client_1_config, client_2_config));
+
+ // Do normal offer/answer and wait for ICE to complete.
+ ConnectFakeSignaling();
+ caller()->AddAudioVideoTracks();
+ callee()->AddAudioVideoTracks();
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionConnected,
+ callee()->ice_connection_state(), kMaxWaitForFramesMs);
+
+ MediaExpectations media_expectations;
+ media_expectations.ExpectBidirectionalAudioAndVideo();
+ EXPECT_TRUE(ExpectNewFrames(media_expectations));
+}
+
+// Verify that a SSLCertificateVerifier passed in through
+// PeerConnectionDependencies is actually used by the underlying SSL
+// implementation to determine whether a certificate presented by the TURN
+// server is accepted by the client. Note that openssladapter_unittest.cc
+// contains more detailed, lower-level tests.
+TEST_P(PeerConnectionIntegrationTest,
+ SSLCertificateVerifierUsedForTurnConnections) {
+ static const rtc::SocketAddress turn_server_internal_address{"88.88.88.0",
+ 3478};
+ static const rtc::SocketAddress turn_server_external_address{"88.88.88.1", 0};
+
+ // Enable TCP-TLS for the fake turn server. We need to pass in 88.88.88.0 so
+ // that host name verification passes on the fake certificate.
+ CreateTurnServer(turn_server_internal_address, turn_server_external_address,
+ cricket::PROTO_TLS, "88.88.88.0");
+
+ webrtc::PeerConnectionInterface::IceServer ice_server;
+ ice_server.urls.push_back("turns:88.88.88.0:3478?transport=tcp");
+ ice_server.username = "test";
+ ice_server.password = "test";
+
+ PeerConnectionInterface::RTCConfiguration client_1_config;
+ client_1_config.servers.push_back(ice_server);
+ client_1_config.type = webrtc::PeerConnectionInterface::kRelay;
+
+ PeerConnectionInterface::RTCConfiguration client_2_config;
+ client_2_config.servers.push_back(ice_server);
+ // Setting the type to kRelay forces the connection to go through a TURN
+ // server.
+ client_2_config.type = webrtc::PeerConnectionInterface::kRelay;
+
+ // Get a copy of the pointer so we can verify calls later.
+ rtc::TestCertificateVerifier* client_1_cert_verifier =
+ new rtc::TestCertificateVerifier();
+ client_1_cert_verifier->verify_certificate_ = true;
+ rtc::TestCertificateVerifier* client_2_cert_verifier =
+ new rtc::TestCertificateVerifier();
+ client_2_cert_verifier->verify_certificate_ = true;
+
+ // Create the dependencies with the test certificate verifier.
+ webrtc::PeerConnectionDependencies client_1_deps(nullptr);
+ client_1_deps.tls_cert_verifier =
+ std::unique_ptr<rtc::TestCertificateVerifier>(client_1_cert_verifier);
+ webrtc::PeerConnectionDependencies client_2_deps(nullptr);
+ client_2_deps.tls_cert_verifier =
+ std::unique_ptr<rtc::TestCertificateVerifier>(client_2_cert_verifier);
+
+ ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndDeps(
+ client_1_config, std::move(client_1_deps), client_2_config,
+ std::move(client_2_deps)));
+ ConnectFakeSignaling();
+
+ // Set "offer to receive audio/video" without adding any tracks, so we just
+ // set up ICE/DTLS with no media.
+ PeerConnectionInterface::RTCOfferAnswerOptions options;
+ options.offer_to_receive_audio = 1;
+ options.offer_to_receive_video = 1;
+ caller()->SetOfferAnswerOptions(options);
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(DtlsConnected(), kDefaultTimeout);
+
+ EXPECT_GT(client_1_cert_verifier->call_count_, 0u);
+ EXPECT_GT(client_2_cert_verifier->call_count_, 0u);
+}
+
+// Test that the injected ICE transport factory is used to create ICE transports
+// for WebRTC connections.
+TEST_P(PeerConnectionIntegrationTest, IceTransportFactoryUsedForConnections) {
+ PeerConnectionInterface::RTCConfiguration default_config;
+ PeerConnectionDependencies dependencies(nullptr);
+ auto ice_transport_factory = std::make_unique<MockIceTransportFactory>();
+ EXPECT_CALL(*ice_transport_factory, RecordIceTransportCreated()).Times(1);
+ dependencies.ice_transport_factory = std::move(ice_transport_factory);
+ auto wrapper = CreatePeerConnectionWrapper("Caller", nullptr, &default_config,
+ std::move(dependencies), nullptr,
+ /*reset_encoder_factory=*/false,
+ /*reset_decoder_factory=*/false);
+ ASSERT_TRUE(wrapper);
+ wrapper->CreateDataChannel();
+ auto observer = rtc::make_ref_counted<MockSetSessionDescriptionObserver>();
+ wrapper->pc()->SetLocalDescription(observer.get(),
+ wrapper->CreateOfferAndWait().release());
+}
+
+// Test that audio and video flow end-to-end when codec names don't use the
+// expected casing, given that they're supposed to be case insensitive. To test
+// this, all but one codec is removed from each media description, and its
+// casing is changed.
+//
+// In the past, this has regressed and caused crashes/black video, due to the
+// fact that code at some layers was doing case-insensitive comparisons and
+// code at other layers was not.
+TEST_P(PeerConnectionIntegrationTest, CodecNamesAreCaseInsensitive) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+ caller()->AddAudioVideoTracks();
+ callee()->AddAudioVideoTracks();
+
+ // Remove all but one audio/video codec (opus and VP8), and change the
+ // casing of the caller's generated offer.
+ caller()->SetGeneratedSdpMunger([](cricket::SessionDescription* description) {
+ cricket::AudioContentDescription* audio =
+ GetFirstAudioContentDescription(description);
+ ASSERT_NE(nullptr, audio);
+ auto audio_codecs = audio->codecs();
+ audio_codecs.erase(std::remove_if(audio_codecs.begin(), audio_codecs.end(),
+ [](const cricket::AudioCodec& codec) {
+ return codec.name != "opus";
+ }),
+ audio_codecs.end());
+ ASSERT_EQ(1u, audio_codecs.size());
+ audio_codecs[0].name = "OpUs";
+ audio->set_codecs(audio_codecs);
+
+ cricket::VideoContentDescription* video =
+ GetFirstVideoContentDescription(description);
+ ASSERT_NE(nullptr, video);
+ auto video_codecs = video->codecs();
+ video_codecs.erase(std::remove_if(video_codecs.begin(), video_codecs.end(),
+ [](const cricket::VideoCodec& codec) {
+ return codec.name != "VP8";
+ }),
+ video_codecs.end());
+ ASSERT_EQ(1u, video_codecs.size());
+ video_codecs[0].name = "vP8";
+ video->set_codecs(video_codecs);
+ });
+
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+
+ // Verify frames are still received end-to-end.
+ MediaExpectations media_expectations;
+ media_expectations.ExpectBidirectionalAudioAndVideo();
+ ASSERT_TRUE(ExpectNewFrames(media_expectations));
+}
+
+TEST_P(PeerConnectionIntegrationTest, GetSourcesAudio) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+ caller()->AddAudioTrack();
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ // Wait for one audio frame to be received by the callee.
+ MediaExpectations media_expectations;
+ media_expectations.CalleeExpectsSomeAudio(1);
+ ASSERT_TRUE(ExpectNewFrames(media_expectations));
+ ASSERT_EQ(callee()->pc()->GetReceivers().size(), 1u);
+ auto receiver = callee()->pc()->GetReceivers()[0];
+ ASSERT_EQ(receiver->media_type(), cricket::MEDIA_TYPE_AUDIO);
+ auto sources = receiver->GetSources();
+ ASSERT_GT(receiver->GetParameters().encodings.size(), 0u);
+ EXPECT_EQ(receiver->GetParameters().encodings[0].ssrc,
+ sources[0].source_id());
+ EXPECT_EQ(webrtc::RtpSourceType::SSRC, sources[0].source_type());
+}
+
+TEST_P(PeerConnectionIntegrationTest, GetSourcesVideo) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+ caller()->AddVideoTrack();
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ // Wait for one video frame to be received by the callee.
+ MediaExpectations media_expectations;
+ media_expectations.CalleeExpectsSomeVideo(1);
+ ASSERT_TRUE(ExpectNewFrames(media_expectations));
+ ASSERT_EQ(callee()->pc()->GetReceivers().size(), 1u);
+ auto receiver = callee()->pc()->GetReceivers()[0];
+ ASSERT_EQ(receiver->media_type(), cricket::MEDIA_TYPE_VIDEO);
+ auto sources = receiver->GetSources();
+ ASSERT_GT(receiver->GetParameters().encodings.size(), 0u);
+ ASSERT_GT(sources.size(), 0u);
+ EXPECT_EQ(receiver->GetParameters().encodings[0].ssrc,
+ sources[0].source_id());
+ EXPECT_EQ(webrtc::RtpSourceType::SSRC, sources[0].source_type());
+}
+
+TEST_P(PeerConnectionIntegrationTest, UnsignaledSsrcGetSourcesAudio) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+ caller()->AddAudioTrack();
+ callee()->SetReceivedSdpMunger(RemoveSsrcsAndMsids);
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ ASSERT_EQ(callee()->pc()->GetReceivers().size(), 1u);
+ auto receiver = callee()->pc()->GetReceivers()[0];
+ std::vector<RtpSource> sources;
+ EXPECT_TRUE_WAIT(([&receiver, &sources]() {
+ sources = receiver->GetSources();
+ return !sources.empty();
+ })(),
+ kDefaultTimeout);
+ ASSERT_GT(sources.size(), 0u);
+ EXPECT_EQ(webrtc::RtpSourceType::SSRC, sources[0].source_type());
+}
+
+TEST_P(PeerConnectionIntegrationTest, UnsignaledSsrcGetSourcesVideo) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+ caller()->AddVideoTrack();
+ callee()->SetReceivedSdpMunger(RemoveSsrcsAndMsids);
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ ASSERT_EQ(callee()->pc()->GetReceivers().size(), 1u);
+ auto receiver = callee()->pc()->GetReceivers()[0];
+ std::vector<RtpSource> sources;
+ EXPECT_TRUE_WAIT(([&receiver, &sources]() {
+ sources = receiver->GetSources();
+ return !sources.empty();
+ })(),
+ kDefaultTimeout);
+ ASSERT_GT(sources.size(), 0u);
+ EXPECT_EQ(webrtc::RtpSourceType::SSRC, sources[0].source_type());
+}
+
+// Similar to the above test, except instead of waiting until GetSources() is
+// non-empty we wait until media is flowing and then assert that GetSources()
+// is not empty. This provides test coverage for https://crbug.com/webrtc/14817
+ // where a race due to the re-creation of the unsignaled ssrc stream would
+// clear the GetSources() history. This test not flaking confirms the bug fix.
+TEST_P(PeerConnectionIntegrationTest,
+ UnsignaledSsrcGetSourcesNonEmptyIfMediaFlowing) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+ caller()->AddVideoTrack();
+ callee()->SetReceivedSdpMunger(RemoveSsrcsAndMsids);
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ // Wait for one video frame to be received by the callee.
+ MediaExpectations media_expectations;
+ media_expectations.CalleeExpectsSomeVideo(1);
+ ASSERT_TRUE(ExpectNewFrames(media_expectations));
+ ASSERT_EQ(callee()->pc()->GetReceivers().size(), 1u);
+ auto receiver = callee()->pc()->GetReceivers()[0];
+ std::vector<RtpSource> sources = receiver->GetSources();
+ // SSRC history must not be cleared since the reception of the first frame.
+ ASSERT_GT(sources.size(), 0u);
+ EXPECT_EQ(webrtc::RtpSourceType::SSRC, sources[0].source_type());
+}
+
+TEST_P(PeerConnectionIntegrationTest, UnsignaledSsrcGetParametersAudio) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+ caller()->AddAudioTrack();
+ callee()->SetReceivedSdpMunger(RemoveSsrcsAndMsids);
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ ASSERT_EQ(callee()->pc()->GetReceivers().size(), 1u);
+ auto receiver = callee()->pc()->GetReceivers()[0];
+ RtpParameters parameters;
+ EXPECT_TRUE_WAIT(([&receiver, &parameters]() {
+ parameters = receiver->GetParameters();
+ return !parameters.encodings.empty() &&
+ parameters.encodings[0].ssrc.has_value();
+ })(),
+ kDefaultTimeout);
+ ASSERT_EQ(parameters.encodings.size(), 1u);
+ EXPECT_TRUE(parameters.encodings[0].ssrc.has_value());
+}
+
+TEST_P(PeerConnectionIntegrationTest, UnsignaledSsrcGetParametersVideo) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+ caller()->AddVideoTrack();
+ callee()->SetReceivedSdpMunger(RemoveSsrcsAndMsids);
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ ASSERT_EQ(callee()->pc()->GetReceivers().size(), 1u);
+ auto receiver = callee()->pc()->GetReceivers()[0];
+ RtpParameters parameters;
+ EXPECT_TRUE_WAIT(([&receiver, &parameters]() {
+ parameters = receiver->GetParameters();
+ return !parameters.encodings.empty() &&
+ parameters.encodings[0].ssrc.has_value();
+ })(),
+ kDefaultTimeout);
+ ASSERT_EQ(parameters.encodings.size(), 1u);
+ EXPECT_TRUE(parameters.encodings[0].ssrc.has_value());
+}
+
+TEST_P(PeerConnectionIntegrationTest,
+ GetParametersHasEncodingsBeforeNegotiation) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ auto sender = caller()->AddTrack(caller()->CreateLocalVideoTrack());
+ auto parameters = sender->GetParameters();
+ EXPECT_EQ(parameters.encodings.size(), 1u);
+}
+
+// Test that if a track is removed and added again with a different stream ID,
+// the new stream ID is successfully communicated in SDP and media continues to
+// flow end-to-end.
+// TODO(webrtc.bugs.org/8734): This test does not work for Unified Plan because
+// it will not reuse a transceiver that has already been sending. After creating
+// a new transceiver it tries to create an offer with two senders of the same
+// track ids and it fails.
+TEST_F(PeerConnectionIntegrationTestPlanB, RemoveAndAddTrackWithNewStreamId) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+
+ // Add track using stream 1, do offer/answer.
+ rtc::scoped_refptr<webrtc::AudioTrackInterface> track =
+ caller()->CreateLocalAudioTrack();
+ rtc::scoped_refptr<webrtc::RtpSenderInterface> sender =
+ caller()->AddTrack(track, {"stream_1"});
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ {
+ MediaExpectations media_expectations;
+ media_expectations.CalleeExpectsSomeAudio(1);
+ ASSERT_TRUE(ExpectNewFrames(media_expectations));
+ }
+ // Remove the sender, and create a new one with the new stream.
+ caller()->pc()->RemoveTrackOrError(sender);
+ sender = caller()->AddTrack(track, {"stream_2"});
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ // Wait for additional audio frames to be received by the callee.
+ {
+ MediaExpectations media_expectations;
+ media_expectations.CalleeExpectsSomeAudio();
+ ASSERT_TRUE(ExpectNewFrames(media_expectations));
+ }
+}
+
+TEST_P(PeerConnectionIntegrationTest, RtcEventLogOutputWriteCalled) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+
+ auto output = std::make_unique<testing::NiceMock<MockRtcEventLogOutput>>();
+ ON_CALL(*output, IsActive()).WillByDefault(::testing::Return(true));
+ ON_CALL(*output, Write(::testing::A<absl::string_view>()))
+ .WillByDefault(::testing::Return(true));
+ EXPECT_CALL(*output, Write(::testing::A<absl::string_view>()))
+ .Times(::testing::AtLeast(1));
+ EXPECT_TRUE(caller()->pc()->StartRtcEventLog(
+ std::move(output), webrtc::RtcEventLog::kImmediateOutput));
+
+ caller()->AddAudioVideoTracks();
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+}
+
+// Test that if candidates are only signaled by applying full session
+// descriptions (instead of using AddIceCandidate), the peers can connect to
+// each other and exchange media.
+TEST_P(PeerConnectionIntegrationTest, MediaFlowsWhenCandidatesSetOnlyInSdp) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ // Each side will signal the session descriptions but not candidates.
+ ConnectFakeSignalingForSdpOnly();
+
+ // Add audio video track and exchange the initial offer/answer with media
+ // information only. This will start ICE gathering on each side.
+ caller()->AddAudioVideoTracks();
+ callee()->AddAudioVideoTracks();
+ caller()->CreateAndSetAndSignalOffer();
+
+ // Wait for all candidates to be gathered on both the caller and callee.
+ ASSERT_EQ_WAIT(PeerConnectionInterface::kIceGatheringComplete,
+ caller()->ice_gathering_state(), kDefaultTimeout);
+ ASSERT_EQ_WAIT(PeerConnectionInterface::kIceGatheringComplete,
+ callee()->ice_gathering_state(), kDefaultTimeout);
+
+ // The candidates will now be included in the session description, so
+ // signaling them will start the ICE connection.
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+
+ // Ensure that media flows in both directions.
+ MediaExpectations media_expectations;
+ media_expectations.ExpectBidirectionalAudioAndVideo();
+ ASSERT_TRUE(ExpectNewFrames(media_expectations));
+}
+
+#if !defined(THREAD_SANITIZER)
+ // These tests provoke TSAN errors. See bugs.webrtc.org/11305.
+
+// Test that SetAudioPlayout can be used to disable audio playout from the
+// start, then later enable it. This may be useful, for example, if the caller
+// needs to play a local ringtone until some event occurs, after which it
+// switches to playing the received audio.
+TEST_P(PeerConnectionIntegrationTest, DisableAndEnableAudioPlayout) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+
+ // Set up audio-only call where audio playout is disabled on caller's side.
+ caller()->pc()->SetAudioPlayout(false);
+ caller()->AddAudioTrack();
+ callee()->AddAudioTrack();
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+
+ // Pump messages for a second.
+ WAIT(false, 1000);
+ // Since audio playout is disabled, the caller shouldn't have received
+ // anything (at the playout level, at least).
+ EXPECT_EQ(0, caller()->audio_frames_received());
+ // As a sanity check, make sure the callee (for which playout isn't disabled)
+ // did still see frames on its audio level.
+ ASSERT_GT(callee()->audio_frames_received(), 0);
+
+ // Enable playout again, and ensure audio starts flowing.
+ caller()->pc()->SetAudioPlayout(true);
+ MediaExpectations media_expectations;
+ media_expectations.ExpectBidirectionalAudio();
+ ASSERT_TRUE(ExpectNewFrames(media_expectations));
+}
+
+double GetAudioEnergyStat(PeerConnectionIntegrationWrapper* pc) {
+ auto report = pc->NewGetStats();
+ auto inbound_rtps =
+ report->GetStatsOfType<webrtc::RTCInboundRtpStreamStats>();
+ RTC_CHECK(!inbound_rtps.empty());
+ auto* inbound_rtp = inbound_rtps[0];
+ if (!inbound_rtp->total_audio_energy.is_defined()) {
+ return 0.0;
+ }
+ return *inbound_rtp->total_audio_energy;
+}
+
+// Test that if audio playout is disabled via the SetAudioPlayout() method, then
+// incoming audio is still processed and statistics are generated.
+TEST_P(PeerConnectionIntegrationTest,
+ DisableAudioPlayoutStillGeneratesAudioStats) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+
+ // Set up audio-only call where playout is disabled but audio-processing is
+ // still active.
+ caller()->AddAudioTrack();
+ callee()->AddAudioTrack();
+ caller()->pc()->SetAudioPlayout(false);
+
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+
+ // Wait for the callee to receive audio stats.
+ EXPECT_TRUE_WAIT(GetAudioEnergyStat(caller()) > 0, kMaxWaitForFramesMs);
+}
+
+#endif // !defined(THREAD_SANITIZER)
+
+// Test that SetAudioRecording can be used to disable audio recording from the
+// start, then later enable it. This may be useful, for example, if the caller
+// wants to ensure that no audio resources are active before a certain state
+// is reached.
+TEST_P(PeerConnectionIntegrationTest, DisableAndEnableAudioRecording) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+
+ // Set up audio-only call where audio recording is disabled on caller's side.
+ caller()->pc()->SetAudioRecording(false);
+ caller()->AddAudioTrack();
+ callee()->AddAudioTrack();
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+
+ // Pump messages for a second.
+ WAIT(false, 1000);
+ // Since caller has disabled audio recording, the callee shouldn't have
+ // received anything.
+ EXPECT_EQ(0, callee()->audio_frames_received());
+ // As a sanity check, make sure the caller did still see frames on its
+ // audio level since audio recording is enabled on the callee side.
+ ASSERT_GT(caller()->audio_frames_received(), 0);
+
+ // Enable audio recording again, and ensure audio starts flowing.
+ caller()->pc()->SetAudioRecording(true);
+ MediaExpectations media_expectations;
+ media_expectations.ExpectBidirectionalAudio();
+ ASSERT_TRUE(ExpectNewFrames(media_expectations));
+}
+
+TEST_P(PeerConnectionIntegrationTest,
+ IceEventsGeneratedAndLoggedInRtcEventLog) {
+ ASSERT_TRUE(CreatePeerConnectionWrappersWithFakeRtcEventLog());
+ ConnectFakeSignaling();
+ PeerConnectionInterface::RTCOfferAnswerOptions options;
+ options.offer_to_receive_audio = 1;
+ caller()->SetOfferAnswerOptions(options);
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(DtlsConnected(), kDefaultTimeout);
+ ASSERT_NE(nullptr, caller()->event_log_factory());
+ ASSERT_NE(nullptr, callee()->event_log_factory());
+ webrtc::FakeRtcEventLog* caller_event_log =
+ caller()->event_log_factory()->last_log_created();
+ webrtc::FakeRtcEventLog* callee_event_log =
+ callee()->event_log_factory()->last_log_created();
+ ASSERT_NE(nullptr, caller_event_log);
+ ASSERT_NE(nullptr, callee_event_log);
+ int caller_ice_config_count = caller_event_log->GetEventCount(
+ webrtc::RtcEvent::Type::IceCandidatePairConfig);
+ int caller_ice_event_count = caller_event_log->GetEventCount(
+ webrtc::RtcEvent::Type::IceCandidatePairEvent);
+ int callee_ice_config_count = callee_event_log->GetEventCount(
+ webrtc::RtcEvent::Type::IceCandidatePairConfig);
+ int callee_ice_event_count = callee_event_log->GetEventCount(
+ webrtc::RtcEvent::Type::IceCandidatePairEvent);
+ EXPECT_LT(0, caller_ice_config_count);
+ EXPECT_LT(0, caller_ice_event_count);
+ EXPECT_LT(0, callee_ice_config_count);
+ EXPECT_LT(0, callee_ice_event_count);
+}
+
+// Verifies that loosening the candidate filter (kRelay -> kAll) via
+// SetConfiguration, with continual gathering and
+// surface_ice_candidates_on_ice_transport_type_changed enabled, surfaces new
+// host candidates WITHOUT triggering an ICE restart (ufrag stays the same).
+TEST_P(PeerConnectionIntegrationTest, RegatherAfterChangingIceTransportType) {
+  static const rtc::SocketAddress turn_server_internal_address{"88.88.88.0",
+                                                               3478};
+  static const rtc::SocketAddress turn_server_external_address{"88.88.88.1", 0};
+
+  CreateTurnServer(turn_server_internal_address, turn_server_external_address);
+
+  webrtc::PeerConnectionInterface::IceServer ice_server;
+  ice_server.urls.push_back("turn:88.88.88.0:3478");
+  ice_server.username = "test";
+  ice_server.password = "test";
+
+  PeerConnectionInterface::RTCConfiguration caller_config;
+  caller_config.servers.push_back(ice_server);
+  caller_config.type = webrtc::PeerConnectionInterface::kRelay;
+  caller_config.continual_gathering_policy = PeerConnection::GATHER_CONTINUALLY;
+  caller_config.surface_ice_candidates_on_ice_transport_type_changed = true;
+
+  PeerConnectionInterface::RTCConfiguration callee_config;
+  callee_config.servers.push_back(ice_server);
+  callee_config.type = webrtc::PeerConnectionInterface::kRelay;
+  callee_config.continual_gathering_policy = PeerConnection::GATHER_CONTINUALLY;
+  callee_config.surface_ice_candidates_on_ice_transport_type_changed = true;
+
+  ASSERT_TRUE(
+      CreatePeerConnectionWrappersWithConfig(caller_config, callee_config));
+
+  // Do normal offer/answer and wait for ICE to complete.
+  ConnectFakeSignaling();
+  caller()->AddAudioVideoTracks();
+  callee()->AddAudioVideoTracks();
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  // Since we are doing continual gathering, the ICE transport does not reach
+  // kIceGatheringComplete (see
+  // P2PTransportChannel::OnCandidatesAllocationDone), and consequently not
+  // kIceConnectionComplete.
+  EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionConnected,
+                 caller()->ice_connection_state(), kDefaultTimeout);
+  EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionConnected,
+                 callee()->ice_connection_state(), kDefaultTimeout);
+  // Note that we cannot use the metric
+  // `WebRTC.PeerConnection.CandidatePairType_UDP` in this test since this
+  // metric is only populated when we reach kIceConnectionComplete in the
+  // current implementation.
+  // With the kRelay filter only relay candidates may be gathered.
+  EXPECT_EQ(cricket::RELAY_PORT_TYPE,
+            caller()->last_candidate_gathered().type());
+  EXPECT_EQ(cricket::RELAY_PORT_TYPE,
+            callee()->last_candidate_gathered().type());
+
+  // Loosen the caller's candidate filter.
+  caller_config = caller()->pc()->GetConfiguration();
+  caller_config.type = webrtc::PeerConnectionInterface::kAll;
+  caller()->pc()->SetConfiguration(caller_config);
+  // We should have gathered a new host candidate.
+  EXPECT_EQ_WAIT(cricket::LOCAL_PORT_TYPE,
+                 caller()->last_candidate_gathered().type(), kDefaultTimeout);
+
+  // Loosen the callee's candidate filter.
+  callee_config = callee()->pc()->GetConfiguration();
+  callee_config.type = webrtc::PeerConnectionInterface::kAll;
+  callee()->pc()->SetConfiguration(callee_config);
+  EXPECT_EQ_WAIT(cricket::LOCAL_PORT_TYPE,
+                 callee()->last_candidate_gathered().type(), kDefaultTimeout);
+
+  // Create an offer and verify that it does not contain an ICE restart (i.e new
+  // ice credentials).
+  std::string caller_ufrag_pre_offer = caller()
+                                           ->pc()
+                                           ->local_description()
+                                           ->description()
+                                           ->transport_infos()[0]
+                                           .description.ice_ufrag;
+  caller()->CreateAndSetAndSignalOffer();
+  std::string caller_ufrag_post_offer = caller()
+                                            ->pc()
+                                            ->local_description()
+                                            ->description()
+                                            ->transport_infos()[0]
+                                            .description.ice_ufrag;
+  // Identical ufrag before and after => no ICE restart occurred.
+  EXPECT_EQ(caller_ufrag_pre_offer, caller_ufrag_post_offer);
+}
+
+// Verifies that a TURN allocation failure surfaces through the
+// OnIceCandidateError callback. The TURN server expects password "test" but
+// the client is configured with "123", so the allocation is rejected with a
+// 401 Unauthorized, which must be reported with the server URL and a
+// non-empty local address.
+TEST_P(PeerConnectionIntegrationTest, OnIceCandidateError) {
+  static const rtc::SocketAddress turn_server_internal_address{"88.88.88.0",
+                                                               3478};
+  static const rtc::SocketAddress turn_server_external_address{"88.88.88.1", 0};
+
+  CreateTurnServer(turn_server_internal_address, turn_server_external_address);
+
+  webrtc::PeerConnectionInterface::IceServer ice_server;
+  ice_server.urls.push_back("turn:88.88.88.0:3478");
+  ice_server.username = "test";
+  // Deliberately wrong password to provoke the 401 error below.
+  ice_server.password = "123";
+
+  PeerConnectionInterface::RTCConfiguration caller_config;
+  caller_config.servers.push_back(ice_server);
+  caller_config.type = webrtc::PeerConnectionInterface::kRelay;
+  caller_config.continual_gathering_policy = PeerConnection::GATHER_CONTINUALLY;
+
+  PeerConnectionInterface::RTCConfiguration callee_config;
+  callee_config.servers.push_back(ice_server);
+  callee_config.type = webrtc::PeerConnectionInterface::kRelay;
+  callee_config.continual_gathering_policy = PeerConnection::GATHER_CONTINUALLY;
+
+  ASSERT_TRUE(
+      CreatePeerConnectionWrappersWithConfig(caller_config, callee_config));
+
+  // Do normal offer/answer and wait for ICE to complete.
+  ConnectFakeSignaling();
+  caller()->AddAudioVideoTracks();
+  callee()->AddAudioVideoTracks();
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  EXPECT_EQ_WAIT(401, caller()->error_event().error_code, kDefaultTimeout);
+  EXPECT_EQ("Unauthorized", caller()->error_event().error_text);
+  EXPECT_EQ("turn:88.88.88.0:3478?transport=udp", caller()->error_event().url);
+  // A candidate was gathered locally, so the event carries its address.
+  EXPECT_NE(caller()->error_event().address, "");
+}
+
+// Verifies that when TURN allocation fails before any local candidate address
+// is available (TCP connection to an unreachable server), the
+// OnIceCandidateError event is fired with error code 701 and an empty
+// address field.
+TEST_P(PeerConnectionIntegrationTest, OnIceCandidateErrorWithEmptyAddress) {
+  webrtc::PeerConnectionInterface::IceServer ice_server;
+  // No TURN server is created for this address, so the TCP connect fails.
+  ice_server.urls.push_back("turn:127.0.0.1:3478?transport=tcp");
+  ice_server.username = "test";
+  ice_server.password = "test";
+
+  PeerConnectionInterface::RTCConfiguration caller_config;
+  caller_config.servers.push_back(ice_server);
+  caller_config.type = webrtc::PeerConnectionInterface::kRelay;
+  caller_config.continual_gathering_policy = PeerConnection::GATHER_CONTINUALLY;
+
+  PeerConnectionInterface::RTCConfiguration callee_config;
+  callee_config.servers.push_back(ice_server);
+  callee_config.type = webrtc::PeerConnectionInterface::kRelay;
+  callee_config.continual_gathering_policy = PeerConnection::GATHER_CONTINUALLY;
+
+  ASSERT_TRUE(
+      CreatePeerConnectionWrappersWithConfig(caller_config, callee_config));
+
+  // Do normal offer/answer and wait for ICE to complete.
+  ConnectFakeSignaling();
+  caller()->AddAudioVideoTracks();
+  callee()->AddAudioVideoTracks();
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  EXPECT_EQ_WAIT(701, caller()->error_event().error_code, kDefaultTimeout);
+  EXPECT_EQ(caller()->error_event().address, "");
+}
+
+// TODO(https://crbug.com/webrtc/14947): Investigate why this is flaking and
+// find a way to re-enable the test.
+// Verifies that audio established in a first negotiation keeps flowing after
+// the callee's own pending local offer is implicitly rolled back by an
+// incoming offer from the caller (enable_implicit_rollback = true).
+TEST_F(PeerConnectionIntegrationTestUnifiedPlan,
+       DISABLED_AudioKeepsFlowingAfterImplicitRollback) {
+  PeerConnectionInterface::RTCConfiguration config;
+  config.sdp_semantics = SdpSemantics::kUnifiedPlan;
+  config.enable_implicit_rollback = true;
+  ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(config, config));
+  ConnectFakeSignaling();
+  caller()->AddAudioTrack();
+  callee()->AddAudioTrack();
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  MediaExpectations media_expectations;
+  media_expectations.ExpectBidirectionalAudio();
+  ASSERT_TRUE(ExpectNewFrames(media_expectations));
+  SetSignalIceCandidates(false);  // Workaround candidate outrace sdp.
+  caller()->AddVideoTrack();
+  callee()->AddVideoTrack();
+  // Put the callee into have-local-offer so the caller's next offer forces an
+  // implicit rollback.
+  auto observer = rtc::make_ref_counted<MockSetSessionDescriptionObserver>();
+  callee()->pc()->SetLocalDescription(observer.get(),
+                                      callee()->CreateOfferAndWait().release());
+  EXPECT_TRUE_WAIT(observer->called(), kDefaultTimeout);
+  caller()->CreateAndSetAndSignalOffer();  // Implicit rollback.
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  // Audio must still flow in both directions after the rollback.
+  ASSERT_TRUE(ExpectNewFrames(media_expectations));
+}
+
+// Verifies that an implicit rollback (remote offer arriving while in
+// have-local-offer) transitions the signaling state back through kStable
+// before entering kHaveRemoteOffer.
+TEST_F(PeerConnectionIntegrationTestUnifiedPlan,
+       ImplicitRollbackVisitsStableState) {
+  RTCConfiguration config;
+  config.sdp_semantics = SdpSemantics::kUnifiedPlan;
+  config.enable_implicit_rollback = true;
+
+  ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(config, config));
+
+  // Put the callee into have-local-offer.
+  auto sld_observer =
+      rtc::make_ref_counted<MockSetSessionDescriptionObserver>();
+  callee()->pc()->SetLocalDescription(sld_observer.get(),
+                                      callee()->CreateOfferAndWait().release());
+  EXPECT_TRUE_WAIT(sld_observer->called(), kDefaultTimeout);
+  EXPECT_EQ(sld_observer->error(), "");
+
+  // Applying the caller's offer triggers the implicit rollback.
+  auto srd_observer =
+      rtc::make_ref_counted<MockSetSessionDescriptionObserver>();
+  callee()->pc()->SetRemoteDescription(
+      srd_observer.get(), caller()->CreateOfferAndWait().release());
+  EXPECT_TRUE_WAIT(srd_observer->called(), kDefaultTimeout);
+  EXPECT_EQ(srd_observer->error(), "");
+
+  // The rollback must have passed through kStable on the way to
+  // kHaveRemoteOffer.
+  EXPECT_THAT(callee()->peer_connection_signaling_state_history(),
+              ElementsAre(PeerConnectionInterface::kHaveLocalOffer,
+                          PeerConnectionInterface::kStable,
+                          PeerConnectionInterface::kHaveRemoteOffer));
+}
+
+// Verifies that the H264 fmtp parameter sps-pps-idr-in-keyframe is absent in
+// SDP by default, and that once munged into both the local offer and the
+// remote answer, it persists into subsequently generated offers.
+TEST_F(PeerConnectionIntegrationTestUnifiedPlan,
+       H264FmtpSpsPpsIdrInKeyframeParameterUsage) {
+  ASSERT_TRUE(CreatePeerConnectionWrappers());
+  ConnectFakeSignaling();
+  caller()->AddVideoTrack();
+  callee()->AddVideoTrack();
+  // Adds the parameter to every H264 codec entry in the description.
+  auto munger = [](cricket::SessionDescription* desc) {
+    cricket::VideoContentDescription* video =
+        GetFirstVideoContentDescription(desc);
+    auto codecs = video->codecs();
+    for (auto&& codec : codecs) {
+      if (codec.name == "H264") {
+        std::string value;
+        // The parameter is not supposed to be present in SDP by default.
+        EXPECT_FALSE(
+            codec.GetParam(cricket::kH264FmtpSpsPpsIdrInKeyframe, &value));
+        codec.SetParam(std::string(cricket::kH264FmtpSpsPpsIdrInKeyframe),
+                       std::string(""));
+      }
+    }
+    video->set_codecs(codecs);
+  };
+  // Munge local offer for SLD.
+  caller()->SetGeneratedSdpMunger(munger);
+  // Munge remote answer for SRD.
+  caller()->SetReceivedSdpMunger(munger);
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  // Observe that after munging the parameter is present in generated SDP.
+  caller()->SetGeneratedSdpMunger([](cricket::SessionDescription* desc) {
+    cricket::VideoContentDescription* video =
+        GetFirstVideoContentDescription(desc);
+    for (auto&& codec : video->codecs()) {
+      if (codec.name == "H264") {
+        std::string value;
+        EXPECT_TRUE(
+            codec.GetParam(cricket::kH264FmtpSpsPpsIdrInKeyframe, &value));
+      }
+    }
+  });
+  caller()->CreateOfferAndWait();
+}
+
+// Performance guard: repeatedly doubles the number of audio transceivers and
+// renegotiates, asserting each renegotiation completes within one second.
+TEST_F(PeerConnectionIntegrationTestUnifiedPlan,
+       RenegotiateManyAudioTransceivers) {
+  PeerConnectionInterface::RTCConfiguration config;
+  config.sdp_semantics = SdpSemantics::kUnifiedPlan;
+  ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(config, config));
+  ConnectFakeSignaling();
+  caller()->pc()->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  int current_size = caller()->pc()->GetTransceivers().size();
+  // Add more tracks until we get close to having issues.
+  // Issues have been seen at:
+  // - 32 tracks on android_arm64_rel and android_arm_dbg bots
+  // - 16 tracks on android_arm_dbg (flaky)
+  while (current_size < 8) {
+    // Double the number of tracks
+    for (int i = 0; i < current_size; i++) {
+      caller()->pc()->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+    }
+    current_size = caller()->pc()->GetTransceivers().size();
+    RTC_LOG(LS_INFO) << "Renegotiating with " << current_size << " tracks";
+    auto start_time_ms = rtc::TimeMillis();
+    caller()->CreateAndSetAndSignalOffer();
+    // We want to stop when the time exceeds one second.
+    ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+    auto elapsed_time_ms = rtc::TimeMillis() - start_time_ms;
+    RTC_LOG(LS_INFO) << "Renegotiating took " << elapsed_time_ms << " ms";
+    ASSERT_GT(1000, elapsed_time_ms)
+        << "Audio transceivers: Negotiation took too long after "
+        << current_size << " tracks added";
+  }
+}
+
+// Performance guard: same as RenegotiateManyAudioTransceivers, but doubling
+// VIDEO transceivers; each renegotiation must finish within one second.
+TEST_F(PeerConnectionIntegrationTestUnifiedPlan,
+       RenegotiateManyVideoTransceivers) {
+  PeerConnectionInterface::RTCConfiguration config;
+  config.sdp_semantics = SdpSemantics::kUnifiedPlan;
+  ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(config, config));
+  ConnectFakeSignaling();
+  caller()->pc()->AddTransceiver(cricket::MEDIA_TYPE_VIDEO);
+
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  int current_size = caller()->pc()->GetTransceivers().size();
+  // Add more tracks until we get close to having issues.
+  // Issues have been seen at:
+  // - 96 on a Linux workstation
+  // - 64 at win_x86_more_configs and win_x64_msvc_dbg
+  // - 32 on android_arm64_rel and linux_dbg bots
+  // - 16 on Android 64 (Nexus 5x)
+  while (current_size < 8) {
+    // Double the number of tracks
+    for (int i = 0; i < current_size; i++) {
+      caller()->pc()->AddTransceiver(cricket::MEDIA_TYPE_VIDEO);
+    }
+    current_size = caller()->pc()->GetTransceivers().size();
+    RTC_LOG(LS_INFO) << "Renegotiating with " << current_size << " tracks";
+    auto start_time_ms = rtc::TimeMillis();
+    caller()->CreateAndSetAndSignalOffer();
+    // We want to stop when the time exceeds one second.
+    ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+    auto elapsed_time_ms = rtc::TimeMillis() - start_time_ms;
+    RTC_LOG(LS_INFO) << "Renegotiating took " << elapsed_time_ms << " ms";
+    ASSERT_GT(1000, elapsed_time_ms)
+        << "Video transceivers: Negotiation took too long after "
+        << current_size << " tracks added";
+  }
+}
+
+// Performance guard: repeatedly doubles video transceivers while an audio
+// stream is flowing, bounding renegotiation time at five seconds and
+// recording audio delay stats after each round to watch for regressions.
+TEST_F(PeerConnectionIntegrationTestUnifiedPlan,
+       RenegotiateManyVideoTransceiversAndWatchAudioDelay) {
+  PeerConnectionInterface::RTCConfiguration config;
+  config.sdp_semantics = SdpSemantics::kUnifiedPlan;
+  ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(config, config));
+  ConnectFakeSignaling();
+  caller()->AddAudioTrack();
+  callee()->AddAudioTrack();
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  // Wait until we can see the audio flowing.
+  MediaExpectations media_expectations;
+  media_expectations.CalleeExpectsSomeAudio();
+  ASSERT_TRUE(ExpectNewFrames(media_expectations));
+
+  // Get the baseline numbers for audio_packets and audio_delay
+  // in both directions.
+  caller()->StartWatchingDelayStats();
+  callee()->StartWatchingDelayStats();
+
+  int current_size = caller()->pc()->GetTransceivers().size();
+  // Add more tracks until we get close to having issues.
+  // Making this number very large makes the test very slow.
+  while (current_size < 16) {
+    // Double the number of tracks
+    for (int i = 0; i < current_size; i++) {
+      caller()->pc()->AddTransceiver(cricket::MEDIA_TYPE_VIDEO);
+    }
+    current_size = caller()->pc()->GetTransceivers().size();
+    RTC_LOG(LS_INFO) << "Renegotiating with " << current_size << " tracks";
+    auto start_time_ms = rtc::TimeMillis();
+    caller()->CreateAndSetAndSignalOffer();
+    // We want to stop when the time exceeds one second.
+    ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+    auto elapsed_time_ms = rtc::TimeMillis() - start_time_ms;
+    RTC_LOG(LS_INFO) << "Renegotiating took " << elapsed_time_ms << " ms";
+    // This is a guard against the test using excessive amounts of time.
+    ASSERT_GT(5000, elapsed_time_ms)
+        << "Video transceivers: Negotiation took too long after "
+        << current_size << " tracks added";
+    caller()->UpdateDelayStats("caller reception", current_size);
+    callee()->UpdateDelayStats("callee reception", current_size);
+  }
+}
+
+// Verifies that a sender created via AddTransceiver reports exactly one
+// (default) encoding from GetParameters() even before any negotiation.
+TEST_F(PeerConnectionIntegrationTestUnifiedPlan,
+       GetParametersHasEncodingsBeforeNegotiation) {
+  ASSERT_TRUE(CreatePeerConnectionWrappers());
+  auto result = caller()->pc()->AddTransceiver(cricket::MEDIA_TYPE_VIDEO);
+  auto transceiver = result.MoveValue();
+  auto parameters = transceiver->sender()->GetParameters();
+  EXPECT_EQ(parameters.encodings.size(), 1u);
+}
+
+// Verifies that encodings passed in RtpTransceiverInit (here a max bitrate of
+// 12345 bps) are reflected by GetParameters() before any negotiation.
+TEST_F(PeerConnectionIntegrationTestUnifiedPlan,
+       GetParametersHasInitEncodingsBeforeNegotiation) {
+  ASSERT_TRUE(CreatePeerConnectionWrappers());
+  RtpTransceiverInit init;
+  init.send_encodings.push_back({});
+  init.send_encodings[0].max_bitrate_bps = 12345;
+  auto result = caller()->pc()->AddTransceiver(cricket::MEDIA_TYPE_VIDEO, init);
+  auto transceiver = result.MoveValue();
+  auto parameters = transceiver->sender()->GetParameters();
+  ASSERT_EQ(parameters.encodings.size(), 1u);
+  EXPECT_EQ(parameters.encodings[0].max_bitrate_bps, 12345);
+}
+
+// Run the parameterized integration suites under both Plan B (deprecated)
+// and Unified Plan SDP semantics.
+INSTANTIATE_TEST_SUITE_P(PeerConnectionIntegrationTest,
+                         PeerConnectionIntegrationTest,
+                         Values(SdpSemantics::kPlanB_DEPRECATED,
+                                SdpSemantics::kUnifiedPlan));
+
+INSTANTIATE_TEST_SUITE_P(PeerConnectionIntegrationTest,
+                         PeerConnectionIntegrationTestWithFakeClock,
+                         Values(SdpSemantics::kPlanB_DEPRECATED,
+                                SdpSemantics::kUnifiedPlan));
+
+// Tests that verify interoperability between Plan B and Unified Plan
+// PeerConnections. Parameterized over a (caller semantics, callee semantics)
+// tuple so each direction of the interop can be exercised.
+class PeerConnectionIntegrationInteropTest
+    : public PeerConnectionIntegrationBaseTest,
+      public ::testing::WithParamInterface<
+          std::tuple<SdpSemantics, SdpSemantics>> {
+ protected:
+  // Setting the SdpSemantics for the base test to kDefault does not matter
+  // because we specify not to use the test semantics when creating
+  // PeerConnectionIntegrationWrappers.
+  PeerConnectionIntegrationInteropTest()
+      : PeerConnectionIntegrationBaseTest(SdpSemantics::kPlanB_DEPRECATED),
+        caller_semantics_(std::get<0>(GetParam())),
+        callee_semantics_(std::get<1>(GetParam())) {}
+
+  // Creates caller/callee wrappers using the per-side semantics from the
+  // test parameter.
+  bool CreatePeerConnectionWrappersWithSemantics() {
+    return CreatePeerConnectionWrappersWithSdpSemantics(caller_semantics_,
+                                                        callee_semantics_);
+  }
+
+  // SDP semantics used by the caller and callee respectively.
+  const SdpSemantics caller_semantics_;
+  const SdpSemantics callee_semantics_;
+};
+
+// Smoke test: a mixed-semantics offer/answer with no media must still reach
+// a stable signaling state.
+TEST_P(PeerConnectionIntegrationInteropTest, NoMediaLocalToNoMediaRemote) {
+  ASSERT_TRUE(CreatePeerConnectionWrappersWithSemantics());
+  ConnectFakeSignaling();
+
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+}
+
+// Verifies that a single audio track negotiated across mixed SDP semantics
+// produces one remote audio receiver with a matching track ID, and that
+// audio actually flows.
+TEST_P(PeerConnectionIntegrationInteropTest, OneAudioLocalToNoMediaRemote) {
+  ASSERT_TRUE(CreatePeerConnectionWrappersWithSemantics());
+  ConnectFakeSignaling();
+  auto audio_sender = caller()->AddAudioTrack();
+
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+
+  // Verify that one audio receiver has been created on the remote and that it
+  // has the same track ID as the sending track.
+  auto receivers = callee()->pc()->GetReceivers();
+  ASSERT_EQ(1u, receivers.size());
+  EXPECT_EQ(cricket::MEDIA_TYPE_AUDIO, receivers[0]->media_type());
+  EXPECT_EQ(receivers[0]->track()->id(), audio_sender->track()->id());
+
+  MediaExpectations media_expectations;
+  media_expectations.CalleeExpectsSomeAudio();
+  ASSERT_TRUE(ExpectNewFrames(media_expectations));
+}
+
+// Verifies that one audio plus one video track negotiated across mixed SDP
+// semantics yield exactly one receiver of each kind on the remote, with
+// matching track IDs, and that both kinds of media flow.
+TEST_P(PeerConnectionIntegrationInteropTest, OneAudioOneVideoToNoMediaRemote) {
+  ASSERT_TRUE(CreatePeerConnectionWrappersWithSemantics());
+  ConnectFakeSignaling();
+  auto video_sender = caller()->AddVideoTrack();
+  auto audio_sender = caller()->AddAudioTrack();
+
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+
+  // Verify that one audio and one video receiver have been created on the
+  // remote and that they have the same track IDs as the sending tracks.
+  auto audio_receivers =
+      callee()->GetReceiversOfType(cricket::MEDIA_TYPE_AUDIO);
+  ASSERT_EQ(1u, audio_receivers.size());
+  EXPECT_EQ(audio_receivers[0]->track()->id(), audio_sender->track()->id());
+  auto video_receivers =
+      callee()->GetReceiversOfType(cricket::MEDIA_TYPE_VIDEO);
+  ASSERT_EQ(1u, video_receivers.size());
+  EXPECT_EQ(video_receivers[0]->track()->id(), video_sender->track()->id());
+
+  MediaExpectations media_expectations;
+  media_expectations.CalleeExpectsSomeAudioAndVideo();
+  ASSERT_TRUE(ExpectNewFrames(media_expectations));
+}
+
+// Verifies bidirectional audio and video between peers using mixed SDP
+// semantics when both sides add audio+video tracks.
+TEST_P(PeerConnectionIntegrationInteropTest,
+       OneAudioOneVideoLocalToOneAudioOneVideoRemote) {
+  ASSERT_TRUE(CreatePeerConnectionWrappersWithSemantics());
+  ConnectFakeSignaling();
+  caller()->AddAudioVideoTracks();
+  callee()->AddAudioVideoTracks();
+
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+
+  MediaExpectations media_expectations;
+  media_expectations.ExpectBidirectionalAudioAndVideo();
+  ASSERT_TRUE(ExpectNewFrames(media_expectations));
+}
+
+// Verifies that when only the caller's audio is negotiated first, the
+// callee's video is picked up after the roles are reversed and the callee
+// sends the second offer.
+TEST_P(PeerConnectionIntegrationInteropTest,
+       ReverseRolesOneAudioLocalToOneVideoRemote) {
+  ASSERT_TRUE(CreatePeerConnectionWrappersWithSemantics());
+  ConnectFakeSignaling();
+  caller()->AddAudioTrack();
+  callee()->AddVideoTrack();
+
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+
+  // Verify that only the audio track has been negotiated.
+  EXPECT_EQ(0u, caller()->GetReceiversOfType(cricket::MEDIA_TYPE_VIDEO).size());
+  // Might also check that the callee's NegotiationNeeded flag is set.
+
+  // Reverse roles.
+  callee()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+
+  MediaExpectations media_expectations;
+  media_expectations.CallerExpectsSomeVideo();
+  media_expectations.CalleeExpectsSomeAudio();
+  ASSERT_TRUE(ExpectNewFrames(media_expectations));
+}
+
+// Verifies that adding a new track and renegotiating after the initial
+// connection does not trigger any additional candidate gathering
+// (ExpectCandidates(0) arms a zero-candidate expectation on both sides).
+TEST_P(PeerConnectionIntegrationTest, NewTracksDoNotCauseNewCandidates) {
+  ASSERT_TRUE(CreatePeerConnectionWrappers());
+  ConnectFakeSignaling();
+  caller()->AddAudioVideoTracks();
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  ASSERT_TRUE_WAIT(DtlsConnected(), kDefaultTimeout);
+  caller()->ExpectCandidates(0);
+  callee()->ExpectCandidates(0);
+  caller()->AddAudioTrack();
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+}
+
+// Verifies that AddTrack fails gracefully (returns a non-ok error rather
+// than crashing) when the PeerConnection was built without a media engine.
+TEST_P(PeerConnectionIntegrationTest, MediaCallWithoutMediaEngineFails) {
+  ASSERT_TRUE(CreatePeerConnectionWrappersWithoutMediaEngine());
+  // AddTrack should fail.
+  EXPECT_FALSE(
+      caller()->pc()->AddTrack(caller()->CreateLocalAudioTrack(), {}).ok());
+}
+
+// Exercise the interop suite in both directions: Plan B caller with Unified
+// Plan callee, and vice versa.
+INSTANTIATE_TEST_SUITE_P(
+    PeerConnectionIntegrationTest,
+    PeerConnectionIntegrationInteropTest,
+    Values(std::make_tuple(SdpSemantics::kPlanB_DEPRECATED,
+                           SdpSemantics::kUnifiedPlan),
+           std::make_tuple(SdpSemantics::kUnifiedPlan,
+                           SdpSemantics::kPlanB_DEPRECATED)));
+
+// Test that if the Unified Plan side offers two video tracks then the Plan B
+// side will only see the first one and ignore the second.
+TEST_F(PeerConnectionIntegrationTestPlanB, TwoVideoUnifiedPlanToNoMediaPlanB) {
+  ASSERT_TRUE(CreatePeerConnectionWrappersWithSdpSemantics(
+      SdpSemantics::kUnifiedPlan, SdpSemantics::kPlanB_DEPRECATED));
+  ConnectFakeSignaling();
+  auto first_sender = caller()->AddVideoTrack();
+  // Second track is expected to be ignored by the Plan B callee.
+  caller()->AddVideoTrack();
+
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+
+  // Verify that there is only one receiver and it corresponds to the first
+  // added track.
+  auto receivers = callee()->pc()->GetReceivers();
+  ASSERT_EQ(1u, receivers.size());
+  EXPECT_TRUE(receivers[0]->track()->enabled());
+  EXPECT_EQ(first_sender->track()->id(), receivers[0]->track()->id());
+
+  MediaExpectations media_expectations;
+  media_expectations.CalleeExpectsSomeVideo();
+  ASSERT_TRUE(ExpectNewFrames(media_expectations));
+}
+
+// Test that if the initial offer tagged BUNDLE section is rejected due to its
+// associated RtpTransceiver being stopped and another transceiver is added,
+// then renegotiation causes the callee to receive the new video track without
+// error.
+// This is a regression test for bugs.webrtc.org/9954
+TEST_F(PeerConnectionIntegrationTestUnifiedPlan,
+       ReOfferWithStoppedBundleTaggedTransceiver) {
+  RTCConfiguration config;
+  // max-bundle ensures the audio m-section is the BUNDLE tag.
+  config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxBundle;
+  ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(config, config));
+  ConnectFakeSignaling();
+  auto audio_transceiver_or_error =
+      caller()->pc()->AddTransceiver(caller()->CreateLocalAudioTrack());
+  ASSERT_TRUE(audio_transceiver_or_error.ok());
+  auto audio_transceiver = audio_transceiver_or_error.MoveValue();
+
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  {
+    MediaExpectations media_expectations;
+    media_expectations.CalleeExpectsSomeAudio();
+    ASSERT_TRUE(ExpectNewFrames(media_expectations));
+  }
+
+  // Stop the BUNDLE-tagged transceiver and add a new video one, then
+  // renegotiate.
+  audio_transceiver->StopInternal();
+  caller()->pc()->AddTransceiver(caller()->CreateLocalVideoTrack());
+
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  {
+    MediaExpectations media_expectations;
+    media_expectations.CalleeExpectsSomeVideo();
+    ASSERT_TRUE(ExpectNewFrames(media_expectations));
+  }
+}
+
+// Verifies that stopping the only transceiver and renegotiating removes the
+// underlying DTLS transports: the transceiver list becomes empty and the ICE
+// gathering state transitions back to kIceGatheringNew.
+TEST_F(PeerConnectionIntegrationTestUnifiedPlan,
+       StopTransceiverRemovesDtlsTransports) {
+  RTCConfiguration config;
+  ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(config, config));
+  ConnectFakeSignaling();
+  auto audio_transceiver_or_error =
+      caller()->pc()->AddTransceiver(caller()->CreateLocalAudioTrack());
+  ASSERT_TRUE(audio_transceiver_or_error.ok());
+  auto audio_transceiver = audio_transceiver_or_error.MoveValue();
+
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+
+  audio_transceiver->StopStandard();
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  ASSERT_EQ(0U, caller()->pc()->GetTransceivers().size());
+  EXPECT_EQ(PeerConnectionInterface::kIceGatheringNew,
+            caller()->pc()->ice_gathering_state());
+  // Gathering went through the full cycle and returned to "new" when the
+  // transports were torn down.
+  EXPECT_THAT(caller()->ice_gathering_state_history(),
+              ElementsAre(PeerConnectionInterface::kIceGatheringGathering,
+                          PeerConnectionInterface::kIceGatheringComplete,
+                          PeerConnectionInterface::kIceGatheringNew));
+}
+
+// Verifies that stopping a transceiver and renegotiating removes the
+// transceivers, senders, and receivers on BOTH peers, and that both the
+// caller's and the callee's transceiver objects report stopped().
+TEST_F(PeerConnectionIntegrationTestUnifiedPlan,
+       StopTransceiverStopsAndRemovesTransceivers) {
+  RTCConfiguration config;
+  ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(config, config));
+  ConnectFakeSignaling();
+  auto audio_transceiver_or_error =
+      caller()->pc()->AddTransceiver(caller()->CreateLocalAudioTrack());
+  ASSERT_TRUE(audio_transceiver_or_error.ok());
+  auto caller_transceiver = audio_transceiver_or_error.MoveValue();
+
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  caller_transceiver->StopStandard();
+
+  // Grab the callee's transceiver before renegotiation removes it from the
+  // list, so its stopped() state can still be checked afterwards.
+  auto callee_transceiver = callee()->pc()->GetTransceivers()[0];
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  EXPECT_EQ(0U, caller()->pc()->GetTransceivers().size());
+  EXPECT_EQ(0U, callee()->pc()->GetTransceivers().size());
+  EXPECT_EQ(0U, caller()->pc()->GetSenders().size());
+  EXPECT_EQ(0U, callee()->pc()->GetSenders().size());
+  EXPECT_EQ(0U, caller()->pc()->GetReceivers().size());
+  EXPECT_EQ(0U, callee()->pc()->GetReceivers().size());
+  EXPECT_TRUE(caller_transceiver->stopped());
+  EXPECT_TRUE(callee_transceiver->stopped());
+}
+
+// Verifies that stopping an audio transceiver ends the local incoming track
+// immediately and the remote incoming track after the stop is negotiated.
+TEST_F(PeerConnectionIntegrationTestUnifiedPlan,
+       StopTransceiverEndsIncomingAudioTrack) {
+  RTCConfiguration config;
+  ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(config, config));
+  ConnectFakeSignaling();
+  auto audio_transceiver_or_error =
+      caller()->pc()->AddTransceiver(caller()->CreateLocalAudioTrack());
+  ASSERT_TRUE(audio_transceiver_or_error.ok());
+  auto audio_transceiver = audio_transceiver_or_error.MoveValue();
+
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  auto caller_track = audio_transceiver->receiver()->track();
+  auto callee_track = callee()->pc()->GetReceivers()[0]->track();
+  audio_transceiver->StopStandard();
+  // The local receive track ends immediately upon stopping.
+  EXPECT_EQ(MediaStreamTrackInterface::TrackState::kEnded,
+            caller_track->state());
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  // The remote track ends once the stop is signaled via renegotiation.
+  EXPECT_EQ(MediaStreamTrackInterface::TrackState::kEnded,
+            callee_track->state());
+}
+
+// Verifies that stopping a video transceiver ends the local incoming track
+// immediately and the remote incoming track after the stop is negotiated.
+TEST_F(PeerConnectionIntegrationTestUnifiedPlan,
+       StopTransceiverEndsIncomingVideoTrack) {
+  RTCConfiguration config;
+  ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(config, config));
+  ConnectFakeSignaling();
+  // This is the video variant of StopTransceiverEndsIncomingAudioTrack;
+  // locals are named "video_*" accordingly (previously copy-pasted as
+  // "audio_*", which was misleading).
+  auto video_transceiver_or_error =
+      caller()->pc()->AddTransceiver(caller()->CreateLocalVideoTrack());
+  ASSERT_TRUE(video_transceiver_or_error.ok());
+  auto video_transceiver = video_transceiver_or_error.MoveValue();
+
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  auto caller_track = video_transceiver->receiver()->track();
+  auto callee_track = callee()->pc()->GetReceivers()[0]->track();
+  video_transceiver->StopStandard();
+  // The local receive track ends immediately upon stopping.
+  EXPECT_EQ(MediaStreamTrackInterface::TrackState::kEnded,
+            caller_track->state());
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  // The remote track ends once the stop is signaled via renegotiation.
+  EXPECT_EQ(MediaStreamTrackInterface::TrackState::kEnded,
+            callee_track->state());
+}
+
+// Verifies that an encoder selector installed on an RtpSender via
+// SetEncoderSelector is consulted (OnCurrentEncoder is called) during a
+// one-directional caller-to-callee video call, and that video is received.
+TEST_P(PeerConnectionIntegrationTest, EndToEndRtpSenderVideoEncoderSelector) {
+  ASSERT_TRUE(
+      CreateOneDirectionalPeerConnectionWrappers(/*caller_to_callee=*/true));
+  ConnectFakeSignaling();
+  // Add one-directional video, from caller to callee.
+  rtc::scoped_refptr<webrtc::VideoTrackInterface> caller_track =
+      caller()->CreateLocalVideoTrack();
+  auto sender = caller()->AddTrack(caller_track);
+  PeerConnectionInterface::RTCOfferAnswerOptions options;
+  options.offer_to_receive_video = 0;
+  caller()->SetOfferAnswerOptions(options);
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  ASSERT_EQ(callee()->pc()->GetReceivers().size(), 1u);
+
+  // The mock expectation fails the test if OnCurrentEncoder is never called.
+  std::unique_ptr<MockEncoderSelector> encoder_selector =
+      std::make_unique<MockEncoderSelector>();
+  EXPECT_CALL(*encoder_selector, OnCurrentEncoder);
+
+  sender->SetEncoderSelector(std::move(encoder_selector));
+
+  // Expect video to be received in one direction.
+  MediaExpectations media_expectations;
+  media_expectations.CallerExpectsNoVideo();
+  media_expectations.CalleeExpectsSomeVideo();
+
+  EXPECT_TRUE(ExpectNewFrames(media_expectations));
+}
+
+// Returns the NACK count reported by the peer's single outbound RTP stream
+// (i.e. NACKs received by the sender). Returns 0 (and records a gtest
+// failure) if there is not exactly one outbound stream; returns 0 without
+// failing if the stat is not yet defined.
+int NacksReceivedCount(PeerConnectionIntegrationWrapper& pc) {
+  rtc::scoped_refptr<const webrtc::RTCStatsReport> report = pc.NewGetStats();
+  auto sender_stats = report->GetStatsOfType<RTCOutboundRtpStreamStats>();
+  if (sender_stats.size() != 1) {
+    ADD_FAILURE();
+    return 0;
+  }
+  if (!sender_stats[0]->nack_count.is_defined()) {
+    return 0;
+  }
+  return *sender_stats[0]->nack_count;
+}
+
+// Returns the NACK count reported by the peer's single inbound RTP stream
+// (i.e. NACKs sent by the receiver). Returns 0 (and records a gtest failure)
+// if there is not exactly one inbound stream; returns 0 without failing if
+// the stat is not yet defined.
+int NacksSentCount(PeerConnectionIntegrationWrapper& pc) {
+  rtc::scoped_refptr<const webrtc::RTCStatsReport> report = pc.NewGetStats();
+  auto receiver_stats = report->GetStatsOfType<RTCInboundRtpStreamStats>();
+  if (receiver_stats.size() != 1) {
+    ADD_FAILURE();
+    return 0;
+  }
+  if (!receiver_stats[0]->nack_count.is_defined()) {
+    return 0;
+  }
+  return *receiver_stats[0]->nack_count;
+}
+
+// Test disabled because it is flaky.
+// Verifies that audio packet loss triggers NACKs: the SDP is munged so only
+// Opus (with nack and rrtr feedback added) is offered, packet loss is
+// injected via the virtual socket server, and the stats are polled until the
+// callee has sent and the caller has received at least one NACK.
+TEST_F(PeerConnectionIntegrationTestUnifiedPlan,
+       DISABLED_AudioPacketLossCausesNack) {
+  RTCConfiguration config;
+  ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(config, config));
+  ConnectFakeSignaling();
+  auto audio_transceiver_or_error =
+      caller()->pc()->AddTransceiver(caller()->CreateLocalAudioTrack());
+  ASSERT_TRUE(audio_transceiver_or_error.ok());
+  auto send_transceiver = audio_transceiver_or_error.MoveValue();
+  // Munge the SDP to include NACK and RRTR on Opus, and remove all other
+  // codecs.
+  caller()->SetGeneratedSdpMunger([](cricket::SessionDescription* desc) {
+    for (ContentInfo& content : desc->contents()) {
+      cricket::AudioContentDescription* media =
+          content.media_description()->as_audio();
+      std::vector<cricket::AudioCodec> codecs = media->codecs();
+      std::vector<cricket::AudioCodec> codecs_out;
+      for (cricket::AudioCodec codec : codecs) {
+        if (codec.name == "opus") {
+          codec.AddFeedbackParam(cricket::FeedbackParam(
+              cricket::kRtcpFbParamNack, cricket::kParamValueEmpty));
+          codec.AddFeedbackParam(cricket::FeedbackParam(
+              cricket::kRtcpFbParamRrtr, cricket::kParamValueEmpty));
+          codecs_out.push_back(codec);
+        }
+      }
+      EXPECT_FALSE(codecs_out.empty());
+      media->set_codecs(codecs_out);
+    }
+  });
+
+  caller()->CreateAndSetAndSignalOffer();
+  // Check for failure in helpers
+  ASSERT_FALSE(HasFailure());
+  MediaExpectations media_expectations;
+  media_expectations.CalleeExpectsSomeAudio(1);
+  ExpectNewFrames(media_expectations);
+  ASSERT_FALSE(HasFailure());
+
+  // Inject 20% packet loss to provoke NACKs.
+  virtual_socket_server()->set_drop_probability(0.2);
+
+  // Wait until callee has sent at least one NACK.
+  // Note that due to stats caching, this might only be visible 50 ms
+  // after the nack was in fact sent.
+  EXPECT_TRUE_WAIT(NacksSentCount(*callee()) > 0, kDefaultTimeout);
+  ASSERT_FALSE(HasFailure());
+
+  virtual_socket_server()->set_drop_probability(0.0);
+  // Wait until caller has received at least one NACK
+  EXPECT_TRUE_WAIT(NacksReceivedCount(*caller()) > 0, kDefaultTimeout);
+}
+
+// Verifies that video packet loss triggers NACKs: the SDP is munged so only
+// VP8 (which must already advertise nack feedback) is offered, packet loss
+// is injected via the virtual socket server, and the stats are polled until
+// the callee has sent and the caller has received at least one NACK.
+TEST_F(PeerConnectionIntegrationTestUnifiedPlan, VideoPacketLossCausesNack) {
+  RTCConfiguration config;
+  ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(config, config));
+  ConnectFakeSignaling();
+  auto video_transceiver_or_error =
+      caller()->pc()->AddTransceiver(caller()->CreateLocalVideoTrack());
+  ASSERT_TRUE(video_transceiver_or_error.ok());
+  auto send_transceiver = video_transceiver_or_error.MoveValue();
+  // Munge the SDP to include NACK and RRTR on VP8, and remove all other
+  // codecs.
+  caller()->SetGeneratedSdpMunger([](cricket::SessionDescription* desc) {
+    for (ContentInfo& content : desc->contents()) {
+      cricket::VideoContentDescription* media =
+          content.media_description()->as_video();
+      std::vector<cricket::VideoCodec> codecs = media->codecs();
+      std::vector<cricket::VideoCodec> codecs_out;
+      for (cricket::VideoCodec codec : codecs) {
+        if (codec.name == "VP8") {
+          // Unlike Opus, VP8 is expected to have NACK feedback by default.
+          ASSERT_TRUE(codec.HasFeedbackParam(cricket::FeedbackParam(
+              cricket::kRtcpFbParamNack, cricket::kParamValueEmpty)));
+          codecs_out.push_back(codec);
+        }
+      }
+      EXPECT_FALSE(codecs_out.empty());
+      media->set_codecs(codecs_out);
+    }
+  });
+
+  caller()->CreateAndSetAndSignalOffer();
+  // Check for failure in helpers
+  ASSERT_FALSE(HasFailure());
+  MediaExpectations media_expectations;
+  media_expectations.CalleeExpectsSomeVideo(1);
+  ExpectNewFrames(media_expectations);
+  ASSERT_FALSE(HasFailure());
+
+  // Inject 20% packet loss to provoke NACKs.
+  virtual_socket_server()->set_drop_probability(0.2);
+
+  // Wait until callee has sent at least one NACK.
+  // Note that due to stats caching, this might only be visible 50 ms
+  // after the nack was in fact sent.
+  EXPECT_TRUE_WAIT(NacksSentCount(*callee()) > 0, kDefaultTimeout);
+  ASSERT_FALSE(HasFailure());
+
+  // Wait until caller has received at least one NACK
+  EXPECT_TRUE_WAIT(NacksReceivedCount(*caller()) > 0, kDefaultTimeout);
+}
+
+} // namespace
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/peer_connection_interface_unittest.cc b/third_party/libwebrtc/pc/peer_connection_interface_unittest.cc
new file mode 100644
index 0000000000..1f5ab2f449
--- /dev/null
+++ b/third_party/libwebrtc/pc/peer_connection_interface_unittest.cc
@@ -0,0 +1,3867 @@
+/*
+ * Copyright 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/peer_connection_interface.h"
+
+#include <limits.h>
+#include <stdint.h>
+
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "absl/strings/str_replace.h"
+#include "absl/types/optional.h"
+#include "api/audio/audio_mixer.h"
+#include "api/audio_codecs/builtin_audio_decoder_factory.h"
+#include "api/audio_codecs/builtin_audio_encoder_factory.h"
+#include "api/call/call_factory_interface.h"
+#include "api/create_peerconnection_factory.h"
+#include "api/data_channel_interface.h"
+#include "api/jsep.h"
+#include "api/media_stream_interface.h"
+#include "api/media_types.h"
+#include "api/rtc_error.h"
+#include "api/rtc_event_log/rtc_event_log.h"
+#include "api/rtc_event_log/rtc_event_log_factory.h"
+#include "api/rtc_event_log_output.h"
+#include "api/rtp_receiver_interface.h"
+#include "api/rtp_sender_interface.h"
+#include "api/rtp_transceiver_direction.h"
+#include "api/scoped_refptr.h"
+#include "api/task_queue/default_task_queue_factory.h"
+#include "api/transport/field_trial_based_config.h"
+#include "api/video_codecs/video_decoder_factory_template.h"
+#include "api/video_codecs/video_decoder_factory_template_dav1d_adapter.h"
+#include "api/video_codecs/video_decoder_factory_template_libvpx_vp8_adapter.h"
+#include "api/video_codecs/video_decoder_factory_template_libvpx_vp9_adapter.h"
+#include "api/video_codecs/video_decoder_factory_template_open_h264_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template.h"
+#include "api/video_codecs/video_encoder_factory_template_libaom_av1_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template_libvpx_vp8_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template_libvpx_vp9_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template_open_h264_adapter.h"
+#include "media/base/codec.h"
+#include "media/base/media_config.h"
+#include "media/base/media_engine.h"
+#include "media/base/stream_params.h"
+#include "media/engine/webrtc_media_engine.h"
+#include "media/engine/webrtc_media_engine_defaults.h"
+#include "media/sctp/sctp_transport_internal.h"
+#include "modules/audio_device/include/audio_device.h"
+#include "modules/audio_processing/include/audio_processing.h"
+#include "p2p/base/fake_port_allocator.h"
+#include "p2p/base/p2p_constants.h"
+#include "p2p/base/port.h"
+#include "p2p/base/port_allocator.h"
+#include "p2p/base/transport_description.h"
+#include "p2p/base/transport_info.h"
+#include "pc/audio_track.h"
+#include "pc/media_session.h"
+#include "pc/media_stream.h"
+#include "pc/peer_connection.h"
+#include "pc/peer_connection_factory.h"
+#include "pc/rtp_sender.h"
+#include "pc/rtp_sender_proxy.h"
+#include "pc/session_description.h"
+#include "pc/stream_collection.h"
+#include "pc/test/fake_audio_capture_module.h"
+#include "pc/test/fake_rtc_certificate_generator.h"
+#include "pc/test/fake_video_track_source.h"
+#include "pc/test/mock_peer_connection_observers.h"
+#include "pc/test/test_sdp_strings.h"
+#include "pc/video_track.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/rtc_certificate_generator.h"
+#include "rtc_base/socket_address.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/virtual_socket_server.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+#include "test/scoped_key_value_config.h"
+
+#ifdef WEBRTC_ANDROID
+#include "pc/test/android_test_initializer.h"
+#endif
+
+namespace webrtc {
+namespace {
+
+static const char kStreamId1[] = "local_stream_1";
+static const char kStreamId2[] = "local_stream_2";
+static const char kStreamId3[] = "local_stream_3";
+static const int kDefaultStunPort = 3478;
+static const char kStunAddressOnly[] = "stun:address";
+static const char kStunInvalidPort[] = "stun:address:-1";
+static const char kStunAddressPortAndMore1[] = "stun:address:port:more";
+static const char kStunAddressPortAndMore2[] = "stun:address:port more";
+static const char kTurnIceServerUri[] = "turn:turn.example.org";
+static const char kTurnUsername[] = "user";
+static const char kTurnPassword[] = "password";
+static const char kTurnHostname[] = "turn.example.org";
+static const uint32_t kTimeout = 10000U;
+
+static const char kStreams[][8] = {"stream1", "stream2"};
+static const char kAudioTracks[][32] = {"audiotrack0", "audiotrack1"};
+static const char kVideoTracks[][32] = {"videotrack0", "videotrack1"};
+
+static const char kRecvonly[] = "recvonly";
+static const char kSendrecv[] = "sendrecv";
+constexpr uint64_t kTiebreakerDefault = 44444;
+
+// Reference SDP with a MediaStream with label "stream1", an audio track with
+// id "audio_1" and a video track with id "video_1".
+static const char kSdpStringWithStream1PlanB[] =
+ "v=0\r\n"
+ "o=- 0 0 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "m=audio 1 RTP/AVPF 111\r\n"
+ "a=ice-ufrag:e5785931\r\n"
+ "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n"
+ "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:"
+ "BD:AB:AE:40:EF:CE:9A:51:2C:2A:B1:9B:8B:78:84\r\n"
+ "a=mid:audio\r\n"
+ "a=sendrecv\r\n"
+ "a=rtcp-mux\r\n"
+ "a=rtpmap:111 OPUS/48000/2\r\n"
+ "a=ssrc:1 cname:stream1\r\n"
+ "a=ssrc:1 msid:stream1 audiotrack0\r\n"
+ "m=video 1 RTP/AVPF 120\r\n"
+ "a=ice-ufrag:e5785931\r\n"
+ "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n"
+ "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:"
+ "BD:AB:AE:40:EF:CE:9A:51:2C:2A:B1:9B:8B:78:84\r\n"
+ "a=mid:video\r\n"
+ "a=sendrecv\r\n"
+ "a=rtcp-mux\r\n"
+ "a=rtpmap:120 VP8/90000\r\n"
+ "a=ssrc:2 cname:stream1\r\n"
+ "a=ssrc:2 msid:stream1 videotrack0\r\n";
+// Same string as above but with the MID changed to the Unified Plan default and
+// a=msid added. This is needed so that this SDP can be used as an answer for a
+// Unified Plan offer.
+static const char kSdpStringWithStream1UnifiedPlan[] =
+ "v=0\r\n"
+ "o=- 0 0 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "m=audio 1 RTP/AVPF 111\r\n"
+ "a=ice-ufrag:e5785931\r\n"
+ "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n"
+ "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:"
+ "BD:AB:AE:40:EF:CE:9A:51:2C:2A:B1:9B:8B:78:84\r\n"
+ "a=mid:0\r\n"
+ "a=sendrecv\r\n"
+ "a=rtcp-mux\r\n"
+ "a=rtpmap:111 OPUS/48000/2\r\n"
+ "a=msid:stream1 audiotrack0\r\n"
+ "a=ssrc:1 cname:stream1\r\n"
+ "m=video 1 RTP/AVPF 120\r\n"
+ "a=ice-ufrag:e5785931\r\n"
+ "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n"
+ "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:"
+ "BD:AB:AE:40:EF:CE:9A:51:2C:2A:B1:9B:8B:78:84\r\n"
+ "a=mid:1\r\n"
+ "a=sendrecv\r\n"
+ "a=rtcp-mux\r\n"
+ "a=rtpmap:120 VP8/90000\r\n"
+ "a=msid:stream1 videotrack0\r\n"
+ "a=ssrc:2 cname:stream1\r\n";
+
+// Reference SDP with a MediaStream with label "stream1" and audio track with
+// id "audio_1";
+static const char kSdpStringWithStream1AudioTrackOnly[] =
+ "v=0\r\n"
+ "o=- 0 0 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "m=audio 1 RTP/AVPF 111\r\n"
+ "a=ice-ufrag:e5785931\r\n"
+ "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n"
+ "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:"
+ "BD:AB:AE:40:EF:CE:9A:51:2C:2A:B1:9B:8B:78:84\r\n"
+ "a=mid:audio\r\n"
+ "a=sendrecv\r\n"
+ "a=rtpmap:111 OPUS/48000/2\r\n"
+ "a=ssrc:1 cname:stream1\r\n"
+ "a=ssrc:1 msid:stream1 audiotrack0\r\n"
+ "a=rtcp-mux\r\n";
+
+// Reference SDP with two MediaStreams with labels "stream1" and "stream2".
+// Each MediaStream has one audio track and one video track.
+// This uses MSID.
+static const char kSdpStringWithStream1And2PlanB[] =
+ "v=0\r\n"
+ "o=- 0 0 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=msid-semantic: WMS stream1 stream2\r\n"
+ "m=audio 1 RTP/AVPF 111\r\n"
+ "a=ice-ufrag:e5785931\r\n"
+ "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n"
+ "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:"
+ "BD:AB:AE:40:EF:CE:9A:51:2C:2A:B1:9B:8B:78:84\r\n"
+ "a=mid:audio\r\n"
+ "a=sendrecv\r\n"
+ "a=rtcp-mux\r\n"
+ "a=rtpmap:111 OPUS/48000/2\r\n"
+ "a=ssrc:1 cname:stream1\r\n"
+ "a=ssrc:1 msid:stream1 audiotrack0\r\n"
+ "a=ssrc:3 cname:stream2\r\n"
+ "a=ssrc:3 msid:stream2 audiotrack1\r\n"
+ "m=video 1 RTP/AVPF 120\r\n"
+ "a=ice-ufrag:e5785931\r\n"
+ "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n"
+ "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:"
+ "BD:AB:AE:40:EF:CE:9A:51:2C:2A:B1:9B:8B:78:84\r\n"
+ "a=mid:video\r\n"
+ "a=sendrecv\r\n"
+ "a=rtcp-mux\r\n"
+ "a=rtpmap:120 VP8/0\r\n"
+ "a=ssrc:2 cname:stream1\r\n"
+ "a=ssrc:2 msid:stream1 videotrack0\r\n"
+ "a=ssrc:4 cname:stream2\r\n"
+ "a=ssrc:4 msid:stream2 videotrack1\r\n";
+static const char kSdpStringWithStream1And2UnifiedPlan[] =
+ "v=0\r\n"
+ "o=- 0 0 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=msid-semantic: WMS stream1 stream2\r\n"
+ "m=audio 1 RTP/AVPF 111\r\n"
+ "a=ice-ufrag:e5785931\r\n"
+ "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n"
+ "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:"
+ "BD:AB:AE:40:EF:CE:9A:51:2C:2A:B1:9B:8B:78:84\r\n"
+ "a=mid:0\r\n"
+ "a=sendrecv\r\n"
+ "a=rtcp-mux\r\n"
+ "a=rtpmap:111 OPUS/48000/2\r\n"
+ "a=ssrc:1 cname:stream1\r\n"
+ "a=ssrc:1 msid:stream1 audiotrack0\r\n"
+ "m=video 1 RTP/AVPF 120\r\n"
+ "a=ice-ufrag:e5785931\r\n"
+ "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n"
+ "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:"
+ "BD:AB:AE:40:EF:CE:9A:51:2C:2A:B1:9B:8B:78:84\r\n"
+ "a=mid:1\r\n"
+ "a=sendrecv\r\n"
+ "a=rtcp-mux\r\n"
+ "a=rtpmap:120 VP8/0\r\n"
+ "a=ssrc:2 cname:stream1\r\n"
+ "a=ssrc:2 msid:stream1 videotrack0\r\n"
+ "m=audio 1 RTP/AVPF 111\r\n"
+ "a=ice-ufrag:e5785931\r\n"
+ "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n"
+ "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:"
+ "BD:AB:AE:40:EF:CE:9A:51:2C:2A:B1:9B:8B:78:84\r\n"
+ "a=mid:2\r\n"
+ "a=sendrecv\r\n"
+ "a=rtcp-mux\r\n"
+ "a=rtpmap:111 OPUS/48000/2\r\n"
+ "a=ssrc:3 cname:stream2\r\n"
+ "a=ssrc:3 msid:stream2 audiotrack1\r\n"
+ "m=video 1 RTP/AVPF 120\r\n"
+ "a=ice-ufrag:e5785931\r\n"
+ "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n"
+ "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:"
+ "BD:AB:AE:40:EF:CE:9A:51:2C:2A:B1:9B:8B:78:84\r\n"
+ "a=mid:3\r\n"
+ "a=sendrecv\r\n"
+ "a=rtcp-mux\r\n"
+ "a=rtpmap:120 VP8/0\r\n"
+ "a=ssrc:4 cname:stream2\r\n"
+ "a=ssrc:4 msid:stream2 videotrack1\r\n";
+
+// Reference SDP without MediaStreams. Msid is not supported.
+static const char kSdpStringWithoutStreams[] =
+ "v=0\r\n"
+ "o=- 0 0 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "m=audio 1 RTP/AVPF 111\r\n"
+ "a=ice-ufrag:e5785931\r\n"
+ "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n"
+ "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:"
+ "BD:AB:AE:40:EF:CE:9A:51:2C:2A:B1:9B:8B:78:84\r\n"
+ "a=mid:audio\r\n"
+ "a=sendrecv\r\n"
+ "a=rtcp-mux\r\n"
+ "a=rtpmap:111 OPUS/48000/2\r\n"
+ "m=video 1 RTP/AVPF 120\r\n"
+ "a=ice-ufrag:e5785931\r\n"
+ "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n"
+ "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:"
+ "BD:AB:AE:40:EF:CE:9A:51:2C:2A:B1:9B:8B:78:84\r\n"
+ "a=mid:video\r\n"
+ "a=sendrecv\r\n"
+ "a=rtcp-mux\r\n"
+ "a=rtpmap:120 VP8/90000\r\n";
+
+// Reference SDP without MediaStreams. Msid is supported.
+static const char kSdpStringWithMsidWithoutStreams[] =
+ "v=0\r\n"
+ "o=- 0 0 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=msid-semantic: WMS\r\n"
+ "m=audio 1 RTP/AVPF 111\r\n"
+ "a=ice-ufrag:e5785931\r\n"
+ "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n"
+ "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:"
+ "BD:AB:AE:40:EF:CE:9A:51:2C:2A:B1:9B:8B:78:84\r\n"
+ "a=mid:audio\r\n"
+ "a=sendrecv\r\n"
+ "a=rtcp-mux\r\n"
+ "a=rtpmap:111 OPUS/48000/2\r\n"
+ "m=video 1 RTP/AVPF 120\r\n"
+ "a=ice-ufrag:e5785931\r\n"
+ "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n"
+ "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:"
+ "BD:AB:AE:40:EF:CE:9A:51:2C:2A:B1:9B:8B:78:84\r\n"
+ "a=mid:video\r\n"
+ "a=sendrecv\r\n"
+ "a=rtcp-mux\r\n"
+ "a=rtpmap:120 VP8/90000\r\n";
+
+// Reference SDP without MediaStreams and audio only.
+static const char kSdpStringWithoutStreamsAudioOnly[] =
+ "v=0\r\n"
+ "o=- 0 0 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "m=audio 1 RTP/AVPF 111\r\n"
+ "a=ice-ufrag:e5785931\r\n"
+ "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n"
+ "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:"
+ "BD:AB:AE:40:EF:CE:9A:51:2C:2A:B1:9B:8B:78:84\r\n"
+ "a=mid:audio\r\n"
+ "a=sendrecv\r\n"
+ "a=rtcp-mux\r\n"
+ "a=rtpmap:111 OPUS/48000/2\r\n";
+
+// Reference SENDONLY SDP without MediaStreams. Msid is not supported.
+static const char kSdpStringSendOnlyWithoutStreams[] =
+ "v=0\r\n"
+ "o=- 0 0 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "m=audio 1 RTP/AVPF 111\r\n"
+ "a=ice-ufrag:e5785931\r\n"
+ "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n"
+ "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:"
+ "BD:AB:AE:40:EF:CE:9A:51:2C:2A:B1:9B:8B:78:84\r\n"
+ "a=mid:audio\r\n"
+ "a=sendrecv\r\n"
+ "a=sendonly\r\n"
+ "a=rtcp-mux\r\n"
+ "a=rtpmap:111 OPUS/48000/2\r\n"
+ "m=video 1 RTP/AVPF 120\r\n"
+ "a=ice-ufrag:e5785931\r\n"
+ "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n"
+ "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:"
+ "BD:AB:AE:40:EF:CE:9A:51:2C:2A:B1:9B:8B:78:84\r\n"
+ "a=mid:video\r\n"
+ "a=sendrecv\r\n"
+ "a=sendonly\r\n"
+ "a=rtcp-mux\r\n"
+ "a=rtpmap:120 VP8/90000\r\n";
+
+static const char kSdpStringInit[] =
+ "v=0\r\n"
+ "o=- 0 0 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=msid-semantic: WMS\r\n";
+
+static const char kSdpStringAudio[] =
+ "m=audio 1 RTP/AVPF 111\r\n"
+ "a=ice-ufrag:e5785931\r\n"
+ "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n"
+ "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:"
+ "BD:AB:AE:40:EF:CE:9A:51:2C:2A:B1:9B:8B:78:84\r\n"
+ "a=mid:audio\r\n"
+ "a=sendrecv\r\n"
+ "a=rtcp-mux\r\n"
+ "a=rtpmap:111 OPUS/48000/2\r\n";
+
+static const char kSdpStringVideo[] =
+ "m=video 1 RTP/AVPF 120\r\n"
+ "a=ice-ufrag:e5785931\r\n"
+ "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n"
+ "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:"
+ "BD:AB:AE:40:EF:CE:9A:51:2C:2A:B1:9B:8B:78:84\r\n"
+ "a=mid:video\r\n"
+ "a=sendrecv\r\n"
+ "a=rtcp-mux\r\n"
+ "a=rtpmap:120 VP8/90000\r\n";
+
+static const char kSdpStringMs1Audio0[] =
+ "a=ssrc:1 cname:stream1\r\n"
+ "a=ssrc:1 msid:stream1 audiotrack0\r\n";
+
+static const char kSdpStringMs1Video0[] =
+ "a=ssrc:2 cname:stream1\r\n"
+ "a=ssrc:2 msid:stream1 videotrack0\r\n";
+
+static const char kSdpStringMs1Audio1[] =
+ "a=ssrc:3 cname:stream1\r\n"
+ "a=ssrc:3 msid:stream1 audiotrack1\r\n";
+
+static const char kSdpStringMs1Video1[] =
+ "a=ssrc:4 cname:stream1\r\n"
+ "a=ssrc:4 msid:stream1 videotrack1\r\n";
+
+static const char kDtlsSdesFallbackSdp[] =
+ "v=0\r\n"
+ "o=xxxxxx 7 2 IN IP4 0.0.0.0\r\n"
+ "s=-\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "t=0 0\r\n"
+ "a=group:BUNDLE audio\r\n"
+ "a=msid-semantic: WMS\r\n"
+ "m=audio 1 RTP/SAVPF 0\r\n"
+ "a=sendrecv\r\n"
+ "a=rtcp-mux\r\n"
+ "a=mid:audio\r\n"
+ "a=ssrc:1 cname:stream1\r\n"
+ "a=ice-ufrag:e5785931\r\n"
+ "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n"
+ "a=rtpmap:0 pcmu/8000\r\n"
+ "a=fingerprint:sha-1 "
+ "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n"
+ "a=setup:actpass\r\n"
+ "a=crypto:0 AES_CM_128_HMAC_SHA1_80 "
+ "inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj|2^20|1:32 "
+ "dummy_session_params\r\n";
+
+class RtcEventLogOutputNull final : public RtcEventLogOutput {
+ public:
+ bool IsActive() const override { return true; }
+ bool Write(const absl::string_view /*output*/) override { return true; }
+};
+
+using ::cricket::StreamParams;
+using ::testing::Eq;
+using ::testing::Exactly;
+using ::testing::SizeIs;
+using ::testing::Values;
+
+using RTCConfiguration = PeerConnectionInterface::RTCConfiguration;
+using RTCOfferAnswerOptions = PeerConnectionInterface::RTCOfferAnswerOptions;
+
+// Gets the first ssrc of given content type from the ContentInfo.
+bool GetFirstSsrc(const cricket::ContentInfo* content_info, int* ssrc) {
+ if (!content_info || !ssrc) {
+ return false;
+ }
+ const cricket::MediaContentDescription* media_desc =
+ content_info->media_description();
+ if (!media_desc || media_desc->streams().empty()) {
+ return false;
+ }
+ *ssrc = media_desc->streams().begin()->first_ssrc();
+ return true;
+}
+
+// Get the ufrags out of an SDP blob. Useful for testing ICE restart
+// behavior.
+std::vector<std::string> GetUfrags(
+ const webrtc::SessionDescriptionInterface* desc) {
+ std::vector<std::string> ufrags;
+ for (const cricket::TransportInfo& info :
+ desc->description()->transport_infos()) {
+ ufrags.push_back(info.description.ice_ufrag);
+ }
+ return ufrags;
+}
+
+void SetSsrcToZero(std::string* sdp) {
+ const char kSdpSsrcAtribute[] = "a=ssrc:";
+ const char kSdpSsrcAtributeZero[] = "a=ssrc:0";
+ size_t ssrc_pos = 0;
+ while ((ssrc_pos = sdp->find(kSdpSsrcAtribute, ssrc_pos)) !=
+ std::string::npos) {
+ size_t end_ssrc = sdp->find(" ", ssrc_pos);
+ sdp->replace(ssrc_pos, end_ssrc - ssrc_pos, kSdpSsrcAtributeZero);
+ ssrc_pos = end_ssrc;
+ }
+}
+
+// Check if `streams` contains the specified track.
+bool ContainsTrack(const std::vector<cricket::StreamParams>& streams,
+ const std::string& stream_id,
+ const std::string& track_id) {
+ for (const cricket::StreamParams& params : streams) {
+ if (params.first_stream_id() == stream_id && params.id == track_id) {
+ return true;
+ }
+ }
+ return false;
+}
+
+// Check if `senders` contains the specified sender, by id.
+bool ContainsSender(
+ const std::vector<rtc::scoped_refptr<RtpSenderInterface>>& senders,
+ const std::string& id) {
+ for (const auto& sender : senders) {
+ if (sender->id() == id) {
+ return true;
+ }
+ }
+ return false;
+}
+
+// Check if `senders` contains the specified sender, by id and stream id.
+bool ContainsSender(
+ const std::vector<rtc::scoped_refptr<RtpSenderInterface>>& senders,
+ const std::string& id,
+ const std::string& stream_id) {
+ for (const auto& sender : senders) {
+ if (sender->id() == id && sender->stream_ids()[0] == stream_id) {
+ return true;
+ }
+ }
+ return false;
+}
+
+// Create a collection of streams.
+// CreateStreamCollection(1) creates a collection that
+// corresponds to kSdpStringWithStream1.
+// CreateStreamCollection(2) corresponds to kSdpStringWithStream1And2.
+rtc::scoped_refptr<StreamCollection> CreateStreamCollection(
+ int number_of_streams,
+ int tracks_per_stream) {
+ rtc::scoped_refptr<StreamCollection> local_collection(
+ StreamCollection::Create());
+
+ for (int i = 0; i < number_of_streams; ++i) {
+ rtc::scoped_refptr<webrtc::MediaStreamInterface> stream(
+ webrtc::MediaStream::Create(kStreams[i]));
+
+ for (int j = 0; j < tracks_per_stream; ++j) {
+ // Add a local audio track.
+ rtc::scoped_refptr<webrtc::AudioTrackInterface> audio_track(
+ webrtc::AudioTrack::Create(kAudioTracks[i * tracks_per_stream + j],
+ nullptr));
+ stream->AddTrack(audio_track);
+
+ // Add a local video track.
+ rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track(
+ webrtc::VideoTrack::Create(kVideoTracks[i * tracks_per_stream + j],
+ webrtc::FakeVideoTrackSource::Create(),
+ rtc::Thread::Current()));
+ stream->AddTrack(video_track);
+ }
+
+ local_collection->AddStream(stream);
+ }
+ return local_collection;
+}
+
+// Check equality of StreamCollections.
+bool CompareStreamCollections(StreamCollectionInterface* s1,
+ StreamCollectionInterface* s2) {
+ if (s1 == nullptr || s2 == nullptr || s1->count() != s2->count()) {
+ return false;
+ }
+
+ for (size_t i = 0; i != s1->count(); ++i) {
+ if (s1->at(i)->id() != s2->at(i)->id()) {
+ return false;
+ }
+ webrtc::AudioTrackVector audio_tracks1 = s1->at(i)->GetAudioTracks();
+ webrtc::AudioTrackVector audio_tracks2 = s2->at(i)->GetAudioTracks();
+ webrtc::VideoTrackVector video_tracks1 = s1->at(i)->GetVideoTracks();
+ webrtc::VideoTrackVector video_tracks2 = s2->at(i)->GetVideoTracks();
+
+ if (audio_tracks1.size() != audio_tracks2.size()) {
+ return false;
+ }
+ for (size_t j = 0; j != audio_tracks1.size(); ++j) {
+ if (audio_tracks1[j]->id() != audio_tracks2[j]->id()) {
+ return false;
+ }
+ }
+ if (video_tracks1.size() != video_tracks2.size()) {
+ return false;
+ }
+ for (size_t j = 0; j != video_tracks1.size(); ++j) {
+ if (video_tracks1[j]->id() != video_tracks2[j]->id()) {
+ return false;
+ }
+ }
+ }
+ return true;
+}
+
+// Helper class to test Observer.
+class MockTrackObserver : public ObserverInterface {
+ public:
+ explicit MockTrackObserver(NotifierInterface* notifier)
+ : notifier_(notifier) {
+ notifier_->RegisterObserver(this);
+ }
+
+ ~MockTrackObserver() { Unregister(); }
+
+ void Unregister() {
+ if (notifier_) {
+ notifier_->UnregisterObserver(this);
+ notifier_ = nullptr;
+ }
+ }
+
+ MOCK_METHOD(void, OnChanged, (), (override));
+
+ private:
+ NotifierInterface* notifier_;
+};
+
+// The PeerConnectionMediaConfig tests below verify that configuration and
+// constraints are propagated into the PeerConnection's MediaConfig. These
+// settings are intended for MediaChannel constructors, but that is not
+// exercised by these unittests.
+class PeerConnectionFactoryForTest : public webrtc::PeerConnectionFactory {
+ public:
+ static rtc::scoped_refptr<PeerConnectionFactoryForTest>
+ CreatePeerConnectionFactoryForTest() {
+ PeerConnectionFactoryDependencies dependencies;
+ dependencies.worker_thread = rtc::Thread::Current();
+ dependencies.network_thread = rtc::Thread::Current();
+ dependencies.signaling_thread = rtc::Thread::Current();
+ dependencies.task_queue_factory = CreateDefaultTaskQueueFactory();
+ dependencies.trials = std::make_unique<FieldTrialBasedConfig>();
+ cricket::MediaEngineDependencies media_deps;
+ media_deps.task_queue_factory = dependencies.task_queue_factory.get();
+ // Use fake audio device module since we're only testing the interface
+ // level, and using a real one could make tests flaky when run in parallel.
+ media_deps.adm = FakeAudioCaptureModule::Create();
+ SetMediaEngineDefaults(&media_deps);
+ media_deps.trials = dependencies.trials.get();
+ dependencies.media_engine =
+ cricket::CreateMediaEngine(std::move(media_deps));
+ dependencies.call_factory = webrtc::CreateCallFactory();
+ dependencies.event_log_factory = std::make_unique<RtcEventLogFactory>(
+ dependencies.task_queue_factory.get());
+
+ return rtc::make_ref_counted<PeerConnectionFactoryForTest>(
+ std::move(dependencies));
+ }
+
+ using PeerConnectionFactory::PeerConnectionFactory;
+
+ private:
+ rtc::scoped_refptr<FakeAudioCaptureModule> fake_audio_capture_module_;
+};
+
+// TODO(steveanton): Convert to use the new PeerConnectionWrapper.
+class PeerConnectionInterfaceBaseTest : public ::testing::Test {
+ protected:
+ explicit PeerConnectionInterfaceBaseTest(SdpSemantics sdp_semantics)
+ : vss_(new rtc::VirtualSocketServer()),
+ main_(vss_.get()),
+ sdp_semantics_(sdp_semantics) {
+#ifdef WEBRTC_ANDROID
+ webrtc::InitializeAndroidObjects();
+#endif
+ }
+
+ void SetUp() override {
+ // Use fake audio capture module since we're only testing the interface
+ // level, and using a real one could make tests flaky when run in parallel.
+ fake_audio_capture_module_ = FakeAudioCaptureModule::Create();
+ pc_factory_ = webrtc::CreatePeerConnectionFactory(
+ rtc::Thread::Current(), rtc::Thread::Current(), rtc::Thread::Current(),
+ rtc::scoped_refptr<webrtc::AudioDeviceModule>(
+ fake_audio_capture_module_),
+ webrtc::CreateBuiltinAudioEncoderFactory(),
+ webrtc::CreateBuiltinAudioDecoderFactory(),
+ std::make_unique<webrtc::VideoEncoderFactoryTemplate<
+ webrtc::LibvpxVp8EncoderTemplateAdapter,
+ webrtc::LibvpxVp9EncoderTemplateAdapter,
+ webrtc::OpenH264EncoderTemplateAdapter,
+ webrtc::LibaomAv1EncoderTemplateAdapter>>(),
+ std::make_unique<webrtc::VideoDecoderFactoryTemplate<
+ webrtc::LibvpxVp8DecoderTemplateAdapter,
+ webrtc::LibvpxVp9DecoderTemplateAdapter,
+ webrtc::OpenH264DecoderTemplateAdapter,
+ webrtc::Dav1dDecoderTemplateAdapter>>(),
+ nullptr /* audio_mixer */, nullptr /* audio_processing */);
+ ASSERT_TRUE(pc_factory_);
+ }
+
+ void TearDown() override {
+ if (pc_)
+ pc_->Close();
+ }
+
+ void CreatePeerConnection() {
+ CreatePeerConnection(PeerConnectionInterface::RTCConfiguration());
+ }
+
+ // DTLS does not work in a loopback call, so is disabled for many
+ // tests in this file.
+ void CreatePeerConnectionWithoutDtls() {
+ RTCConfiguration config;
+ PeerConnectionFactoryInterface::Options options;
+ options.disable_encryption = true;
+ pc_factory_->SetOptions(options);
+ CreatePeerConnection(config);
+ options.disable_encryption = false;
+ pc_factory_->SetOptions(options);
+ }
+
+ void CreatePeerConnectionWithIceTransportsType(
+ PeerConnectionInterface::IceTransportsType type) {
+ PeerConnectionInterface::RTCConfiguration config;
+ config.type = type;
+ return CreatePeerConnection(config);
+ }
+
+ void CreatePeerConnectionWithIceServer(const std::string& uri,
+ const std::string& username,
+ const std::string& password) {
+ PeerConnectionInterface::RTCConfiguration config;
+ PeerConnectionInterface::IceServer server;
+ server.uri = uri;
+ server.username = username;
+ server.password = password;
+ config.servers.push_back(server);
+ CreatePeerConnection(config);
+ }
+
+ void CreatePeerConnection(const RTCConfiguration& config) {
+ if (pc_) {
+ pc_->Close();
+ pc_ = nullptr;
+ }
+ std::unique_ptr<cricket::FakePortAllocator> port_allocator(
+ new cricket::FakePortAllocator(
+ rtc::Thread::Current(),
+ std::make_unique<rtc::BasicPacketSocketFactory>(vss_.get()),
+ &field_trials_));
+ port_allocator_ = port_allocator.get();
+ port_allocator_->SetIceTiebreaker(kTiebreakerDefault);
+
+ // Create certificate generator unless DTLS constraint is explicitly set to
+ // false.
+ std::unique_ptr<rtc::RTCCertificateGeneratorInterface> cert_generator;
+
+ // These won't be used if encryption is turned off, but that's harmless.
+ fake_certificate_generator_ = new FakeRTCCertificateGenerator();
+ cert_generator.reset(fake_certificate_generator_);
+
+ RTCConfiguration modified_config = config;
+ modified_config.sdp_semantics = sdp_semantics_;
+ PeerConnectionDependencies pc_dependencies(&observer_);
+ pc_dependencies.cert_generator = std::move(cert_generator);
+ pc_dependencies.allocator = std::move(port_allocator);
+ auto result = pc_factory_->CreatePeerConnectionOrError(
+ modified_config, std::move(pc_dependencies));
+ ASSERT_TRUE(result.ok());
+ pc_ = result.MoveValue();
+ observer_.SetPeerConnectionInterface(pc_.get());
+ EXPECT_EQ(PeerConnectionInterface::kStable, observer_.state_);
+ }
+
+ void CreatePeerConnectionExpectFail(const std::string& uri) {
+ PeerConnectionInterface::RTCConfiguration config;
+ PeerConnectionInterface::IceServer server;
+ server.uri = uri;
+ config.servers.push_back(server);
+ config.sdp_semantics = sdp_semantics_;
+ PeerConnectionDependencies pc_dependencies(&observer_);
+ auto result = pc_factory_->CreatePeerConnectionOrError(
+ config, std::move(pc_dependencies));
+ EXPECT_FALSE(result.ok());
+ }
+
+ void CreatePeerConnectionExpectFail(
+ PeerConnectionInterface::RTCConfiguration config) {
+ PeerConnectionInterface::IceServer server;
+ server.uri = kTurnIceServerUri;
+ server.password = kTurnPassword;
+ config.servers.push_back(server);
+ config.sdp_semantics = sdp_semantics_;
+ PeerConnectionDependencies pc_dependencies(&observer_);
+ auto result = pc_factory_->CreatePeerConnectionOrError(
+ config, std::move(pc_dependencies));
+ EXPECT_FALSE(result.ok());
+ }
+
+ void CreatePeerConnectionWithDifferentConfigurations() {
+ CreatePeerConnectionWithIceServer(kStunAddressOnly, "", "");
+ EXPECT_EQ(1u, port_allocator_->stun_servers().size());
+ EXPECT_EQ(0u, port_allocator_->turn_servers().size());
+ EXPECT_EQ("address", port_allocator_->stun_servers().begin()->hostname());
+ EXPECT_EQ(kDefaultStunPort,
+ port_allocator_->stun_servers().begin()->port());
+
+ CreatePeerConnectionExpectFail(kStunInvalidPort);
+ CreatePeerConnectionExpectFail(kStunAddressPortAndMore1);
+ CreatePeerConnectionExpectFail(kStunAddressPortAndMore2);
+
+ CreatePeerConnectionWithIceServer(kTurnIceServerUri, kTurnUsername,
+ kTurnPassword);
+ EXPECT_EQ(0u, port_allocator_->stun_servers().size());
+ EXPECT_EQ(1u, port_allocator_->turn_servers().size());
+ EXPECT_EQ(kTurnUsername,
+ port_allocator_->turn_servers()[0].credentials.username);
+ EXPECT_EQ(kTurnPassword,
+ port_allocator_->turn_servers()[0].credentials.password);
+ EXPECT_EQ(kTurnHostname,
+ port_allocator_->turn_servers()[0].ports[0].address.hostname());
+ }
+
+ void ReleasePeerConnection() {
+ pc_ = nullptr;
+ observer_.SetPeerConnectionInterface(nullptr);
+ }
+
+ rtc::scoped_refptr<VideoTrackInterface> CreateVideoTrack(
+ const std::string& label) {
+ return pc_factory_->CreateVideoTrack(FakeVideoTrackSource::Create(), label);
+ }
+
+ void AddVideoTrack(const std::string& track_label,
+ const std::vector<std::string>& stream_ids = {}) {
+ auto sender_or_error =
+ pc_->AddTrack(CreateVideoTrack(track_label), stream_ids);
+ ASSERT_EQ(RTCErrorType::NONE, sender_or_error.error().type());
+ }
+
+ void AddVideoStream(const std::string& label) {
+ rtc::scoped_refptr<MediaStreamInterface> stream(
+ pc_factory_->CreateLocalMediaStream(label));
+ stream->AddTrack(CreateVideoTrack(label + "v0"));
+ ASSERT_TRUE(pc_->AddStream(stream.get()));
+ }
+
+ rtc::scoped_refptr<AudioTrackInterface> CreateAudioTrack(
+ const std::string& label) {
+ return pc_factory_->CreateAudioTrack(label, nullptr);
+ }
+
+ void AddAudioTrack(const std::string& track_label,
+ const std::vector<std::string>& stream_ids = {}) {
+ auto sender_or_error =
+ pc_->AddTrack(CreateAudioTrack(track_label), stream_ids);
+ ASSERT_EQ(RTCErrorType::NONE, sender_or_error.error().type());
+ }
+
+ void AddAudioStream(const std::string& label) {
+ rtc::scoped_refptr<MediaStreamInterface> stream(
+ pc_factory_->CreateLocalMediaStream(label));
+ stream->AddTrack(CreateAudioTrack(label + "a0"));
+ ASSERT_TRUE(pc_->AddStream(stream.get()));
+ }
+
+ void AddAudioVideoStream(const std::string& stream_id,
+ const std::string& audio_track_label,
+ const std::string& video_track_label) {
+ // Create a local stream.
+ rtc::scoped_refptr<MediaStreamInterface> stream(
+ pc_factory_->CreateLocalMediaStream(stream_id));
+ stream->AddTrack(CreateAudioTrack(audio_track_label));
+ stream->AddTrack(CreateVideoTrack(video_track_label));
+ ASSERT_TRUE(pc_->AddStream(stream.get()));
+ }
+
+ rtc::scoped_refptr<RtpReceiverInterface> GetFirstReceiverOfType(
+ cricket::MediaType media_type) {
+ for (auto receiver : pc_->GetReceivers()) {
+ if (receiver->media_type() == media_type) {
+ return receiver;
+ }
+ }
+ return nullptr;
+ }
+
+ bool DoCreateOfferAnswer(std::unique_ptr<SessionDescriptionInterface>* desc,
+ const RTCOfferAnswerOptions* options,
+ bool offer) {
+ auto observer =
+ rtc::make_ref_counted<MockCreateSessionDescriptionObserver>();
+ if (offer) {
+ pc_->CreateOffer(observer.get(),
+ options ? *options : RTCOfferAnswerOptions());
+ } else {
+ pc_->CreateAnswer(observer.get(),
+ options ? *options : RTCOfferAnswerOptions());
+ }
+ EXPECT_EQ_WAIT(true, observer->called(), kTimeout);
+ *desc = observer->MoveDescription();
+ return observer->result();
+ }
+
+ bool DoCreateOffer(std::unique_ptr<SessionDescriptionInterface>* desc,
+ const RTCOfferAnswerOptions* options) {
+ return DoCreateOfferAnswer(desc, options, true);
+ }
+
+ bool DoCreateAnswer(std::unique_ptr<SessionDescriptionInterface>* desc,
+ const RTCOfferAnswerOptions* options) {
+ return DoCreateOfferAnswer(desc, options, false);
+ }
+
+ bool DoSetSessionDescription(
+ std::unique_ptr<SessionDescriptionInterface> desc,
+ bool local) {
+ auto observer = rtc::make_ref_counted<MockSetSessionDescriptionObserver>();
+ if (local) {
+ pc_->SetLocalDescription(observer.get(), desc.release());
+ } else {
+ pc_->SetRemoteDescription(observer.get(), desc.release());
+ }
+ if (pc_->signaling_state() != PeerConnectionInterface::kClosed) {
+ EXPECT_EQ_WAIT(true, observer->called(), kTimeout);
+ }
+ return observer->result();
+ }
+
+ bool DoSetLocalDescription(
+ std::unique_ptr<SessionDescriptionInterface> desc) {
+ return DoSetSessionDescription(std::move(desc), true);
+ }
+
+ bool DoSetRemoteDescription(
+ std::unique_ptr<SessionDescriptionInterface> desc) {
+ return DoSetSessionDescription(std::move(desc), false);
+ }
+
+ // Calls PeerConnection::GetStats and check the return value.
+ // It does not verify the values in the StatReports since a RTCP packet might
+ // be required.
+ bool DoGetStats(MediaStreamTrackInterface* track) {
+ auto observer = rtc::make_ref_counted<MockStatsObserver>();
+ if (!pc_->GetStats(observer.get(), track,
+ PeerConnectionInterface::kStatsOutputLevelStandard))
+ return false;
+ EXPECT_TRUE_WAIT(observer->called(), kTimeout);
+ return observer->called();
+ }
+
+ // Call the standards-compliant GetStats function.
+ bool DoGetRTCStats() {
+ auto callback =
+ rtc::make_ref_counted<webrtc::MockRTCStatsCollectorCallback>();
+ pc_->GetStats(callback.get());
+ EXPECT_TRUE_WAIT(callback->called(), kTimeout);
+ return callback->called();
+ }
+
+ void InitiateCall() {
+ CreatePeerConnectionWithoutDtls();
+ // Create a local stream with audio&video tracks.
+ if (sdp_semantics_ == SdpSemantics::kPlanB_DEPRECATED) {
+ AddAudioVideoStream(kStreamId1, "audio_track", "video_track");
+ } else {
+ // Unified Plan does not support AddStream, so just add an audio and video
+ // track.
+ AddAudioTrack(kAudioTracks[0], {kStreamId1});
+ AddVideoTrack(kVideoTracks[0], {kStreamId1});
+ }
+ CreateOfferReceiveAnswer();
+ }
+
+ // Verify that RTP Header extensions has been negotiated for audio and video.
+ void VerifyRemoteRtpHeaderExtensions() {
+ const cricket::MediaContentDescription* desc =
+ cricket::GetFirstAudioContentDescription(
+ pc_->remote_description()->description());
+ ASSERT_TRUE(desc != nullptr);
+ EXPECT_GT(desc->rtp_header_extensions().size(), 0u);
+
+ desc = cricket::GetFirstVideoContentDescription(
+ pc_->remote_description()->description());
+ ASSERT_TRUE(desc != nullptr);
+ EXPECT_GT(desc->rtp_header_extensions().size(), 0u);
+ }
+
+ void CreateOfferAsRemoteDescription() {
+ std::unique_ptr<SessionDescriptionInterface> offer;
+ ASSERT_TRUE(DoCreateOffer(&offer, nullptr));
+ std::string sdp;
+ EXPECT_TRUE(offer->ToString(&sdp));
+ std::unique_ptr<SessionDescriptionInterface> remote_offer(
+ webrtc::CreateSessionDescription(SdpType::kOffer, sdp));
+ EXPECT_TRUE(DoSetRemoteDescription(std::move(remote_offer)));
+ EXPECT_EQ(PeerConnectionInterface::kHaveRemoteOffer, observer_.state_);
+ }
+
+ void CreateAndSetRemoteOffer(const std::string& sdp) {
+ std::unique_ptr<SessionDescriptionInterface> remote_offer(
+ webrtc::CreateSessionDescription(SdpType::kOffer, sdp));
+ EXPECT_TRUE(DoSetRemoteDescription(std::move(remote_offer)));
+ EXPECT_EQ(PeerConnectionInterface::kHaveRemoteOffer, observer_.state_);
+ }
+
+ void CreateAnswerAsLocalDescription() {
+ std::unique_ptr<SessionDescriptionInterface> answer;
+ ASSERT_TRUE(DoCreateAnswer(&answer, nullptr));
+
+ // TODO(perkj): Currently SetLocalDescription fails if any parameters in an
+ // audio codec change, even if the parameter has nothing to do with
+ // receiving. Not all parameters are serialized to SDP.
+ // Since CreatePrAnswerAsLocalDescription serialize/deserialize
+ // the SessionDescription, it is necessary to do that here to in order to
+ // get ReceiveOfferCreatePrAnswerAndAnswer and RenegotiateAudioOnly to pass.
+ // https://code.google.com/p/webrtc/issues/detail?id=1356
+ std::string sdp;
+ EXPECT_TRUE(answer->ToString(&sdp));
+ std::unique_ptr<SessionDescriptionInterface> new_answer(
+ webrtc::CreateSessionDescription(SdpType::kAnswer, sdp));
+ EXPECT_TRUE(DoSetLocalDescription(std::move(new_answer)));
+ EXPECT_EQ(PeerConnectionInterface::kStable, observer_.state_);
+ }
+
+ void CreatePrAnswerAsLocalDescription() {
+ std::unique_ptr<SessionDescriptionInterface> answer;
+ ASSERT_TRUE(DoCreateAnswer(&answer, nullptr));
+
+ std::string sdp;
+ EXPECT_TRUE(answer->ToString(&sdp));
+ std::unique_ptr<SessionDescriptionInterface> pr_answer(
+ webrtc::CreateSessionDescription(SdpType::kPrAnswer, sdp));
+ EXPECT_TRUE(DoSetLocalDescription(std::move(pr_answer)));
+ EXPECT_EQ(PeerConnectionInterface::kHaveLocalPrAnswer, observer_.state_);
+ }
+
+ void CreateOfferReceiveAnswer() {
+ CreateOfferAsLocalDescription();
+ std::string sdp;
+ EXPECT_TRUE(pc_->local_description()->ToString(&sdp));
+ CreateAnswerAsRemoteDescription(sdp);
+ }
+
+ void CreateOfferAsLocalDescription() {
+ std::unique_ptr<SessionDescriptionInterface> offer;
+ ASSERT_TRUE(DoCreateOffer(&offer, nullptr));
+ // TODO(perkj): Currently SetLocalDescription fails if any parameters in an
+ // audio codec change, even if the parameter has nothing to do with
+ // receiving. Not all parameters are serialized to SDP.
+ // Since CreatePrAnswerAsLocalDescription serialize/deserialize
+ // the SessionDescription, it is necessary to do that here to in order to
+ // get ReceiveOfferCreatePrAnswerAndAnswer and RenegotiateAudioOnly to pass.
+ // https://code.google.com/p/webrtc/issues/detail?id=1356
+ std::string sdp;
+ EXPECT_TRUE(offer->ToString(&sdp));
+ std::unique_ptr<SessionDescriptionInterface> new_offer(
+ webrtc::CreateSessionDescription(SdpType::kOffer, sdp));
+
+ EXPECT_TRUE(DoSetLocalDescription(std::move(new_offer)));
+ EXPECT_EQ(PeerConnectionInterface::kHaveLocalOffer, observer_.state_);
+ // Wait for the ice_complete message, so that SDP will have candidates.
+ EXPECT_TRUE_WAIT(observer_.ice_gathering_complete_, kTimeout);
+ }
+
+ void CreateAnswerAsRemoteDescription(const std::string& sdp) {
+ std::unique_ptr<SessionDescriptionInterface> answer(
+ webrtc::CreateSessionDescription(SdpType::kAnswer, sdp));
+ ASSERT_TRUE(answer);
+ EXPECT_TRUE(DoSetRemoteDescription(std::move(answer)));
+ EXPECT_EQ(PeerConnectionInterface::kStable, observer_.state_);
+ }
+
+ void CreatePrAnswerAndAnswerAsRemoteDescription(const std::string& sdp) {
+ std::unique_ptr<SessionDescriptionInterface> pr_answer(
+ webrtc::CreateSessionDescription(SdpType::kPrAnswer, sdp));
+ ASSERT_TRUE(pr_answer);
+ EXPECT_TRUE(DoSetRemoteDescription(std::move(pr_answer)));
+ EXPECT_EQ(PeerConnectionInterface::kHaveRemotePrAnswer, observer_.state_);
+ std::unique_ptr<SessionDescriptionInterface> answer(
+ webrtc::CreateSessionDescription(SdpType::kAnswer, sdp));
+ ASSERT_TRUE(answer);
+ EXPECT_TRUE(DoSetRemoteDescription(std::move(answer)));
+ EXPECT_EQ(PeerConnectionInterface::kStable, observer_.state_);
+ }
+
+ // Waits until a remote stream with the given id is signaled. This helper
+ // function will verify both OnAddTrack and OnAddStream (Plan B only) are
+ // called with the given stream id and expected number of tracks.
+ void WaitAndVerifyOnAddStream(const std::string& stream_id,
+ int expected_num_tracks) {
+ // Verify that both OnAddStream and OnAddTrack are called.
+ EXPECT_EQ_WAIT(stream_id, observer_.GetLastAddedStreamId(), kTimeout);
+ EXPECT_EQ_WAIT(expected_num_tracks,
+ observer_.CountAddTrackEventsForStream(stream_id), kTimeout);
+ }
+
+ // Creates an offer and applies it as a local session description.
+ // Creates an answer with the same SDP an the offer but removes all lines
+ // that start with a:ssrc"
+ void CreateOfferReceiveAnswerWithoutSsrc() {
+ CreateOfferAsLocalDescription();
+ std::string sdp;
+ EXPECT_TRUE(pc_->local_description()->ToString(&sdp));
+ SetSsrcToZero(&sdp);
+ CreateAnswerAsRemoteDescription(sdp);
+ }
+
+ // This function creates a MediaStream with label kStreams[0] and
+ // `number_of_audio_tracks` and `number_of_video_tracks` tracks and the
+ // corresponding SessionDescriptionInterface. The SessionDescriptionInterface
+ // is returned and the MediaStream is stored in
+ // `reference_collection_`
+ std::unique_ptr<SessionDescriptionInterface>
+ CreateSessionDescriptionAndReference(size_t number_of_audio_tracks,
+ size_t number_of_video_tracks) {
+ EXPECT_LE(number_of_audio_tracks, 2u);
+ EXPECT_LE(number_of_video_tracks, 2u);
+
+ reference_collection_ = StreamCollection::Create();
+ std::string sdp_ms1 = std::string(kSdpStringInit);
+
+ std::string mediastream_id = kStreams[0];
+
+ rtc::scoped_refptr<webrtc::MediaStreamInterface> stream(
+ webrtc::MediaStream::Create(mediastream_id));
+ reference_collection_->AddStream(stream);
+
+ if (number_of_audio_tracks > 0) {
+ sdp_ms1 += std::string(kSdpStringAudio);
+ sdp_ms1 += std::string(kSdpStringMs1Audio0);
+ AddAudioTrack(kAudioTracks[0], stream.get());
+ }
+ if (number_of_audio_tracks > 1) {
+ sdp_ms1 += kSdpStringMs1Audio1;
+ AddAudioTrack(kAudioTracks[1], stream.get());
+ }
+
+ if (number_of_video_tracks > 0) {
+ sdp_ms1 += std::string(kSdpStringVideo);
+ sdp_ms1 += std::string(kSdpStringMs1Video0);
+ AddVideoTrack(kVideoTracks[0], stream.get());
+ }
+ if (number_of_video_tracks > 1) {
+ sdp_ms1 += kSdpStringMs1Video1;
+ AddVideoTrack(kVideoTracks[1], stream.get());
+ }
+
+ return std::unique_ptr<SessionDescriptionInterface>(
+ webrtc::CreateSessionDescription(SdpType::kOffer, sdp_ms1));
+ }
+
+ void AddAudioTrack(const std::string& track_id,
+ MediaStreamInterface* stream) {
+ rtc::scoped_refptr<webrtc::AudioTrackInterface> audio_track(
+ webrtc::AudioTrack::Create(track_id, nullptr));
+ ASSERT_TRUE(stream->AddTrack(audio_track));
+ }
+
+ void AddVideoTrack(const std::string& track_id,
+ MediaStreamInterface* stream) {
+ rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track(
+ webrtc::VideoTrack::Create(track_id,
+ webrtc::FakeVideoTrackSource::Create(),
+ rtc::Thread::Current()));
+ ASSERT_TRUE(stream->AddTrack(video_track));
+ }
+
+ std::unique_ptr<SessionDescriptionInterface> CreateOfferWithOneAudioTrack() {
+ CreatePeerConnectionWithoutDtls();
+ AddAudioTrack(kAudioTracks[0]);
+ std::unique_ptr<SessionDescriptionInterface> offer;
+ EXPECT_TRUE(DoCreateOffer(&offer, nullptr));
+ return offer;
+ }
+
+ std::unique_ptr<SessionDescriptionInterface> CreateOfferWithOneAudioStream() {
+ CreatePeerConnectionWithoutDtls();
+ AddAudioStream(kStreamId1);
+ std::unique_ptr<SessionDescriptionInterface> offer;
+ EXPECT_TRUE(DoCreateOffer(&offer, nullptr));
+ return offer;
+ }
+
+ std::unique_ptr<SessionDescriptionInterface> CreateAnswerWithOneAudioTrack() {
+ EXPECT_TRUE(DoSetRemoteDescription(CreateOfferWithOneAudioTrack()));
+ std::unique_ptr<SessionDescriptionInterface> answer;
+ EXPECT_TRUE(DoCreateAnswer(&answer, nullptr));
+ return answer;
+ }
+
+ std::unique_ptr<SessionDescriptionInterface>
+ CreateAnswerWithOneAudioStream() {
+ EXPECT_TRUE(DoSetRemoteDescription(CreateOfferWithOneAudioStream()));
+ std::unique_ptr<SessionDescriptionInterface> answer;
+ EXPECT_TRUE(DoCreateAnswer(&answer, nullptr));
+ return answer;
+ }
+
+ const std::string& GetFirstAudioStreamCname(
+ const SessionDescriptionInterface* desc) {
+ const cricket::AudioContentDescription* audio_desc =
+ cricket::GetFirstAudioContentDescription(desc->description());
+ return audio_desc->streams()[0].cname;
+ }
+
+ std::unique_ptr<SessionDescriptionInterface> CreateOfferWithOptions(
+ const RTCOfferAnswerOptions& offer_answer_options) {
+ RTC_DCHECK(pc_);
+ auto observer =
+ rtc::make_ref_counted<MockCreateSessionDescriptionObserver>();
+ pc_->CreateOffer(observer.get(), offer_answer_options);
+ EXPECT_EQ_WAIT(true, observer->called(), kTimeout);
+ return observer->MoveDescription();
+ }
+
+ void CreateOfferWithOptionsAsRemoteDescription(
+ std::unique_ptr<SessionDescriptionInterface>* desc,
+ const RTCOfferAnswerOptions& offer_answer_options) {
+ *desc = CreateOfferWithOptions(offer_answer_options);
+ ASSERT_TRUE(desc != nullptr);
+ std::string sdp;
+ EXPECT_TRUE((*desc)->ToString(&sdp));
+ std::unique_ptr<SessionDescriptionInterface> remote_offer(
+ webrtc::CreateSessionDescription(SdpType::kOffer, sdp));
+ EXPECT_TRUE(DoSetRemoteDescription(std::move(remote_offer)));
+ EXPECT_EQ(PeerConnectionInterface::kHaveRemoteOffer, observer_.state_);
+ }
+
+ void CreateOfferWithOptionsAsLocalDescription(
+ std::unique_ptr<SessionDescriptionInterface>* desc,
+ const RTCOfferAnswerOptions& offer_answer_options) {
+ *desc = CreateOfferWithOptions(offer_answer_options);
+ ASSERT_TRUE(desc != nullptr);
+ std::string sdp;
+ EXPECT_TRUE((*desc)->ToString(&sdp));
+ std::unique_ptr<SessionDescriptionInterface> new_offer(
+ webrtc::CreateSessionDescription(SdpType::kOffer, sdp));
+
+ EXPECT_TRUE(DoSetLocalDescription(std::move(new_offer)));
+ EXPECT_EQ(PeerConnectionInterface::kHaveLocalOffer, observer_.state_);
+ }
+
+ bool HasCNCodecs(const cricket::ContentInfo* content) {
+ RTC_DCHECK(content);
+ RTC_DCHECK(content->media_description());
+ for (const cricket::AudioCodec& codec :
+ content->media_description()->as_audio()->codecs()) {
+ if (codec.name == "CN") {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ const char* GetSdpStringWithStream1() const {
+ if (sdp_semantics_ == SdpSemantics::kPlanB_DEPRECATED) {
+ return kSdpStringWithStream1PlanB;
+ } else {
+ return kSdpStringWithStream1UnifiedPlan;
+ }
+ }
+
+ const char* GetSdpStringWithStream1And2() const {
+ if (sdp_semantics_ == SdpSemantics::kPlanB_DEPRECATED) {
+ return kSdpStringWithStream1And2PlanB;
+ } else {
+ return kSdpStringWithStream1And2UnifiedPlan;
+ }
+ }
+
  // Returns the virtual socket server backing the test's network stack.
  rtc::SocketServer* socket_server() const { return vss_.get(); }

  // Field trial overrides in effect for the lifetime of the fixture.
  webrtc::test::ScopedKeyValueConfig field_trials_;
  // Virtual socket server; declared before `main_`, which presumably uses
  // it (NOTE(review): constructor not visible here — confirm ordering).
  std::unique_ptr<rtc::VirtualSocketServer> vss_;
  rtc::AutoSocketServerThread main_;
  rtc::scoped_refptr<FakeAudioCaptureModule> fake_audio_capture_module_;
  // Non-owning observation pointers into objects owned elsewhere.
  cricket::FakePortAllocator* port_allocator_ = nullptr;
  FakeRTCCertificateGenerator* fake_certificate_generator_ = nullptr;
  rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> pc_factory_;
  rtc::scoped_refptr<PeerConnectionInterface> pc_;
  MockPeerConnectionObserver observer_;
  rtc::scoped_refptr<StreamCollection> reference_collection_;
  // SDP semantics (Plan B vs Unified Plan) this fixture instance runs under.
  const SdpSemantics sdp_semantics_;
};
+
// Parameterized fixture: runs each TEST_P under the SdpSemantics supplied by
// the test instantiation (Plan B or Unified Plan).
class PeerConnectionInterfaceTest
    : public PeerConnectionInterfaceBaseTest,
      public ::testing::WithParamInterface<SdpSemantics> {
 protected:
  PeerConnectionInterfaceTest() : PeerConnectionInterfaceBaseTest(GetParam()) {}
};
+
// Fixture pinned to (deprecated) Plan B semantics, for tests exercising the
// legacy stream-based APIs that Unified Plan does not support.
class PeerConnectionInterfaceTestPlanB
    : public PeerConnectionInterfaceBaseTest {
 protected:
  PeerConnectionInterfaceTestPlanB()
      : PeerConnectionInterfaceBaseTest(SdpSemantics::kPlanB_DEPRECATED) {}
};
+
+// Generate different CNAMEs when PeerConnections are created.
+// The CNAMEs are expected to be generated randomly. It is possible
+// that the test fails, though the possibility is very low.
+TEST_P(PeerConnectionInterfaceTest, CnameGenerationInOffer) {
+ std::unique_ptr<SessionDescriptionInterface> offer1 =
+ CreateOfferWithOneAudioTrack();
+ std::unique_ptr<SessionDescriptionInterface> offer2 =
+ CreateOfferWithOneAudioTrack();
+ EXPECT_NE(GetFirstAudioStreamCname(offer1.get()),
+ GetFirstAudioStreamCname(offer2.get()));
+}
+
+TEST_P(PeerConnectionInterfaceTest, CnameGenerationInAnswer) {
+ std::unique_ptr<SessionDescriptionInterface> answer1 =
+ CreateAnswerWithOneAudioTrack();
+ std::unique_ptr<SessionDescriptionInterface> answer2 =
+ CreateAnswerWithOneAudioTrack();
+ EXPECT_NE(GetFirstAudioStreamCname(answer1.get()),
+ GetFirstAudioStreamCname(answer2.get()));
+}
+
// Smoke test: delegates to the fixture helper, which constructs
// PeerConnections under a range of RTCConfigurations; passing means no
// EXPECT/ASSERT inside the helper fired.
TEST_P(PeerConnectionInterfaceTest,
       CreatePeerConnectionWithDifferentConfigurations) {
  CreatePeerConnectionWithDifferentConfigurations();
}
+
// Each helper call below recreates the PeerConnection (refreshing
// `port_allocator_`) with the given IceTransportsType; verify the type maps
// onto the expected candidate filter in the port allocator.
TEST_P(PeerConnectionInterfaceTest,
       CreatePeerConnectionWithDifferentIceTransportsTypes) {
  CreatePeerConnectionWithIceTransportsType(PeerConnectionInterface::kNone);
  EXPECT_EQ(cricket::CF_NONE, port_allocator_->candidate_filter());
  CreatePeerConnectionWithIceTransportsType(PeerConnectionInterface::kRelay);
  EXPECT_EQ(cricket::CF_RELAY, port_allocator_->candidate_filter());
  // kNoHost allows everything except host candidates.
  CreatePeerConnectionWithIceTransportsType(PeerConnectionInterface::kNoHost);
  EXPECT_EQ(cricket::CF_ALL & ~cricket::CF_HOST,
            port_allocator_->candidate_filter());
  CreatePeerConnectionWithIceTransportsType(PeerConnectionInterface::kAll);
  EXPECT_EQ(cricket::CF_ALL, port_allocator_->candidate_filter());
}
+
+// Test that when a PeerConnection is created with a nonzero candidate pool
+// size, the pooled PortAllocatorSession is created with all the attributes
+// in the RTCConfiguration.
+TEST_P(PeerConnectionInterfaceTest, CreatePeerConnectionWithPooledCandidates) {
+ PeerConnectionInterface::RTCConfiguration config;
+ config.sdp_semantics = sdp_semantics_;
+ PeerConnectionInterface::IceServer server;
+ server.uri = kStunAddressOnly;
+ config.servers.push_back(server);
+ config.type = PeerConnectionInterface::kRelay;
+ config.tcp_candidate_policy =
+ PeerConnectionInterface::kTcpCandidatePolicyDisabled;
+ config.candidate_network_policy =
+ PeerConnectionInterface::kCandidateNetworkPolicyLowCost;
+ config.ice_candidate_pool_size = 1;
+ CreatePeerConnection(config);
+
+ const cricket::FakePortAllocatorSession* session =
+ static_cast<const cricket::FakePortAllocatorSession*>(
+ port_allocator_->GetPooledSession());
+ ASSERT_NE(nullptr, session);
+ EXPECT_EQ(1UL, session->stun_servers().size());
+ EXPECT_LT(0U, session->flags() & cricket::PORTALLOCATOR_DISABLE_TCP);
+ EXPECT_LT(0U,
+ session->flags() & cricket::PORTALLOCATOR_DISABLE_COSTLY_NETWORKS);
+}
+
+// Test that network-related RTCConfiguration members are applied to the
+// PortAllocator when CreatePeerConnection is called. Specifically:
+// - disable_ipv6_on_wifi
+// - max_ipv6_networks
+// - tcp_candidate_policy
+// - candidate_network_policy
+// - prune_turn_ports
+//
+// Note that the candidate filter (RTCConfiguration::type) is already tested
+// above.
+TEST_P(PeerConnectionInterfaceTest,
+ CreatePeerConnectionAppliesNetworkConfigToPortAllocator) {
+ // Create fake port allocator.
+ std::unique_ptr<rtc::PacketSocketFactory> packet_socket_factory(
+ new rtc::BasicPacketSocketFactory(socket_server()));
+ std::unique_ptr<cricket::FakePortAllocator> port_allocator(
+ new cricket::FakePortAllocator(
+ rtc::Thread::Current(), packet_socket_factory.get(), &field_trials_));
+ cricket::FakePortAllocator* raw_port_allocator = port_allocator.get();
+
+ // Create RTCConfiguration with some network-related fields relevant to
+ // PortAllocator populated.
+ PeerConnectionInterface::RTCConfiguration config;
+ config.sdp_semantics = sdp_semantics_;
+ config.disable_ipv6_on_wifi = true;
+ config.max_ipv6_networks = 10;
+ config.tcp_candidate_policy =
+ PeerConnectionInterface::kTcpCandidatePolicyDisabled;
+ config.candidate_network_policy =
+ PeerConnectionInterface::kCandidateNetworkPolicyLowCost;
+ config.prune_turn_ports = true;
+
+ // Create the PC factory and PC with the above config.
+ rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> pc_factory(
+ webrtc::CreatePeerConnectionFactory(
+ rtc::Thread::Current(), rtc::Thread::Current(),
+ rtc::Thread::Current(), fake_audio_capture_module_,
+ webrtc::CreateBuiltinAudioEncoderFactory(),
+ webrtc::CreateBuiltinAudioDecoderFactory(),
+ std::make_unique<webrtc::VideoEncoderFactoryTemplate<
+ webrtc::LibvpxVp8EncoderTemplateAdapter,
+ webrtc::LibvpxVp9EncoderTemplateAdapter,
+ webrtc::OpenH264EncoderTemplateAdapter,
+ webrtc::LibaomAv1EncoderTemplateAdapter>>(),
+ std::make_unique<webrtc::VideoDecoderFactoryTemplate<
+ webrtc::LibvpxVp8DecoderTemplateAdapter,
+ webrtc::LibvpxVp9DecoderTemplateAdapter,
+ webrtc::OpenH264DecoderTemplateAdapter,
+ webrtc::Dav1dDecoderTemplateAdapter>>(),
+ nullptr /* audio_mixer */, nullptr /* audio_processing */));
+ PeerConnectionDependencies pc_dependencies(&observer_);
+ pc_dependencies.allocator = std::move(port_allocator);
+ auto result = pc_factory_->CreatePeerConnectionOrError(
+ config, std::move(pc_dependencies));
+ EXPECT_TRUE(result.ok());
+ observer_.SetPeerConnectionInterface(result.value().get());
+
+ // Now validate that the config fields set above were applied to the
+ // PortAllocator, as flags or otherwise.
+ EXPECT_FALSE(raw_port_allocator->flags() &
+ cricket::PORTALLOCATOR_ENABLE_IPV6_ON_WIFI);
+ EXPECT_EQ(10, raw_port_allocator->max_ipv6_networks());
+ EXPECT_TRUE(raw_port_allocator->flags() & cricket::PORTALLOCATOR_DISABLE_TCP);
+ EXPECT_TRUE(raw_port_allocator->flags() &
+ cricket::PORTALLOCATOR_DISABLE_COSTLY_NETWORKS);
+ EXPECT_EQ(webrtc::PRUNE_BASED_ON_PRIORITY,
+ raw_port_allocator->turn_port_prune_policy());
+}
+
+// Check that GetConfiguration returns the configuration the PeerConnection was
+// constructed with, before SetConfiguration is called.
+TEST_P(PeerConnectionInterfaceTest, GetConfigurationAfterCreatePeerConnection) {
+ PeerConnectionInterface::RTCConfiguration config;
+ config.sdp_semantics = sdp_semantics_;
+ config.type = PeerConnectionInterface::kRelay;
+ CreatePeerConnection(config);
+
+ PeerConnectionInterface::RTCConfiguration returned_config =
+ pc_->GetConfiguration();
+ EXPECT_EQ(PeerConnectionInterface::kRelay, returned_config.type);
+}
+
+// Check that GetConfiguration returns the last configuration passed into
+// SetConfiguration.
+TEST_P(PeerConnectionInterfaceTest, GetConfigurationAfterSetConfiguration) {
+ PeerConnectionInterface::RTCConfiguration starting_config;
+ starting_config.sdp_semantics = sdp_semantics_;
+ starting_config.bundle_policy =
+ webrtc::PeerConnection::kBundlePolicyMaxBundle;
+ CreatePeerConnection(starting_config);
+
+ PeerConnectionInterface::RTCConfiguration config = pc_->GetConfiguration();
+ config.type = PeerConnectionInterface::kRelay;
+ EXPECT_TRUE(pc_->SetConfiguration(config).ok());
+
+ PeerConnectionInterface::RTCConfiguration returned_config =
+ pc_->GetConfiguration();
+ EXPECT_EQ(PeerConnectionInterface::kRelay, returned_config.type);
+}
+
+TEST_P(PeerConnectionInterfaceTest, SetConfigurationFailsAfterClose) {
+ CreatePeerConnection();
+
+ pc_->Close();
+
+ EXPECT_FALSE(
+ pc_->SetConfiguration(PeerConnectionInterface::RTCConfiguration()).ok());
+}
+
+TEST_F(PeerConnectionInterfaceTestPlanB, AddStreams) {
+ CreatePeerConnectionWithoutDtls();
+ AddVideoStream(kStreamId1);
+ AddAudioStream(kStreamId2);
+ ASSERT_EQ(2u, pc_->local_streams()->count());
+
+ // Test we can add multiple local streams to one peerconnection.
+ rtc::scoped_refptr<MediaStreamInterface> stream(
+ pc_factory_->CreateLocalMediaStream(kStreamId3));
+ rtc::scoped_refptr<AudioTrackInterface> audio_track(
+ pc_factory_->CreateAudioTrack(
+ kStreamId3, static_cast<AudioSourceInterface*>(nullptr)));
+ stream->AddTrack(audio_track);
+ EXPECT_TRUE(pc_->AddStream(stream.get()));
+ EXPECT_EQ(3u, pc_->local_streams()->count());
+
+ // Remove the third stream.
+ pc_->RemoveStream(pc_->local_streams()->at(2));
+ EXPECT_EQ(2u, pc_->local_streams()->count());
+
+ // Remove the second stream.
+ pc_->RemoveStream(pc_->local_streams()->at(1));
+ EXPECT_EQ(1u, pc_->local_streams()->count());
+
+ // Remove the first stream.
+ pc_->RemoveStream(pc_->local_streams()->at(0));
+ EXPECT_EQ(0u, pc_->local_streams()->count());
+}
+
+// Test that the created offer includes streams we added.
+// Don't run under Unified Plan since the stream API is not available.
+TEST_F(PeerConnectionInterfaceTestPlanB, AddedStreamsPresentInOffer) {
+ CreatePeerConnectionWithoutDtls();
+ AddAudioVideoStream(kStreamId1, "audio_track", "video_track");
+ std::unique_ptr<SessionDescriptionInterface> offer;
+ ASSERT_TRUE(DoCreateOffer(&offer, nullptr));
+
+ const cricket::AudioContentDescription* audio_desc =
+ cricket::GetFirstAudioContentDescription(offer->description());
+ EXPECT_TRUE(ContainsTrack(audio_desc->streams(), kStreamId1, "audio_track"));
+
+ const cricket::VideoContentDescription* video_desc =
+ cricket::GetFirstVideoContentDescription(offer->description());
+ EXPECT_TRUE(ContainsTrack(video_desc->streams(), kStreamId1, "video_track"));
+
+ // Add another stream and ensure the offer includes both the old and new
+ // streams.
+ AddAudioVideoStream(kStreamId2, "audio_track2", "video_track2");
+ ASSERT_TRUE(DoCreateOffer(&offer, nullptr));
+
+ audio_desc = cricket::GetFirstAudioContentDescription(offer->description());
+ EXPECT_TRUE(ContainsTrack(audio_desc->streams(), kStreamId1, "audio_track"));
+ EXPECT_TRUE(ContainsTrack(audio_desc->streams(), kStreamId2, "audio_track2"));
+
+ video_desc = cricket::GetFirstVideoContentDescription(offer->description());
+ EXPECT_TRUE(ContainsTrack(video_desc->streams(), kStreamId1, "video_track"));
+ EXPECT_TRUE(ContainsTrack(video_desc->streams(), kStreamId2, "video_track2"));
+}
+
+// Don't run under Unified Plan since the stream API is not available.
+TEST_F(PeerConnectionInterfaceTestPlanB, RemoveStream) {
+ CreatePeerConnectionWithoutDtls();
+ AddVideoStream(kStreamId1);
+ ASSERT_EQ(1u, pc_->local_streams()->count());
+ pc_->RemoveStream(pc_->local_streams()->at(0));
+ EXPECT_EQ(0u, pc_->local_streams()->count());
+}
+
+// Test for AddTrack and RemoveTrack methods.
+// Tests that the created offer includes tracks we added,
+// and that the RtpSenders are created correctly.
+// Also tests that RemoveTrack removes the tracks from subsequent offers.
+// Only tested with Plan B since Unified Plan is covered in more detail by tests
+// in peerconnection_jsep_unittests.cc
+TEST_F(PeerConnectionInterfaceTestPlanB, AddTrackRemoveTrack) {
+ CreatePeerConnectionWithoutDtls();
+ rtc::scoped_refptr<AudioTrackInterface> audio_track(
+ CreateAudioTrack("audio_track"));
+ rtc::scoped_refptr<VideoTrackInterface> video_track(
+ CreateVideoTrack("video_track"));
+ auto audio_sender = pc_->AddTrack(audio_track, {kStreamId1}).MoveValue();
+ auto video_sender = pc_->AddTrack(video_track, {kStreamId1}).MoveValue();
+ EXPECT_EQ(1UL, audio_sender->stream_ids().size());
+ EXPECT_EQ(kStreamId1, audio_sender->stream_ids()[0]);
+ EXPECT_EQ("audio_track", audio_sender->id());
+ EXPECT_EQ(audio_track, audio_sender->track());
+ EXPECT_EQ(1UL, video_sender->stream_ids().size());
+ EXPECT_EQ(kStreamId1, video_sender->stream_ids()[0]);
+ EXPECT_EQ("video_track", video_sender->id());
+ EXPECT_EQ(video_track, video_sender->track());
+
+ // Now create an offer and check for the senders.
+ std::unique_ptr<SessionDescriptionInterface> offer;
+ ASSERT_TRUE(DoCreateOffer(&offer, nullptr));
+
+ const cricket::ContentInfo* audio_content =
+ cricket::GetFirstAudioContent(offer->description());
+ EXPECT_TRUE(ContainsTrack(audio_content->media_description()->streams(),
+ kStreamId1, "audio_track"));
+
+ const cricket::ContentInfo* video_content =
+ cricket::GetFirstVideoContent(offer->description());
+ EXPECT_TRUE(ContainsTrack(video_content->media_description()->streams(),
+ kStreamId1, "video_track"));
+
+ EXPECT_TRUE(DoSetLocalDescription(std::move(offer)));
+
+ // Now try removing the tracks.
+ EXPECT_TRUE(pc_->RemoveTrackOrError(audio_sender).ok());
+ EXPECT_TRUE(pc_->RemoveTrackOrError(video_sender).ok());
+
+ // Create a new offer and ensure it doesn't contain the removed senders.
+ ASSERT_TRUE(DoCreateOffer(&offer, nullptr));
+
+ audio_content = cricket::GetFirstAudioContent(offer->description());
+ EXPECT_FALSE(ContainsTrack(audio_content->media_description()->streams(),
+ kStreamId1, "audio_track"));
+
+ video_content = cricket::GetFirstVideoContent(offer->description());
+ EXPECT_FALSE(ContainsTrack(video_content->media_description()->streams(),
+ kStreamId1, "video_track"));
+
+ EXPECT_TRUE(DoSetLocalDescription(std::move(offer)));
+
+ // Calling RemoveTrack on a sender no longer attached to a PeerConnection
+ // should return false.
+ EXPECT_FALSE(pc_->RemoveTrackOrError(audio_sender).ok());
+ EXPECT_FALSE(pc_->RemoveTrackOrError(video_sender).ok());
+}
+
+// Test for AddTrack with init_send_encoding.
+TEST_F(PeerConnectionInterfaceTestPlanB, AddTrackWithSendEncodings) {
+ CreatePeerConnectionWithoutDtls();
+ rtc::scoped_refptr<AudioTrackInterface> audio_track(
+ CreateAudioTrack("audio_track"));
+ rtc::scoped_refptr<VideoTrackInterface> video_track(
+ CreateVideoTrack("video_track"));
+ RtpEncodingParameters audio_encodings;
+ audio_encodings.active = false;
+ auto audio_sender =
+ pc_->AddTrack(audio_track, {kStreamId1}, {audio_encodings}).MoveValue();
+ RtpEncodingParameters video_encodings;
+ video_encodings.active = true;
+ auto video_sender =
+ pc_->AddTrack(video_track, {kStreamId1}, {video_encodings}).MoveValue();
+ EXPECT_EQ(1UL, audio_sender->stream_ids().size());
+ EXPECT_EQ(kStreamId1, audio_sender->stream_ids()[0]);
+ EXPECT_EQ("audio_track", audio_sender->id());
+ EXPECT_EQ(audio_track, audio_sender->track());
+ EXPECT_EQ(1UL, video_sender->stream_ids().size());
+ EXPECT_EQ(kStreamId1, video_sender->stream_ids()[0]);
+ EXPECT_EQ("video_track", video_sender->id());
+ EXPECT_EQ(video_track, video_sender->track());
+
+ // Now create an offer and check for the senders.
+ std::unique_ptr<SessionDescriptionInterface> offer;
+ ASSERT_TRUE(DoCreateOffer(&offer, nullptr));
+
+ const cricket::ContentInfo* audio_content =
+ cricket::GetFirstAudioContent(offer->description());
+ EXPECT_TRUE(ContainsTrack(audio_content->media_description()->streams(),
+ kStreamId1, "audio_track"));
+
+ const cricket::ContentInfo* video_content =
+ cricket::GetFirstVideoContent(offer->description());
+ EXPECT_TRUE(ContainsTrack(video_content->media_description()->streams(),
+ kStreamId1, "video_track"));
+
+ EXPECT_TRUE(DoSetLocalDescription(std::move(offer)));
+
+ // Check the encodings.
+ ASSERT_THAT(audio_sender->GetParameters().encodings, SizeIs(1));
+ EXPECT_THAT(audio_sender->GetParameters().encodings[0].active, Eq(false));
+ ASSERT_THAT(video_sender->GetParameters().encodings, SizeIs(1));
+ EXPECT_THAT(video_sender->GetParameters().encodings[0].active, Eq(true));
+
+ // Now try removing the tracks.
+ EXPECT_TRUE(pc_->RemoveTrackOrError(audio_sender).ok());
+ EXPECT_TRUE(pc_->RemoveTrackOrError(video_sender).ok());
+}
+
+// Test creating senders without a stream specified,
+// expecting a random stream ID to be generated.
+TEST_P(PeerConnectionInterfaceTest, AddTrackWithoutStream) {
+ CreatePeerConnectionWithoutDtls();
+ rtc::scoped_refptr<AudioTrackInterface> audio_track(
+ CreateAudioTrack("audio_track"));
+ rtc::scoped_refptr<VideoTrackInterface> video_track(
+ CreateVideoTrack("video_track"));
+ auto audio_sender =
+ pc_->AddTrack(audio_track, std::vector<std::string>()).MoveValue();
+ auto video_sender =
+ pc_->AddTrack(video_track, std::vector<std::string>()).MoveValue();
+ EXPECT_EQ("audio_track", audio_sender->id());
+ EXPECT_EQ(audio_track, audio_sender->track());
+ EXPECT_EQ("video_track", video_sender->id());
+ EXPECT_EQ(video_track, video_sender->track());
+ if (sdp_semantics_ == SdpSemantics::kPlanB_DEPRECATED) {
+ // If the ID is truly a random GUID, it should be infinitely unlikely they
+ // will be the same.
+ EXPECT_NE(video_sender->stream_ids(), audio_sender->stream_ids());
+ } else {
+ // We allows creating tracks without stream ids under Unified Plan
+ // semantics.
+ EXPECT_EQ(0u, video_sender->stream_ids().size());
+ EXPECT_EQ(0u, audio_sender->stream_ids().size());
+ }
+}
+
+// Test that we can call GetStats() after AddTrack but before connecting
+// the PeerConnection to a peer.
+TEST_P(PeerConnectionInterfaceTest, AddTrackBeforeConnecting) {
+  CreatePeerConnectionWithoutDtls();
+  rtc::scoped_refptr<AudioTrackInterface> audio_track(
+      CreateAudioTrack("audio_track"));
+  rtc::scoped_refptr<VideoTrackInterface> video_track(
+      CreateVideoTrack("video_track"));
+  // Verify that AddTrack succeeded rather than storing the RTCErrorOr
+  // results in locals that were never inspected.
+  auto audio_sender = pc_->AddTrack(audio_track, std::vector<std::string>());
+  ASSERT_TRUE(audio_sender.ok());
+  auto video_sender = pc_->AddTrack(video_track, std::vector<std::string>());
+  ASSERT_TRUE(video_sender.ok());
+  // A null selector asks for stats covering the whole connection.
+  EXPECT_TRUE(DoGetStats(nullptr));
+}
+
+// Each sender created by AddTrack should get a non-zero attachment id
+// (the internal identifier used to key stats objects).
+TEST_P(PeerConnectionInterfaceTest, AttachmentIdIsSetOnAddTrack) {
+  CreatePeerConnectionWithoutDtls();
+  rtc::scoped_refptr<AudioTrackInterface> audio_track(
+      CreateAudioTrack("audio_track"));
+  rtc::scoped_refptr<VideoTrackInterface> video_track(
+      CreateVideoTrack("video_track"));
+  auto audio_sender = pc_->AddTrack(audio_track, std::vector<std::string>());
+  ASSERT_TRUE(audio_sender.ok());
+  // Unwrap the proxy to reach the internal sender, which exposes the id.
+  auto* audio_sender_proxy =
+      static_cast<RtpSenderProxyWithInternal<RtpSenderInternal>*>(
+          audio_sender.value().get());
+  EXPECT_NE(0, audio_sender_proxy->internal()->AttachmentId());
+
+  auto video_sender = pc_->AddTrack(video_track, std::vector<std::string>());
+  ASSERT_TRUE(video_sender.ok());
+  auto* video_sender_proxy =
+      static_cast<RtpSenderProxyWithInternal<RtpSenderInternal>*>(
+          video_sender.value().get());
+  EXPECT_NE(0, video_sender_proxy->internal()->AttachmentId());
+}
+
+// A sender created implicitly via AddStream should also get a non-zero
+// attachment id.
+// Don't run under Unified Plan since the stream API is not available.
+TEST_F(PeerConnectionInterfaceTestPlanB, AttachmentIdIsSetOnAddStream) {
+  CreatePeerConnectionWithoutDtls();
+  AddVideoStream(kStreamId1);
+  auto senders = pc_->GetSenders();
+  ASSERT_EQ(1u, senders.size());
+  auto* sender_proxy =
+      static_cast<RtpSenderProxyWithInternal<RtpSenderInternal>*>(
+          senders[0].get());
+  EXPECT_NE(0, sender_proxy->internal()->AttachmentId());
+}
+
+// Full offer/answer round trip: the remote stream and the negotiated RTP
+// header extensions should both be observable afterwards.
+TEST_P(PeerConnectionInterfaceTest, CreateOfferReceiveAnswer) {
+  InitiateCall();
+  WaitAndVerifyOnAddStream(kStreamId1, 2);
+  VerifyRemoteRtpHeaderExtensions();
+}
+
+// A provisional answer (pranswer) followed by a final answer should still
+// surface the remote stream.
+TEST_P(PeerConnectionInterfaceTest, CreateOfferReceivePrAnswerAndAnswer) {
+  CreatePeerConnectionWithoutDtls();
+  AddVideoTrack(kVideoTracks[0], {kStreamId1});
+  CreateOfferAsLocalDescription();
+  std::string offer;
+  EXPECT_TRUE(pc_->local_description()->ToString(&offer));
+  CreatePrAnswerAndAnswerAsRemoteDescription(offer);
+  WaitAndVerifyOnAddStream(kStreamId1, 1);
+}
+
+// Answering a remote offer should surface the remote stream.
+TEST_P(PeerConnectionInterfaceTest, ReceiveOfferCreateAnswer) {
+  CreatePeerConnectionWithoutDtls();
+  AddVideoTrack(kVideoTracks[0], {kStreamId1});
+
+  CreateOfferAsRemoteDescription();
+  CreateAnswerAsLocalDescription();
+
+  WaitAndVerifyOnAddStream(kStreamId1, 1);
+}
+
+// Same as above, but going through a local provisional answer first.
+TEST_P(PeerConnectionInterfaceTest, ReceiveOfferCreatePrAnswerAndAnswer) {
+  CreatePeerConnectionWithoutDtls();
+  AddVideoTrack(kVideoTracks[0], {kStreamId1});
+
+  CreateOfferAsRemoteDescription();
+  CreatePrAnswerAsLocalDescription();
+  CreateAnswerAsLocalDescription();
+
+  WaitAndVerifyOnAddStream(kStreamId1, 1);
+}
+
+// Don't run under Unified Plan since the stream API is not available.
+TEST_F(PeerConnectionInterfaceTestPlanB, Renegotiate) {
+  InitiateCall();
+  ASSERT_EQ(1u, pc_->remote_streams()->count());
+  // Removing the local stream and renegotiating should also remove the
+  // remote stream, since this test sends media back to itself.
+  pc_->RemoveStream(pc_->local_streams()->at(0));
+  CreateOfferReceiveAnswer();
+  EXPECT_EQ(0u, pc_->remote_streams()->count());
+  AddVideoStream(kStreamId1);
+  CreateOfferReceiveAnswer();
+}
+
+// Tests that after negotiating an audio only call, the respondent can perform a
+// renegotiation that removes the audio stream.
+TEST_F(PeerConnectionInterfaceTestPlanB, RenegotiateAudioOnly) {
+  CreatePeerConnectionWithoutDtls();
+  AddAudioStream(kStreamId1);
+  CreateOfferAsRemoteDescription();
+  CreateAnswerAsLocalDescription();
+
+  ASSERT_EQ(1u, pc_->remote_streams()->count());
+  pc_->RemoveStream(pc_->local_streams()->at(0));
+  CreateOfferReceiveAnswer();
+  EXPECT_EQ(0u, pc_->remote_streams()->count());
+}
+
+// Test that candidates are generated and that we can parse our own candidates.
+TEST_P(PeerConnectionInterfaceTest, IceCandidates) {
+  CreatePeerConnectionWithoutDtls();
+
+  // Nothing has been gathered yet, so there is no valid candidate to add.
+  EXPECT_FALSE(pc_->AddIceCandidate(observer_.last_candidate()));
+  // SetRemoteDescription takes ownership of offer.
+  std::unique_ptr<SessionDescriptionInterface> offer;
+  AddVideoTrack(kVideoTracks[0]);
+  EXPECT_TRUE(DoCreateOffer(&offer, nullptr));
+  EXPECT_TRUE(DoSetRemoteDescription(std::move(offer)));
+
+  // SetLocalDescription takes ownership of answer.
+  std::unique_ptr<SessionDescriptionInterface> answer;
+  EXPECT_TRUE(DoCreateAnswer(&answer, nullptr));
+  EXPECT_TRUE(DoSetLocalDescription(std::move(answer)));
+
+  // Candidate gathering is asynchronous; wait for it to produce a candidate
+  // and then run to completion.
+  EXPECT_TRUE_WAIT(observer_.last_candidate() != nullptr, kTimeout);
+  EXPECT_TRUE_WAIT(observer_.ice_gathering_complete_, kTimeout);
+
+  // Round-trip: a candidate we generated should be accepted back.
+  EXPECT_TRUE(pc_->AddIceCandidate(observer_.last_candidate()));
+}
+
+// Test that CreateOffer and CreateAnswer will fail if the track labels are
+// not unique.
+TEST_F(PeerConnectionInterfaceTestPlanB, CreateOfferAnswerWithInvalidStream) {
+  CreatePeerConnectionWithoutDtls();
+  // Create a regular offer for the CreateAnswer test later.
+  std::unique_ptr<SessionDescriptionInterface> offer;
+  EXPECT_TRUE(DoCreateOffer(&offer, nullptr));
+  EXPECT_TRUE(offer);
+  offer.reset();
+
+  // Create a local stream with audio&video tracks having same label.
+  AddAudioTrack("track_label", {kStreamId1});
+  AddVideoTrack("track_label", {kStreamId1});
+
+  // Test CreateOffer
+  EXPECT_FALSE(DoCreateOffer(&offer, nullptr));
+
+  // Test CreateAnswer
+  std::unique_ptr<SessionDescriptionInterface> answer;
+  EXPECT_FALSE(DoCreateAnswer(&answer, nullptr));
+}
+
+// Test that we will get different SSRCs for each tracks in the offer and answer
+// we created.
+TEST_P(PeerConnectionInterfaceTest, SsrcInOfferAnswer) {
+  CreatePeerConnectionWithoutDtls();
+  // Create a local stream with audio&video tracks having different labels.
+  AddAudioTrack(kAudioTracks[0], {kStreamId1});
+  AddVideoTrack(kVideoTracks[0], {kStreamId1});
+
+  // Test CreateOffer
+  std::unique_ptr<SessionDescriptionInterface> offer;
+  ASSERT_TRUE(DoCreateOffer(&offer, nullptr));
+  int audio_ssrc = 0;
+  int video_ssrc = 0;
+  EXPECT_TRUE(
+      GetFirstSsrc(GetFirstAudioContent(offer->description()), &audio_ssrc));
+  EXPECT_TRUE(
+      GetFirstSsrc(GetFirstVideoContent(offer->description()), &video_ssrc));
+  EXPECT_NE(audio_ssrc, video_ssrc);
+
+  // Test CreateAnswer
+  EXPECT_TRUE(DoSetRemoteDescription(std::move(offer)));
+  std::unique_ptr<SessionDescriptionInterface> answer;
+  ASSERT_TRUE(DoCreateAnswer(&answer, nullptr));
+  audio_ssrc = 0;
+  video_ssrc = 0;
+  EXPECT_TRUE(
+      GetFirstSsrc(GetFirstAudioContent(answer->description()), &audio_ssrc));
+  EXPECT_TRUE(
+      GetFirstSsrc(GetFirstVideoContent(answer->description()), &video_ssrc));
+  EXPECT_NE(audio_ssrc, video_ssrc);
+}
+
+// Test that it's possible to call AddTrack on a MediaStream after adding
+// the stream to a PeerConnection.
+// TODO(deadbeef): Remove this test once this behavior is no longer supported.
+// Don't run under Unified Plan since the stream API is not available.
+TEST_F(PeerConnectionInterfaceTestPlanB, AddTrackAfterAddStream) {
+  CreatePeerConnectionWithoutDtls();
+  // Create audio stream and add to PeerConnection.
+  AddAudioStream(kStreamId1);
+  MediaStreamInterface* stream = pc_->local_streams()->at(0);
+
+  // Add video track to the audio-only stream.
+  rtc::scoped_refptr<VideoTrackInterface> video_track(
+      CreateVideoTrack("video_label"));
+  stream->AddTrack(video_track);
+
+  std::unique_ptr<SessionDescriptionInterface> offer;
+  ASSERT_TRUE(DoCreateOffer(&offer, nullptr));
+
+  // The late-added video track should produce a video m-section in the offer.
+  const cricket::MediaContentDescription* video_desc =
+      cricket::GetFirstVideoContentDescription(offer->description());
+  EXPECT_TRUE(video_desc != nullptr);
+}
+
+// Test that it's possible to call RemoveTrack on a MediaStream after adding
+// the stream to a PeerConnection.
+// TODO(deadbeef): Remove this test once this behavior is no longer supported.
+// Don't run under Unified Plan since the stream API is not available.
+TEST_F(PeerConnectionInterfaceTestPlanB, RemoveTrackAfterAddStream) {
+  CreatePeerConnectionWithoutDtls();
+  // Create audio/video stream and add to PeerConnection.
+  AddAudioVideoStream(kStreamId1, "audio_label", "video_label");
+  MediaStreamInterface* stream = pc_->local_streams()->at(0);
+
+  // Remove the video track.
+  stream->RemoveTrack(stream->GetVideoTracks()[0]);
+
+  std::unique_ptr<SessionDescriptionInterface> offer;
+  ASSERT_TRUE(DoCreateOffer(&offer, nullptr));
+
+  // With the video track gone, the offer should contain no video m-section.
+  const cricket::MediaContentDescription* video_desc =
+      cricket::GetFirstVideoContentDescription(offer->description());
+  EXPECT_TRUE(video_desc == nullptr);
+}
+
+// Test creating a sender with a stream ID, and ensure the ID is populated
+// in the offer.
+// Don't run under Unified Plan since the stream API is not available.
+TEST_F(PeerConnectionInterfaceTestPlanB, CreateSenderWithStream) {
+  CreatePeerConnectionWithoutDtls();
+  pc_->CreateSender("video", kStreamId1);
+
+  std::unique_ptr<SessionDescriptionInterface> offer;
+  ASSERT_TRUE(DoCreateOffer(&offer, nullptr));
+
+  // The offer's video stream params should carry the requested stream id.
+  const cricket::MediaContentDescription* video_desc =
+      cricket::GetFirstVideoContentDescription(offer->description());
+  ASSERT_TRUE(video_desc != nullptr);
+  ASSERT_EQ(1u, video_desc->streams().size());
+  EXPECT_EQ(kStreamId1, video_desc->streams()[0].first_stream_id());
+}
+
+// Test that we can specify a certain track that we want statistics about.
+TEST_P(PeerConnectionInterfaceTest, GetStatsForSpecificTrack) {
+  InitiateCall();
+  ASSERT_LT(0u, pc_->GetSenders().size());
+  ASSERT_LT(0u, pc_->GetReceivers().size());
+  rtc::scoped_refptr<MediaStreamTrackInterface> remote_audio =
+      pc_->GetReceivers()[0]->track();
+  EXPECT_TRUE(DoGetStats(remote_audio.get()));
+
+  // Remove the stream. Since we are sending to our selves the local
+  // and the remote stream is the same.
+  pc_->RemoveTrackOrError(pc_->GetSenders()[0]);
+  // Do a re-negotiation.
+  CreateOfferReceiveAnswer();
+
+  // Test that we still can get statistics for the old track. Even if it is not
+  // sent any longer.
+  EXPECT_TRUE(DoGetStats(remote_audio.get()));
+}
+
+// Test that we can get stats on a video track.
+TEST_P(PeerConnectionInterfaceTest, GetStatsForVideoTrack) {
+  InitiateCall();
+  auto video_receiver = GetFirstReceiverOfType(cricket::MEDIA_TYPE_VIDEO);
+  ASSERT_TRUE(video_receiver);
+  EXPECT_TRUE(DoGetStats(video_receiver->track().get()));
+}
+
+// Test that we don't get statistics for an invalid track.
+TEST_P(PeerConnectionInterfaceTest, GetStatsForInvalidTrack) {
+  InitiateCall();
+  // A track created out-of-band is unknown to this PeerConnection.
+  rtc::scoped_refptr<AudioTrackInterface> unknown_audio_track(
+      pc_factory_->CreateAudioTrack("unknown track", nullptr));
+  EXPECT_FALSE(DoGetStats(unknown_audio_track.get()));
+}
+
+// The RTCStats-based GetStats should work before tracks are added, after
+// tracks are added, and after negotiation.
+TEST_P(PeerConnectionInterfaceTest, GetRTCStatsBeforeAndAfterCalling) {
+  CreatePeerConnectionWithoutDtls();
+  EXPECT_TRUE(DoGetRTCStats());
+  // Clearing stats cache is needed now, but should be temporary.
+  // https://bugs.chromium.org/p/webrtc/issues/detail?id=8693
+  pc_->ClearStatsCache();
+  AddAudioTrack(kAudioTracks[0], {kStreamId1});
+  AddVideoTrack(kVideoTracks[0], {kStreamId1});
+  EXPECT_TRUE(DoGetRTCStats());
+  pc_->ClearStatsCache();
+  CreateOfferReceiveAnswer();
+  EXPECT_TRUE(DoGetRTCStats());
+}
+
+// This tests that a SCTP data channel is returned using different
+// DataChannelInit configurations.
+TEST_P(PeerConnectionInterfaceTest, CreateSctpDataChannel) {
+  RTCConfiguration rtc_config;
+  CreatePeerConnection(rtc_config);
+
+  webrtc::DataChannelInit config;
+  auto channel = pc_->CreateDataChannelOrError("1", &config);
+  EXPECT_TRUE(channel.ok());
+  EXPECT_TRUE(channel.value()->reliable());
+  // Only the first data channel triggers renegotiation.
+  EXPECT_TRUE(observer_.renegotiation_needed_);
+  observer_.renegotiation_needed_ = false;
+
+  // Unordered but with no retransmit limits: still reported as reliable.
+  config.ordered = false;
+  channel = pc_->CreateDataChannelOrError("2", &config);
+  EXPECT_TRUE(channel.ok());
+  EXPECT_TRUE(channel.value()->reliable());
+  EXPECT_FALSE(observer_.renegotiation_needed_);
+
+  // A maxRetransmits limit makes the channel unreliable.
+  config.ordered = true;
+  config.maxRetransmits = 0;
+  channel = pc_->CreateDataChannelOrError("3", &config);
+  EXPECT_TRUE(channel.ok());
+  EXPECT_FALSE(channel.value()->reliable());
+  EXPECT_FALSE(observer_.renegotiation_needed_);
+
+  // A maxRetransmitTime limit likewise makes the channel unreliable.
+  config.maxRetransmits = absl::nullopt;
+  config.maxRetransmitTime = 0;
+  channel = pc_->CreateDataChannelOrError("4", &config);
+  EXPECT_TRUE(channel.ok());
+  EXPECT_FALSE(channel.value()->reliable());
+  EXPECT_FALSE(observer_.renegotiation_needed_);
+}
+
+// Creating a data channel on a closed PeerConnection must fail with
+// INVALID_STATE rather than crash.
+TEST_P(PeerConnectionInterfaceTest, CreateSctpDataChannelWhenClosed) {
+  RTCConfiguration rtc_config;
+  CreatePeerConnection(rtc_config);
+  pc_->Close();
+  webrtc::DataChannelInit config;
+  auto ret = pc_->CreateDataChannelOrError("1", &config);
+  ASSERT_FALSE(ret.ok());
+  EXPECT_EQ(ret.error().type(), RTCErrorType::INVALID_STATE);
+}
+
+// For backwards compatibility, we want people who "unset" maxRetransmits
+// and maxRetransmitTime by setting them to -1 to get what they want.
+TEST_P(PeerConnectionInterfaceTest, CreateSctpDataChannelWithMinusOne) {
+  RTCConfiguration rtc_config;
+  CreatePeerConnection(rtc_config);
+
+  webrtc::DataChannelInit config;
+  config.maxRetransmitTime = -1;
+  config.maxRetransmits = -1;
+  auto channel = pc_->CreateDataChannelOrError("1", &config);
+  EXPECT_TRUE(channel.ok());
+}
+
+// This tests that no data channel is returned if both maxRetransmits and
+// maxRetransmitTime are set for SCTP data channels.
+TEST_P(PeerConnectionInterfaceTest,
+       CreateSctpDataChannelShouldFailForInvalidConfig) {
+  RTCConfiguration rtc_config;
+  CreatePeerConnection(rtc_config);
+
+  std::string label = "test";
+  webrtc::DataChannelInit config;
+  // Setting both limits at once is the invalid configuration under test.
+  config.maxRetransmits = 0;
+  config.maxRetransmitTime = 0;
+
+  auto channel = pc_->CreateDataChannelOrError(label, &config);
+  EXPECT_FALSE(channel.ok());
+}
+
+// The test verifies that creating a SCTP data channel with an id already in use
+// or out of range should fail.
+TEST_P(PeerConnectionInterfaceTest,
+       CreateSctpDataChannelWithInvalidIdShouldFail) {
+  RTCConfiguration rtc_config;
+  CreatePeerConnection(rtc_config);
+
+  webrtc::DataChannelInit config;
+
+  config.id = 1;
+  config.negotiated = true;
+  auto channel = pc_->CreateDataChannelOrError("1", &config);
+  EXPECT_TRUE(channel.ok());
+  EXPECT_EQ(1, channel.value()->id());
+
+  // Id 1 is now taken; re-using it must fail.
+  channel = pc_->CreateDataChannelOrError("x", &config);
+  EXPECT_FALSE(channel.ok());
+
+  // The maximum valid stream id is accepted...
+  config.id = cricket::kMaxSctpSid;
+  config.negotiated = true;
+  channel = pc_->CreateDataChannelOrError("max", &config);
+  EXPECT_TRUE(channel.ok());
+  EXPECT_EQ(config.id, channel.value()->id());
+
+  // ...but anything beyond it is rejected.
+  config.id = cricket::kMaxSctpSid + 1;
+  config.negotiated = true;
+  channel = pc_->CreateDataChannelOrError("x", &config);
+  EXPECT_FALSE(channel.ok());
+}
+
+// Verifies that duplicated label is allowed for SCTP data channel.
+TEST_P(PeerConnectionInterfaceTest, SctpDuplicatedLabelAllowed) {
+  RTCConfiguration rtc_config;
+  CreatePeerConnection(rtc_config);
+
+  std::string label = "test";
+  auto channel = pc_->CreateDataChannelOrError(label, nullptr);
+  EXPECT_TRUE(channel.ok());
+
+  auto dup_channel = pc_->CreateDataChannelOrError(label, nullptr);
+  EXPECT_TRUE(dup_channel.ok());
+}
+
+#ifdef WEBRTC_HAVE_SCTP
+// This tests that SCTP data channels can be rejected in an answer.
+TEST_P(PeerConnectionInterfaceTest, TestRejectSctpDataChannelInAnswer)
+#else
+TEST_P(PeerConnectionInterfaceTest, DISABLED_TestRejectSctpDataChannelInAnswer)
+#endif
+{
+  RTCConfiguration rtc_config;
+  CreatePeerConnection(rtc_config);
+
+  // nullptr (not the C macro NULL) for the optional DataChannelInit, matching
+  // the rest of this file; verify creation succeeded before value() is
+  // dereferenced at the end of the test.
+  auto offer_channel = pc_->CreateDataChannelOrError("offer_channel", nullptr);
+  ASSERT_TRUE(offer_channel.ok());
+
+  CreateOfferAsLocalDescription();
+
+  // Create an answer where the m-line for data channels are rejected.
+  std::string sdp;
+  EXPECT_TRUE(pc_->local_description()->ToString(&sdp));
+  std::unique_ptr<SessionDescriptionInterface> answer(
+      webrtc::CreateSessionDescription(SdpType::kAnswer, sdp));
+  ASSERT_TRUE(answer);
+  cricket::ContentInfo* data_info =
+      cricket::GetFirstDataContent(answer->description());
+  // Guard against a malformed answer with no data section before writing to
+  // it; previously a null result would have crashed the test binary.
+  ASSERT_TRUE(data_info != nullptr);
+  data_info->rejected = true;
+
+  DoSetRemoteDescription(std::move(answer));
+  // Rejecting the m-line must close the locally created channel.
+  EXPECT_EQ(DataChannelInterface::kClosed, offer_channel.value()->state());
+}
+
+// Test that we can create a session description from an SDP string from
+// FireFox, use it as a remote session description, generate an answer and use
+// the answer as a local description.
+TEST_P(PeerConnectionInterfaceTest, ReceiveFireFoxOffer) {
+  RTCConfiguration rtc_config;
+  CreatePeerConnection(rtc_config);
+  AddAudioTrack("audio_label");
+  AddVideoTrack("video_label");
+  std::unique_ptr<SessionDescriptionInterface> desc(
+      webrtc::CreateSessionDescription(SdpType::kOffer,
+                                       webrtc::kFireFoxSdpOffer, nullptr));
+  EXPECT_TRUE(DoSetSessionDescription(std::move(desc), false));
+  CreateAnswerAsLocalDescription();
+  ASSERT_TRUE(pc_->local_description() != nullptr);
+  ASSERT_TRUE(pc_->remote_description() != nullptr);
+
+  // Every m-section from the Firefox offer should be accepted in our answer.
+  const cricket::ContentInfo* content =
+      cricket::GetFirstAudioContent(pc_->local_description()->description());
+  ASSERT_TRUE(content != nullptr);
+  EXPECT_FALSE(content->rejected);
+
+  content =
+      cricket::GetFirstVideoContent(pc_->local_description()->description());
+  ASSERT_TRUE(content != nullptr);
+  EXPECT_FALSE(content->rejected);
+#ifdef WEBRTC_HAVE_SCTP
+  content =
+      cricket::GetFirstDataContent(pc_->local_description()->description());
+  ASSERT_TRUE(content != nullptr);
+  EXPECT_FALSE(content->rejected);
+#endif
+}
+
+// Test that fallback from DTLS to SDES is not supported.
+// The fallback was previously supported but was removed to simplify the code
+// and because it's non-standard.
+TEST_P(PeerConnectionInterfaceTest, DtlsSdesFallbackNotSupported) {
+  RTCConfiguration rtc_config;
+  CreatePeerConnection(rtc_config);
+  // Wait for fake certificate to be generated. Previously, this is what caused
+  // the "a=crypto" lines to be rejected.
+  AddAudioTrack("audio_label");
+  AddVideoTrack("video_label");
+  ASSERT_NE(nullptr, fake_certificate_generator_);
+  EXPECT_EQ_WAIT(1, fake_certificate_generator_->generated_certificates(),
+                 kTimeout);
+  std::unique_ptr<SessionDescriptionInterface> desc(
+      webrtc::CreateSessionDescription(SdpType::kOffer, kDtlsSdesFallbackSdp,
+                                       nullptr));
+  // Applying the SDES-fallback offer as the remote description must fail.
+  EXPECT_FALSE(DoSetSessionDescription(std::move(desc), /*local=*/false));
+}
+
+// Test that we can create an audio only offer and receive an answer with a
+// limited set of audio codecs and receive an updated offer with more audio
+// codecs, where the added codecs are not supported.
+TEST_P(PeerConnectionInterfaceTest, ReceiveUpdatedAudioOfferWithBadCodecs) {
+  CreatePeerConnectionWithoutDtls();
+  AddAudioTrack("audio_label");
+  CreateOfferAsLocalDescription();
+
+  // The canned answer SDP differs between Plan B and Unified Plan.
+  const char* answer_sdp = (sdp_semantics_ == SdpSemantics::kPlanB_DEPRECATED
+                                ? webrtc::kAudioSdpPlanB
+                                : webrtc::kAudioSdpUnifiedPlan);
+  std::unique_ptr<SessionDescriptionInterface> answer(
+      webrtc::CreateSessionDescription(SdpType::kAnswer, answer_sdp, nullptr));
+  EXPECT_TRUE(DoSetSessionDescription(std::move(answer), false));
+
+  const char* reoffer_sdp =
+      (sdp_semantics_ == SdpSemantics::kPlanB_DEPRECATED
+           ? webrtc::kAudioSdpWithUnsupportedCodecsPlanB
+           : webrtc::kAudioSdpWithUnsupportedCodecsUnifiedPlan);
+  std::unique_ptr<SessionDescriptionInterface> updated_offer(
+      webrtc::CreateSessionDescription(SdpType::kOffer, reoffer_sdp, nullptr));
+  // Unsupported codecs in the re-offer should not prevent it being applied.
+  EXPECT_TRUE(DoSetSessionDescription(std::move(updated_offer), false));
+  CreateAnswerAsLocalDescription();
+}
+
+// Test that if we're receiving (but not sending) a track, subsequent offers
+// will have m-lines with a=recvonly.
+TEST_P(PeerConnectionInterfaceTest, CreateSubsequentRecvOnlyOffer) {
+  RTCConfiguration rtc_config;
+  CreatePeerConnection(rtc_config);
+  CreateAndSetRemoteOffer(GetSdpStringWithStream1());
+  CreateAnswerAsLocalDescription();
+
+  // At this point we should be receiving stream 1, but not sending anything.
+  // A new offer should be recvonly.
+  std::unique_ptr<SessionDescriptionInterface> offer;
+  // Check the result instead of ignoring it; on failure `offer` stays null
+  // and the dereferences below would crash the test binary.
+  ASSERT_TRUE(DoCreateOffer(&offer, nullptr));
+
+  const cricket::ContentInfo* video_content =
+      cricket::GetFirstVideoContent(offer->description());
+  ASSERT_TRUE(video_content != nullptr);
+  ASSERT_EQ(RtpTransceiverDirection::kRecvOnly,
+            video_content->media_description()->direction());
+
+  const cricket::ContentInfo* audio_content =
+      cricket::GetFirstAudioContent(offer->description());
+  ASSERT_TRUE(audio_content != nullptr);
+  ASSERT_EQ(RtpTransceiverDirection::kRecvOnly,
+            audio_content->media_description()->direction());
+}
+
+// Test that if we're receiving (but not sending) a track, and the
+// offerToReceiveVideo/offerToReceiveAudio constraints are explicitly set to
+// false, the generated m-lines will be a=inactive.
+TEST_P(PeerConnectionInterfaceTest, CreateSubsequentInactiveOffer) {
+  RTCConfiguration rtc_config;
+  CreatePeerConnection(rtc_config);
+  CreateAndSetRemoteOffer(GetSdpStringWithStream1());
+  CreateAnswerAsLocalDescription();
+
+  // At this point we should be receiving stream 1, but not sending anything.
+  // A new offer would be recvonly, but we'll set the "no receive" constraints
+  // to make it inactive.
+  std::unique_ptr<SessionDescriptionInterface> offer;
+  RTCOfferAnswerOptions options;
+  options.offer_to_receive_audio = 0;
+  options.offer_to_receive_video = 0;
+  // Check the result instead of ignoring it; on failure `offer` stays null
+  // and the dereferences below would crash the test binary.
+  ASSERT_TRUE(DoCreateOffer(&offer, &options));
+
+  const cricket::ContentInfo* video_content =
+      cricket::GetFirstVideoContent(offer->description());
+  ASSERT_TRUE(video_content != nullptr);
+  ASSERT_EQ(RtpTransceiverDirection::kInactive,
+            video_content->media_description()->direction());
+
+  const cricket::ContentInfo* audio_content =
+      cricket::GetFirstAudioContent(offer->description());
+  ASSERT_TRUE(audio_content != nullptr);
+  ASSERT_EQ(RtpTransceiverDirection::kInactive,
+            audio_content->media_description()->direction());
+}
+
+// Test that we can use SetConfiguration to change the ICE servers of the
+// PortAllocator.
+TEST_P(PeerConnectionInterfaceTest, SetConfigurationChangesIceServers) {
+  CreatePeerConnection();
+
+  PeerConnectionInterface::RTCConfiguration config = pc_->GetConfiguration();
+  PeerConnectionInterface::IceServer server;
+  server.uri = "stun:test_hostname";
+  config.servers.push_back(server);
+  EXPECT_TRUE(pc_->SetConfiguration(config).ok());
+
+  // The new STUN server should have been plumbed down to the allocator.
+  EXPECT_EQ(1u, port_allocator_->stun_servers().size());
+  EXPECT_EQ("test_hostname",
+            port_allocator_->stun_servers().begin()->hostname());
+}
+
+// Switching the ICE transport type to relay-only should update the
+// allocator's candidate filter.
+TEST_P(PeerConnectionInterfaceTest, SetConfigurationChangesCandidateFilter) {
+  CreatePeerConnection();
+  PeerConnectionInterface::RTCConfiguration config = pc_->GetConfiguration();
+  config.type = PeerConnectionInterface::kRelay;
+  EXPECT_TRUE(pc_->SetConfiguration(config).ok());
+  EXPECT_EQ(cricket::CF_RELAY, port_allocator_->candidate_filter());
+}
+
+// Toggling prune_turn_ports should update the allocator's prune policy.
+TEST_P(PeerConnectionInterfaceTest, SetConfigurationChangesPruneTurnPortsFlag) {
+  PeerConnectionInterface::RTCConfiguration config;
+  config.prune_turn_ports = false;
+  CreatePeerConnection(config);
+  config = pc_->GetConfiguration();
+  EXPECT_EQ(webrtc::NO_PRUNE, port_allocator_->turn_port_prune_policy());
+
+  config.prune_turn_ports = true;
+  EXPECT_TRUE(pc_->SetConfiguration(config).ok());
+  EXPECT_EQ(webrtc::PRUNE_BASED_ON_PRIORITY,
+            port_allocator_->turn_port_prune_policy());
+}
+
+// Test that the ice check interval can be changed. This does not verify that
+// the setting makes it all the way to P2PTransportChannel, as that would
+// require a very complex set of mocks.
+TEST_P(PeerConnectionInterfaceTest, SetConfigurationChangesIceCheckInterval) {
+  PeerConnectionInterface::RTCConfiguration config;
+  config.ice_check_min_interval = absl::nullopt;
+  CreatePeerConnection(config);
+  config = pc_->GetConfiguration();
+  config.ice_check_min_interval = 100;
+  EXPECT_TRUE(pc_->SetConfiguration(config).ok());
+  config = pc_->GetConfiguration();
+  EXPECT_EQ(config.ice_check_min_interval, 100);
+}
+
+// The surface_ice_candidates_on_ice_transport_type_changed flag should be
+// settable via SetConfiguration and round-trip through GetConfiguration.
+TEST_P(PeerConnectionInterfaceTest,
+       SetConfigurationChangesSurfaceIceCandidatesOnIceTransportTypeChanged) {
+  PeerConnectionInterface::RTCConfiguration config;
+  config.surface_ice_candidates_on_ice_transport_type_changed = false;
+  CreatePeerConnection(config);
+  config = pc_->GetConfiguration();
+  EXPECT_FALSE(config.surface_ice_candidates_on_ice_transport_type_changed);
+
+  config.surface_ice_candidates_on_ice_transport_type_changed = true;
+  EXPECT_TRUE(pc_->SetConfiguration(config).ok());
+  config = pc_->GetConfiguration();
+  EXPECT_TRUE(config.surface_ice_candidates_on_ice_transport_type_changed);
+}
+
+// Test that when SetConfiguration changes both the pool size and other
+// attributes, the pooled session is created with the updated attributes.
+TEST_P(PeerConnectionInterfaceTest,
+       SetConfigurationCreatesPooledSessionCorrectly) {
+  CreatePeerConnection();
+  PeerConnectionInterface::RTCConfiguration config = pc_->GetConfiguration();
+  config.ice_candidate_pool_size = 1;
+  PeerConnectionInterface::IceServer server;
+  server.uri = kStunAddressOnly;
+  config.servers.push_back(server);
+  config.type = PeerConnectionInterface::kRelay;
+  EXPECT_TRUE(pc_->SetConfiguration(config).ok());
+
+  // The pooled session should already reflect the STUN server added above.
+  const cricket::FakePortAllocatorSession* session =
+      static_cast<const cricket::FakePortAllocatorSession*>(
+          port_allocator_->GetPooledSession());
+  ASSERT_NE(nullptr, session);
+  EXPECT_EQ(1UL, session->stun_servers().size());
+}
+
+// Test that after SetLocalDescription, changing the pool size is not allowed,
+// and an invalid modification error is returned.
+TEST_P(PeerConnectionInterfaceTest,
+       CantChangePoolSizeAfterSetLocalDescription) {
+  CreatePeerConnection();
+  // Start by setting a size of 1.
+  PeerConnectionInterface::RTCConfiguration config = pc_->GetConfiguration();
+  config.ice_candidate_pool_size = 1;
+  EXPECT_TRUE(pc_->SetConfiguration(config).ok());
+
+  // Set remote offer; can still change pool size at this point.
+  CreateOfferAsRemoteDescription();
+  config.ice_candidate_pool_size = 2;
+  EXPECT_TRUE(pc_->SetConfiguration(config).ok());
+
+  // Set local answer; now it's too late.
+  CreateAnswerAsLocalDescription();
+  config.ice_candidate_pool_size = 3;
+  RTCError error = pc_->SetConfiguration(config);
+  EXPECT_EQ(RTCErrorType::INVALID_MODIFICATION, error.type());
+}
+
+// Test that after setting an answer, extra pooled sessions are discarded. The
+// ICE candidate pool is only intended to be used for the first offer/answer.
+TEST_P(PeerConnectionInterfaceTest,
+       ExtraPooledSessionsDiscardedAfterApplyingAnswer) {
+  CreatePeerConnection();
+
+  // Set a larger-than-necessary size.
+  PeerConnectionInterface::RTCConfiguration config = pc_->GetConfiguration();
+  config.ice_candidate_pool_size = 4;
+  EXPECT_TRUE(pc_->SetConfiguration(config).ok());
+
+  // Do offer/answer.
+  CreateOfferAsRemoteDescription();
+  CreateAnswerAsLocalDescription();
+
+  // Expect no pooled sessions to be left.
+  const cricket::PortAllocatorSession* session =
+      port_allocator_->GetPooledSession();
+  EXPECT_EQ(nullptr, session);
+}
+
+// After Close is called, pooled candidates should be discarded so as to not
+// waste network resources.
+TEST_P(PeerConnectionInterfaceTest, PooledSessionsDiscardedAfterClose) {
+  CreatePeerConnection();
+
+  PeerConnectionInterface::RTCConfiguration config = pc_->GetConfiguration();
+  config.ice_candidate_pool_size = 3;
+  EXPECT_TRUE(pc_->SetConfiguration(config).ok());
+  pc_->Close();
+
+  // Expect no pooled sessions to be left.
+  const cricket::PortAllocatorSession* session =
+      port_allocator_->GetPooledSession();
+  EXPECT_EQ(nullptr, session);
+}
+
+// Test that SetConfiguration returns an invalid modification error if
+// modifying a field in the configuration that isn't allowed to be modified.
+TEST_P(PeerConnectionInterfaceTest,
+       SetConfigurationReturnsInvalidModificationError) {
+  PeerConnectionInterface::RTCConfiguration config;
+  config.bundle_policy = PeerConnectionInterface::kBundlePolicyBalanced;
+  config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyNegotiate;
+  config.continual_gathering_policy = PeerConnectionInterface::GATHER_ONCE;
+  CreatePeerConnection(config);
+
+  // Each of the three immutable fields is attempted in turn.
+  PeerConnectionInterface::RTCConfiguration modified_config =
+      pc_->GetConfiguration();
+  modified_config.bundle_policy =
+      PeerConnectionInterface::kBundlePolicyMaxBundle;
+  RTCError error = pc_->SetConfiguration(modified_config);
+  EXPECT_EQ(RTCErrorType::INVALID_MODIFICATION, error.type());
+
+  modified_config = pc_->GetConfiguration();
+  modified_config.rtcp_mux_policy =
+      PeerConnectionInterface::kRtcpMuxPolicyRequire;
+  error = pc_->SetConfiguration(modified_config);
+  EXPECT_EQ(RTCErrorType::INVALID_MODIFICATION, error.type());
+
+  modified_config = pc_->GetConfiguration();
+  modified_config.continual_gathering_policy =
+      PeerConnectionInterface::GATHER_CONTINUALLY;
+  error = pc_->SetConfiguration(modified_config);
+  EXPECT_EQ(RTCErrorType::INVALID_MODIFICATION, error.type());
+}
+
+// Test that SetConfiguration returns a range error if the candidate pool size
+// is negative or larger than allowed by the spec.
+TEST_P(PeerConnectionInterfaceTest,
+       SetConfigurationReturnsRangeErrorForBadCandidatePoolSize) {
+  PeerConnectionInterface::RTCConfiguration config;
+  CreatePeerConnection(config);
+  config = pc_->GetConfiguration();
+
+  config.ice_candidate_pool_size = -1;
+  RTCError error = pc_->SetConfiguration(config);
+  EXPECT_EQ(RTCErrorType::INVALID_RANGE, error.type());
+
+  config.ice_candidate_pool_size = INT_MAX;
+  error = pc_->SetConfiguration(config);
+  EXPECT_EQ(RTCErrorType::INVALID_RANGE, error.type());
+}
+
+// Test that SetConfiguration returns a syntax error if parsing an ICE server
+// URL failed.
+TEST_P(PeerConnectionInterfaceTest,
+       SetConfigurationReturnsSyntaxErrorFromBadIceUrls) {
+  PeerConnectionInterface::RTCConfiguration config;
+  CreatePeerConnection(config);
+  config = pc_->GetConfiguration();
+
+  PeerConnectionInterface::IceServer bad_server;
+  // "stunn" is a misspelled scheme and should fail URL parsing.
+  bad_server.uri = "stunn:www.example.com";
+  config.servers.push_back(bad_server);
+  RTCError error = pc_->SetConfiguration(config);
+  EXPECT_EQ(RTCErrorType::SYNTAX_ERROR, error.type());
+}
+
+// Test that SetConfiguration returns an invalid parameter error if a TURN
+// IceServer is missing a username or password.
+TEST_P(PeerConnectionInterfaceTest,
+       SetConfigurationReturnsInvalidParameterIfCredentialsMissing) {
+  PeerConnectionInterface::RTCConfiguration config;
+  CreatePeerConnection(config);
+  config = pc_->GetConfiguration();
+
+  PeerConnectionInterface::IceServer bad_server;
+  bad_server.uri = "turn:www.example.com";
+  // Missing password.
+  bad_server.username = "foo";
+  config.servers.push_back(bad_server);
+  // The error type is checked directly on the returned RTCError; a local
+  // `RTCError error;` previously declared here was never used and has been
+  // removed.
+  EXPECT_EQ(pc_->SetConfiguration(config).type(),
+            RTCErrorType::INVALID_PARAMETER);
+}
+
+// Test that PeerConnection::Close changes the states to closed and all remote
+// tracks change state to ended.
+TEST_P(PeerConnectionInterfaceTest, CloseAndTestStreamsAndStates) {
+  // Initialize a PeerConnection and negotiate local and remote session
+  // description.
+  InitiateCall();
+
+  // With Plan B, verify the stream count. The analog with Unified Plan is the
+  // RtpTransceiver count.
+  if (sdp_semantics_ == SdpSemantics::kPlanB_DEPRECATED) {
+    ASSERT_EQ(1u, pc_->local_streams()->count());
+    ASSERT_EQ(1u, pc_->remote_streams()->count());
+  } else {
+    ASSERT_EQ(2u, pc_->GetTransceivers().size());
+  }
+
+  pc_->Close();
+
+  // All three state machines should report their terminal states.
+  EXPECT_EQ(PeerConnectionInterface::kClosed, pc_->signaling_state());
+  EXPECT_EQ(PeerConnectionInterface::kIceConnectionClosed,
+            pc_->ice_connection_state());
+  EXPECT_EQ(PeerConnectionInterface::kIceGatheringComplete,
+            pc_->ice_gathering_state());
+
+  // Streams/transceivers remain queryable after Close().
+  if (sdp_semantics_ == SdpSemantics::kPlanB_DEPRECATED) {
+    EXPECT_EQ(1u, pc_->local_streams()->count());
+    EXPECT_EQ(1u, pc_->remote_streams()->count());
+  } else {
+    // Verify that the RtpTransceivers are still returned.
+    EXPECT_EQ(2u, pc_->GetTransceivers().size());
+  }
+
+  auto audio_receiver = GetFirstReceiverOfType(cricket::MEDIA_TYPE_AUDIO);
+  auto video_receiver = GetFirstReceiverOfType(cricket::MEDIA_TYPE_VIDEO);
+  if (sdp_semantics_ == SdpSemantics::kPlanB_DEPRECATED) {
+    ASSERT_TRUE(audio_receiver);
+    ASSERT_TRUE(video_receiver);
+    // Track state may be updated asynchronously.
+    EXPECT_EQ_WAIT(MediaStreamTrackInterface::kEnded,
+                   audio_receiver->track()->state(), kTimeout);
+    EXPECT_EQ_WAIT(MediaStreamTrackInterface::kEnded,
+                   video_receiver->track()->state(), kTimeout);
+  } else {
+    ASSERT_FALSE(audio_receiver);
+    ASSERT_FALSE(video_receiver);
+  }
+}
+
+// Test that PeerConnection methods fails gracefully after
+// PeerConnection::Close has been called.
+// Don't run under Unified Plan since the stream API is not available.
+TEST_F(PeerConnectionInterfaceTestPlanB, CloseAndTestMethods) {
+  CreatePeerConnectionWithoutDtls();
+  AddAudioVideoStream(kStreamId1, "audio_label", "video_label");
+  CreateOfferAsRemoteDescription();
+  CreateAnswerAsLocalDescription();
+
+  ASSERT_EQ(1u, pc_->local_streams()->count());
+  rtc::scoped_refptr<MediaStreamInterface> local_stream(
+      pc_->local_streams()->at(0));
+
+  pc_->Close();
+
+  // Stream mutation APIs must not crash; additions must fail.
+  pc_->RemoveStream(local_stream.get());
+  EXPECT_FALSE(pc_->AddStream(local_stream.get()));
+
+  // nullptr (not the C macro NULL) for the optional config, matching modern
+  // C++ style used elsewhere in this file.
+  EXPECT_FALSE(pc_->CreateDataChannelOrError("test", nullptr).ok());
+
+  // The previously applied descriptions remain readable after Close().
+  EXPECT_TRUE(pc_->local_description() != nullptr);
+  EXPECT_TRUE(pc_->remote_description() != nullptr);
+
+  std::unique_ptr<SessionDescriptionInterface> offer;
+  EXPECT_FALSE(DoCreateOffer(&offer, nullptr));
+  std::unique_ptr<SessionDescriptionInterface> answer;
+  EXPECT_FALSE(DoCreateAnswer(&answer, nullptr));
+
+  // Re-applying copies of the existing descriptions must also fail.
+  std::string sdp;
+  ASSERT_TRUE(pc_->remote_description()->ToString(&sdp));
+  std::unique_ptr<SessionDescriptionInterface> remote_offer(
+      webrtc::CreateSessionDescription(SdpType::kOffer, sdp));
+  EXPECT_FALSE(DoSetRemoteDescription(std::move(remote_offer)));
+
+  ASSERT_TRUE(pc_->local_description()->ToString(&sdp));
+  std::unique_ptr<SessionDescriptionInterface> local_offer(
+      webrtc::CreateSessionDescription(SdpType::kOffer, sdp));
+  EXPECT_FALSE(DoSetLocalDescription(std::move(local_offer)));
+}
+
+// Test that GetStats can still be called after PeerConnection::Close.
+TEST_P(PeerConnectionInterfaceTest, CloseAndGetStats) {
+ InitiateCall();
+ pc_->Close();
+ // Null selector; completing without crash/failure is the pass condition.
+ DoGetStats(nullptr);
+}
+
+// NOTE: The series of tests below come from what used to be
+// mediastreamsignaling_unittest.cc, and are mostly aimed at testing that
+// setting a remote or local description has the expected effects.
+
+// This test verifies that the remote MediaStreams corresponding to a received
+// SDP string is created. In this test the two separate MediaStreams are
+// signaled.
+TEST_P(PeerConnectionInterfaceTest, UpdateRemoteStreams) {
+ RTCConfiguration config;
+ CreatePeerConnection(config);
+ CreateAndSetRemoteOffer(GetSdpStringWithStream1());
+
+ // After the first offer, exactly one remote stream (1 audio + 1 video
+ // track) must exist and its video track must have a source.
+ rtc::scoped_refptr<StreamCollection> reference(CreateStreamCollection(1, 1));
+ EXPECT_TRUE(
+ CompareStreamCollections(observer_.remote_streams(), reference.get()));
+ MediaStreamInterface* remote_stream = observer_.remote_streams()->at(0);
+ EXPECT_TRUE(remote_stream->GetVideoTracks()[0]->GetSource() != nullptr);
+
+ // Create a session description based on another SDP with another
+ // MediaStream.
+ CreateAndSetRemoteOffer(GetSdpStringWithStream1And2());
+
+ // The collection must now match a two-stream reference.
+ rtc::scoped_refptr<StreamCollection> reference2(CreateStreamCollection(2, 1));
+ EXPECT_TRUE(
+ CompareStreamCollections(observer_.remote_streams(), reference2.get()));
+}
+
+// This test verifies that when remote tracks are added/removed from SDP, the
+// created remote streams are updated appropriately.
+// Don't run under Unified Plan since this test uses Plan B SDP to test Plan B
+// specific behavior.
+TEST_F(PeerConnectionInterfaceTestPlanB,
+ AddRemoveTrackFromExistingRemoteMediaStream) {
+ RTCConfiguration config;
+ CreatePeerConnection(config);
+ std::unique_ptr<SessionDescriptionInterface> desc_ms1 =
+ CreateSessionDescriptionAndReference(1, 1);
+ EXPECT_TRUE(DoSetRemoteDescription(std::move(desc_ms1)));
+ EXPECT_TRUE(CompareStreamCollections(observer_.remote_streams(),
+ reference_collection_.get()));
+
+ // Add extra audio and video tracks to the same MediaStream.
+ std::unique_ptr<SessionDescriptionInterface> desc_ms1_two_tracks =
+ CreateSessionDescriptionAndReference(2, 2);
+ EXPECT_TRUE(DoSetRemoteDescription(std::move(desc_ms1_two_tracks)));
+ EXPECT_TRUE(CompareStreamCollections(observer_.remote_streams(),
+ reference_collection_.get()));
+ // The newly added second tracks (index 1) must start out live.
+ rtc::scoped_refptr<AudioTrackInterface> audio_track2 =
+ observer_.remote_streams()->at(0)->GetAudioTracks()[1];
+ EXPECT_EQ(webrtc::MediaStreamTrackInterface::kLive, audio_track2->state());
+ rtc::scoped_refptr<VideoTrackInterface> video_track2 =
+ observer_.remote_streams()->at(0)->GetVideoTracks()[1];
+ EXPECT_EQ(webrtc::MediaStreamTrackInterface::kLive, video_track2->state());
+
+ // Remove the extra audio and video tracks.
+ std::unique_ptr<SessionDescriptionInterface> desc_ms2 =
+ CreateSessionDescriptionAndReference(1, 1);
+ MockTrackObserver audio_track_observer(audio_track2.get());
+ MockTrackObserver video_track_observer(video_track2.get());
+
+ // Each removed track must fire OnChanged exactly once (the kLive -> kEnded
+ // transition).
+ EXPECT_CALL(audio_track_observer, OnChanged()).Times(Exactly(1));
+ EXPECT_CALL(video_track_observer, OnChanged()).Times(Exactly(1));
+ EXPECT_TRUE(DoSetRemoteDescription(std::move(desc_ms2)));
+ EXPECT_TRUE(CompareStreamCollections(observer_.remote_streams(),
+ reference_collection_.get()));
+ // Track state may be updated asynchronously.
+ EXPECT_EQ_WAIT(webrtc::MediaStreamTrackInterface::kEnded,
+ audio_track2->state(), kTimeout);
+ EXPECT_EQ_WAIT(webrtc::MediaStreamTrackInterface::kEnded,
+ video_track2->state(), kTimeout);
+}
+
+// This tests that remote tracks are ended if a local session description is set
+// that rejects the media content type.
+TEST_P(PeerConnectionInterfaceTest, RejectMediaContent) {
+ RTCConfiguration config;
+ CreatePeerConnection(config);
+ // First create and set a remote offer, then reject its video content in our
+ // answer.
+ CreateAndSetRemoteOffer(kSdpStringWithStream1PlanB);
+ auto audio_receiver = GetFirstReceiverOfType(cricket::MEDIA_TYPE_AUDIO);
+ ASSERT_TRUE(audio_receiver);
+ auto video_receiver = GetFirstReceiverOfType(cricket::MEDIA_TYPE_VIDEO);
+ ASSERT_TRUE(video_receiver);
+
+ // Both remote tracks start out live.
+ rtc::scoped_refptr<MediaStreamTrackInterface> remote_audio =
+ audio_receiver->track();
+ EXPECT_EQ(webrtc::MediaStreamTrackInterface::kLive, remote_audio->state());
+ rtc::scoped_refptr<MediaStreamTrackInterface> remote_video =
+ video_receiver->track();
+ EXPECT_EQ(MediaStreamTrackInterface::kLive, remote_video->state());
+
+ // Rejecting only video must end the video track and leave audio live.
+ std::unique_ptr<SessionDescriptionInterface> local_answer;
+ EXPECT_TRUE(DoCreateAnswer(&local_answer, nullptr));
+ cricket::ContentInfo* video_info =
+ local_answer->description()->GetContentByName("video");
+ video_info->rejected = true;
+ EXPECT_TRUE(DoSetLocalDescription(std::move(local_answer)));
+ EXPECT_EQ(MediaStreamTrackInterface::kEnded, remote_video->state());
+ EXPECT_EQ(MediaStreamTrackInterface::kLive, remote_audio->state());
+
+ // Now create an offer where we reject both video and audio.
+ std::unique_ptr<SessionDescriptionInterface> local_offer;
+ EXPECT_TRUE(DoCreateOffer(&local_offer, nullptr));
+ video_info = local_offer->description()->GetContentByName("video");
+ ASSERT_TRUE(video_info != nullptr);
+ video_info->rejected = true;
+ cricket::ContentInfo* audio_info =
+ local_offer->description()->GetContentByName("audio");
+ ASSERT_TRUE(audio_info != nullptr);
+ audio_info->rejected = true;
+ EXPECT_TRUE(DoSetLocalDescription(std::move(local_offer)));
+ // Track state may be updated asynchronously.
+ EXPECT_EQ_WAIT(MediaStreamTrackInterface::kEnded, remote_audio->state(),
+ kTimeout);
+ EXPECT_EQ_WAIT(MediaStreamTrackInterface::kEnded, remote_video->state(),
+ kTimeout);
+}
+
+// This tests that we won't crash if the remote track has been removed outside
+// of PeerConnection and then PeerConnection tries to reject the track.
+// Don't run under Unified Plan since the stream API is not available.
+TEST_F(PeerConnectionInterfaceTestPlanB, RemoveTrackThenRejectMediaContent) {
+ RTCConfiguration config;
+ CreatePeerConnection(config);
+ CreateAndSetRemoteOffer(GetSdpStringWithStream1());
+ // Pull the tracks out of the remote stream behind the PC's back.
+ MediaStreamInterface* remote_stream = observer_.remote_streams()->at(0);
+ remote_stream->RemoveTrack(remote_stream->GetVideoTracks()[0]);
+ remote_stream->RemoveTrack(remote_stream->GetAudioTracks()[0]);
+
+ // Answer with both m= sections rejected; the PC will try to end tracks
+ // that are no longer in the stream.
+ std::unique_ptr<SessionDescriptionInterface> local_answer(
+ webrtc::CreateSessionDescription(SdpType::kAnswer,
+ GetSdpStringWithStream1(), nullptr));
+ cricket::ContentInfo* video_info =
+ local_answer->description()->GetContentByName("video");
+ video_info->rejected = true;
+ cricket::ContentInfo* audio_info =
+ local_answer->description()->GetContentByName("audio");
+ audio_info->rejected = true;
+ EXPECT_TRUE(DoSetLocalDescription(std::move(local_answer)));
+
+ // No crash is a pass.
+}
+
+// This tests that if a recvonly remote description is set, no remote streams
+// will be created, even if the description contains SSRCs/MSIDs.
+// See: https://code.google.com/p/webrtc/issues/detail?id=5054
+TEST_P(PeerConnectionInterfaceTest, RecvonlyDescriptionDoesntCreateStream) {
+ RTCConfiguration config;
+ CreatePeerConnection(config);
+
+ // Rewrite every a=sendrecv in the offer to a=recvonly before applying it.
+ std::string recvonly_offer = GetSdpStringWithStream1();
+ absl::StrReplaceAll({{kSendrecv, kRecvonly}}, &recvonly_offer);
+ CreateAndSetRemoteOffer(recvonly_offer);
+
+ EXPECT_EQ(0u, observer_.remote_streams()->count());
+}
+
+// This tests that a default MediaStream is created if a remote session
+// description doesn't contain any streams and no MSID support.
+// It also tests that the default stream is updated if a video m-line is added
+// in a subsequent session description.
+// Don't run under Unified Plan since this behavior is Plan B specific.
+TEST_F(PeerConnectionInterfaceTestPlanB, SdpWithoutMsidCreatesDefaultStream) {
+ RTCConfiguration config;
+ CreatePeerConnection(config);
+ CreateAndSetRemoteOffer(kSdpStringWithoutStreamsAudioOnly);
+
+ // Audio-only offer: default stream with one audio track and no video.
+ ASSERT_EQ(1u, observer_.remote_streams()->count());
+ MediaStreamInterface* remote_stream = observer_.remote_streams()->at(0);
+
+ EXPECT_EQ(1u, remote_stream->GetAudioTracks().size());
+ EXPECT_EQ(0u, remote_stream->GetVideoTracks().size());
+ EXPECT_EQ("default", remote_stream->id());
+
+ // Audio+video offer: the same default stream gains a default video track.
+ CreateAndSetRemoteOffer(kSdpStringWithoutStreams);
+ ASSERT_EQ(1u, observer_.remote_streams()->count());
+ ASSERT_EQ(1u, remote_stream->GetAudioTracks().size());
+ EXPECT_EQ("defaulta0", remote_stream->GetAudioTracks()[0]->id());
+ EXPECT_EQ(MediaStreamTrackInterface::kLive,
+ remote_stream->GetAudioTracks()[0]->state());
+ ASSERT_EQ(1u, remote_stream->GetVideoTracks().size());
+ EXPECT_EQ("defaultv0", remote_stream->GetVideoTracks()[0]->id());
+ EXPECT_EQ(MediaStreamTrackInterface::kLive,
+ remote_stream->GetVideoTracks()[0]->state());
+}
+
+// This tests that a default MediaStream is created if a remote session
+// description doesn't contain any streams and media direction is send only.
+// Don't run under Unified Plan since this behavior is Plan B specific.
+TEST_F(PeerConnectionInterfaceTestPlanB,
+ SendOnlySdpWithoutMsidCreatesDefaultStream) {
+ RTCConfiguration config;
+ CreatePeerConnection(config);
+ CreateAndSetRemoteOffer(kSdpStringSendOnlyWithoutStreams);
+
+ // A single default stream with one track per m= section is expected.
+ ASSERT_EQ(1u, observer_.remote_streams()->count());
+ MediaStreamInterface* remote_stream = observer_.remote_streams()->at(0);
+
+ EXPECT_EQ(1u, remote_stream->GetAudioTracks().size());
+ EXPECT_EQ(1u, remote_stream->GetVideoTracks().size());
+ EXPECT_EQ("default", remote_stream->id());
+}
+
+// This tests that it won't crash when PeerConnection tries to remove
+// a remote track that has already been removed from the MediaStream.
+// Don't run under Unified Plan since this behavior is Plan B specific.
+TEST_F(PeerConnectionInterfaceTestPlanB, RemoveAlreadyGoneRemoteStream) {
+ RTCConfiguration config;
+ CreatePeerConnection(config);
+ CreateAndSetRemoteOffer(GetSdpStringWithStream1());
+ // Remove the tracks directly, bypassing the PeerConnection.
+ MediaStreamInterface* remote_stream = observer_.remote_streams()->at(0);
+ remote_stream->RemoveTrack(remote_stream->GetAudioTracks()[0]);
+ remote_stream->RemoveTrack(remote_stream->GetVideoTracks()[0]);
+
+ // A stream-less offer makes the PC remove the (already gone) tracks.
+ CreateAndSetRemoteOffer(kSdpStringWithoutStreams);
+
+ // No crash is a pass.
+}
+
+// This tests that a default MediaStream is created if the remote session
+// description doesn't contain any streams and doesn't contain an indication
+// of whether MSID is supported.
+// Don't run under Unified Plan since this behavior is Plan B specific.
+TEST_F(PeerConnectionInterfaceTestPlanB,
+ SdpWithoutMsidAndStreamsCreatesDefaultStream) {
+ RTCConfiguration config;
+ CreatePeerConnection(config);
+ CreateAndSetRemoteOffer(kSdpStringWithoutStreams);
+
+ // One default stream with one audio and one video track is expected.
+ ASSERT_EQ(1u, observer_.remote_streams()->count());
+ MediaStreamInterface* remote_stream = observer_.remote_streams()->at(0);
+ EXPECT_EQ(1u, remote_stream->GetAudioTracks().size());
+ EXPECT_EQ(1u, remote_stream->GetVideoTracks().size());
+}
+
+// This tests that a default MediaStream is not created if the remote session
+// description doesn't contain any streams but does support MSID.
+// Don't run under Unified Plan since this behavior is Plan B specific.
+TEST_F(PeerConnectionInterfaceTestPlanB, SdpWithMsidDontCreatesDefaultStream) {
+ RTCConfiguration config;
+ CreatePeerConnection(config);
+ CreateAndSetRemoteOffer(kSdpStringWithMsidWithoutStreams);
+ // MSID support signaled => no default stream fabricated.
+ EXPECT_EQ(0u, observer_.remote_streams()->count());
+}
+
+// This tests that when setting a new description, the old default tracks are
+// not destroyed and recreated.
+// See: https://bugs.chromium.org/p/webrtc/issues/detail?id=5250
+// Don't run under Unified Plan since this behavior is Plan B specific.
+TEST_F(PeerConnectionInterfaceTestPlanB,
+ DefaultTracksNotDestroyedAndRecreated) {
+ RTCConfiguration config;
+ CreatePeerConnection(config);
+ CreateAndSetRemoteOffer(kSdpStringWithoutStreamsAudioOnly);
+
+ ASSERT_EQ(1u, observer_.remote_streams()->count());
+ MediaStreamInterface* remote_stream = observer_.remote_streams()->at(0);
+ ASSERT_EQ(1u, remote_stream->GetAudioTracks().size());
+
+ // Set the track to "disabled", then set a new description and ensure the
+ // track is still disabled, which ensures it hasn't been recreated.
+ remote_stream->GetAudioTracks()[0]->set_enabled(false);
+ CreateAndSetRemoteOffer(kSdpStringWithoutStreamsAudioOnly);
+ ASSERT_EQ(1u, remote_stream->GetAudioTracks().size());
+ // A recreated track would have reverted to enabled.
+ EXPECT_FALSE(remote_stream->GetAudioTracks()[0]->enabled());
+}
+
+// This tests that a default MediaStream is not created if a remote session
+// description is updated to not have any MediaStreams.
+// Don't run under Unified Plan since this behavior is Plan B specific.
+TEST_F(PeerConnectionInterfaceTestPlanB, VerifyDefaultStreamIsNotCreated) {
+ RTCConfiguration config;
+ CreatePeerConnection(config);
+ CreateAndSetRemoteOffer(GetSdpStringWithStream1());
+ rtc::scoped_refptr<StreamCollection> reference(CreateStreamCollection(1, 1));
+ EXPECT_TRUE(
+ CompareStreamCollections(observer_.remote_streams(), reference.get()));
+
+ // Updating to a stream-less offer must leave zero remote streams, not a
+ // fabricated default one.
+ CreateAndSetRemoteOffer(kSdpStringWithoutStreams);
+ EXPECT_EQ(0u, observer_.remote_streams()->count());
+}
+
+// This tests that a default MediaStream is created if a remote SDP comes from
+// an endpoint that doesn't signal SSRCs, but signals media stream IDs.
+TEST_F(PeerConnectionInterfaceTestPlanB,
+ SdpWithMsidWithoutSsrcCreatesDefaultStream) {
+ RTCConfiguration config;
+ CreatePeerConnection(config);
+ std::string sdp_string = kSdpStringWithoutStreamsAudioOnly;
+ // Add an a=msid line to simulate a Unified Plan endpoint that only
+ // signals stream IDs with a=msid lines (no a=ssrc lines).
+ sdp_string.append("a=msid:audio_stream_id audio_track_id\n");
+
+ CreateAndSetRemoteOffer(sdp_string);
+
+ // Without SSRCs the stream still falls back to the "default" id.
+ ASSERT_EQ(1u, observer_.remote_streams()->count());
+ MediaStreamInterface* remote_stream = observer_.remote_streams()->at(0);
+ EXPECT_EQ("default", remote_stream->id());
+ ASSERT_EQ(1u, remote_stream->GetAudioTracks().size());
+}
+
+// This tests that when a Plan B endpoint receives an SDP that signals no media
+// stream IDs indicated by the special character "-" in the a=msid line, that
+// a default stream ID will be used for the MediaStream ID. This can occur
+// when a Unified Plan endpoint signals no media stream IDs, but signals both
+// a=ssrc msid and a=msid lines for interop signaling with Plan B.
+TEST_F(PeerConnectionInterfaceTestPlanB,
+ SdpWithEmptyMsidAndSsrcCreatesDefaultStreamId) {
+ RTCConfiguration config;
+ CreatePeerConnection(config);
+ // Add a a=msid line to the SDP. This is prioritized when parsing the SDP, so
+ // the sender's stream ID will be interpreted as no stream IDs.
+ std::string sdp_string = kSdpStringWithStream1AudioTrackOnly;
+ sdp_string.append("a=msid:- audiotrack0\n");
+
+ CreateAndSetRemoteOffer(sdp_string);
+
+ ASSERT_EQ(1u, observer_.remote_streams()->count());
+ // Because SSRCs are signaled the track ID will be what was signaled in the
+ // a=msid line.
+ EXPECT_EQ("audiotrack0", observer_.last_added_track_label_);
+ MediaStreamInterface* remote_stream = observer_.remote_streams()->at(0);
+ EXPECT_EQ("default", remote_stream->id());
+ ASSERT_EQ(1u, remote_stream->GetAudioTracks().size());
+
+ // Previously a bug occurred when setting the remote description a second
+ // time. This is because we checked equality of the remote StreamParams
+ // stream ID (empty), and the previously set stream ID for the remote sender
+ // ("default"). This caused a track to be removed, then added, when really
+ // nothing should occur because it is the same track.
+ CreateAndSetRemoteOffer(sdp_string);
+ // Still exactly one add event and zero remove events after the re-apply.
+ EXPECT_EQ(0u, observer_.remove_track_events_.size());
+ EXPECT_EQ(1u, observer_.add_track_events_.size());
+ EXPECT_EQ("audiotrack0", observer_.last_added_track_label_);
+ remote_stream = observer_.remote_streams()->at(0);
+ EXPECT_EQ("default", remote_stream->id());
+ ASSERT_EQ(1u, remote_stream->GetAudioTracks().size());
+}
+
+// This tests that an RtpSender is created when the local description is set
+// after adding a local stream.
+// TODO(deadbeef): This test and the one below it need to be updated when
+// an RtpSender's lifetime isn't determined by when a local description is set.
+// Don't run under Unified Plan since this behavior is Plan B specific.
+TEST_F(PeerConnectionInterfaceTestPlanB, LocalDescriptionChanged) {
+ RTCConfiguration config;
+ CreatePeerConnection(config);
+
+ // Create an offer with 1 stream with 2 tracks of each type.
+ rtc::scoped_refptr<StreamCollection> stream_collection =
+ CreateStreamCollection(1, 2);
+ pc_->AddStream(stream_collection->at(0));
+ std::unique_ptr<SessionDescriptionInterface> offer;
+ ASSERT_TRUE(DoCreateOffer(&offer, nullptr));
+ EXPECT_TRUE(DoSetLocalDescription(std::move(offer)));
+
+ // One sender per track: 2 audio + 2 video.
+ auto senders = pc_->GetSenders();
+ EXPECT_EQ(4u, senders.size());
+ EXPECT_TRUE(ContainsSender(senders, kAudioTracks[0]));
+ EXPECT_TRUE(ContainsSender(senders, kVideoTracks[0]));
+ EXPECT_TRUE(ContainsSender(senders, kAudioTracks[1]));
+ EXPECT_TRUE(ContainsSender(senders, kVideoTracks[1]));
+
+ // Remove an audio and video track.
+ pc_->RemoveStream(stream_collection->at(0));
+ stream_collection = CreateStreamCollection(1, 1);
+ pc_->AddStream(stream_collection->at(0));
+ ASSERT_TRUE(DoCreateOffer(&offer, nullptr));
+ EXPECT_TRUE(DoSetLocalDescription(std::move(offer)));
+
+ // Senders for the removed tracks must be gone after renegotiation.
+ senders = pc_->GetSenders();
+ EXPECT_EQ(2u, senders.size());
+ EXPECT_TRUE(ContainsSender(senders, kAudioTracks[0]));
+ EXPECT_TRUE(ContainsSender(senders, kVideoTracks[0]));
+ EXPECT_FALSE(ContainsSender(senders, kAudioTracks[1]));
+ EXPECT_FALSE(ContainsSender(senders, kVideoTracks[1]));
+}
+
+// This tests that an RtpSender is created when the local description is set
+// before adding a local stream.
+// Don't run under Unified Plan since this behavior is Plan B specific.
+TEST_F(PeerConnectionInterfaceTestPlanB,
+ AddLocalStreamAfterLocalDescriptionChanged) {
+ RTCConfiguration config;
+ CreatePeerConnection(config);
+
+ rtc::scoped_refptr<StreamCollection> stream_collection =
+ CreateStreamCollection(1, 2);
+ // Add a stream to create the offer, but remove it afterwards.
+ pc_->AddStream(stream_collection->at(0));
+ std::unique_ptr<SessionDescriptionInterface> offer;
+ ASSERT_TRUE(DoCreateOffer(&offer, nullptr));
+ pc_->RemoveStream(stream_collection->at(0));
+
+ // With the stream removed, applying the offer creates no senders.
+ EXPECT_TRUE(DoSetLocalDescription(std::move(offer)));
+ auto senders = pc_->GetSenders();
+ EXPECT_EQ(0u, senders.size());
+
+ // Re-adding the stream after SetLocalDescription creates the senders.
+ pc_->AddStream(stream_collection->at(0));
+ senders = pc_->GetSenders();
+ EXPECT_EQ(4u, senders.size());
+ EXPECT_TRUE(ContainsSender(senders, kAudioTracks[0]));
+ EXPECT_TRUE(ContainsSender(senders, kVideoTracks[0]));
+ EXPECT_TRUE(ContainsSender(senders, kAudioTracks[1]));
+ EXPECT_TRUE(ContainsSender(senders, kVideoTracks[1]));
+}
+
+// This tests that the expected behavior occurs if the SSRC on a local track is
+// changed when SetLocalDescription is called.
+TEST_P(PeerConnectionInterfaceTest,
+ ChangeSsrcOnTrackInLocalSessionDescription) {
+ RTCConfiguration config;
+ CreatePeerConnection(config);
+
+ AddAudioTrack(kAudioTracks[0]);
+ AddVideoTrack(kVideoTracks[0]);
+ std::unique_ptr<SessionDescriptionInterface> offer;
+ ASSERT_TRUE(DoCreateOffer(&offer, nullptr));
+ // Grab a copy of the offer before it gets passed into the PC.
+ std::unique_ptr<SessionDescriptionInterface> modified_offer =
+ webrtc::CreateSessionDescription(
+ webrtc::SdpType::kOffer, offer->session_id(),
+ offer->session_version(), offer->description()->Clone());
+ EXPECT_TRUE(DoSetLocalDescription(std::move(offer)));
+
+ auto senders = pc_->GetSenders();
+ EXPECT_EQ(2u, senders.size());
+ EXPECT_TRUE(ContainsSender(senders, kAudioTracks[0]));
+ EXPECT_TRUE(ContainsSender(senders, kVideoTracks[0]));
+
+ // Change the ssrc of the audio and video track.
+ // Iterate by reference so the mutations below land in `modified_offer`
+ // itself; iterating by value would edit a per-iteration copy of each
+ // ContentInfo (and needlessly copy it).
+ for (cricket::ContentInfo& content :
+ modified_offer->description()->contents()) {
+ cricket::MediaContentDescription* desc = content.media_description();
+ ASSERT_TRUE(desc);
+ for (StreamParams& stream : desc->mutable_streams()) {
+ for (unsigned int& ssrc : stream.ssrcs) {
+ // Bump the primary SSRC and keep any SSRC groups (e.g. FID/FEC
+ // pairings) referencing the old value in sync.
+ unsigned int old_ssrc = ssrc++;
+ for (auto& group : stream.ssrc_groups) {
+ for (unsigned int& secondary_ssrc : group.ssrcs) {
+ if (secondary_ssrc == old_ssrc) {
+ secondary_ssrc = ssrc;
+ }
+ }
+ }
+ }
+ }
+ }
+
+ // Applying the modified offer must keep the same two senders.
+ EXPECT_TRUE(DoSetLocalDescription(std::move(modified_offer)));
+ senders = pc_->GetSenders();
+ EXPECT_EQ(2u, senders.size());
+ EXPECT_TRUE(ContainsSender(senders, kAudioTracks[0]));
+ EXPECT_TRUE(ContainsSender(senders, kVideoTracks[0]));
+ // TODO(deadbeef): Once RtpSenders expose parameters, check that the SSRC
+ // changed.
+}
+
+// This tests that the expected behavior occurs if a new session description is
+// set with the same tracks, but on a different MediaStream.
+// Don't run under Unified Plan since the stream API is not available.
+TEST_F(PeerConnectionInterfaceTestPlanB,
+ SignalSameTracksInSeparateMediaStream) {
+ RTCConfiguration config;
+ CreatePeerConnection(config);
+
+ rtc::scoped_refptr<StreamCollection> stream_collection =
+ CreateStreamCollection(2, 1);
+ pc_->AddStream(stream_collection->at(0));
+ std::unique_ptr<SessionDescriptionInterface> offer;
+ ASSERT_TRUE(DoCreateOffer(&offer, nullptr));
+ EXPECT_TRUE(DoSetLocalDescription(std::move(offer)));
+
+ auto senders = pc_->GetSenders();
+ EXPECT_EQ(2u, senders.size());
+ EXPECT_TRUE(ContainsSender(senders, kAudioTracks[0], kStreams[0]));
+ EXPECT_TRUE(ContainsSender(senders, kVideoTracks[0], kStreams[0]));
+
+ // Add a new MediaStream but with the same tracks as in the first stream.
+ rtc::scoped_refptr<webrtc::MediaStreamInterface> stream_1(
+ webrtc::MediaStream::Create(kStreams[1]));
+ stream_1->AddTrack(stream_collection->at(0)->GetVideoTracks()[0]);
+ stream_1->AddTrack(stream_collection->at(0)->GetAudioTracks()[0]);
+ pc_->AddStream(stream_1.get());
+
+ ASSERT_TRUE(DoCreateOffer(&offer, nullptr));
+ EXPECT_TRUE(DoSetLocalDescription(std::move(offer)));
+
+ auto new_senders = pc_->GetSenders();
+ // Should be the same senders as before, but with updated stream id.
+ // Note that this behavior is subject to change in the future.
+ // We may decide the PC should ignore existing tracks in AddStream.
+ EXPECT_EQ(senders, new_senders);
+ EXPECT_TRUE(ContainsSender(new_senders, kAudioTracks[0], kStreams[1]));
+ EXPECT_TRUE(ContainsSender(new_senders, kVideoTracks[0], kStreams[1]));
+}
+
+// This tests that PeerConnectionObserver::OnAddTrack is correctly called.
+TEST_P(PeerConnectionInterfaceTest, OnAddTrackCallback) {
+ RTCConfiguration config;
+ CreatePeerConnection(config);
+ // First offer signals only an audio track: one OnAddTrack callback.
+ CreateAndSetRemoteOffer(kSdpStringWithStream1AudioTrackOnly);
+ EXPECT_EQ(observer_.num_added_tracks_, 1);
+ EXPECT_EQ(observer_.last_added_track_label_, kAudioTracks[0]);
+
+ // Create and set the updated remote SDP.
+ // Adding the video track fires exactly one more callback.
+ CreateAndSetRemoteOffer(kSdpStringWithStream1PlanB);
+ EXPECT_EQ(observer_.num_added_tracks_, 2);
+ EXPECT_EQ(observer_.last_added_track_label_, kVideoTracks[0]);
+}
+
+// Test that when SetConfiguration is called and the configuration is
+// changing, the next offer causes an ICE restart.
+TEST_P(PeerConnectionInterfaceTest, SetConfigurationCausingIceRestart) {
+ PeerConnectionInterface::RTCConfiguration config;
+ config.sdp_semantics = sdp_semantics_;
+ config.type = PeerConnectionInterface::kRelay;
+ CreatePeerConnection(config);
+ // Re-read the config so later SetConfiguration diffs against the PC's
+ // actual (normalized) configuration.
+ config = pc_->GetConfiguration();
+ AddAudioTrack(kAudioTracks[0], {kStreamId1});
+ AddVideoTrack(kVideoTracks[0], {kStreamId1});
+
+ // Do initial offer/answer so there's something to restart.
+ CreateOfferAsLocalDescription();
+ CreateAnswerAsRemoteDescription(GetSdpStringWithStream1());
+
+ // Grab the ufrags.
+ std::vector<std::string> initial_ufrags = GetUfrags(pc_->local_description());
+
+ // Change ICE policy, which should trigger an ICE restart on the next offer.
+ config.type = PeerConnectionInterface::kAll;
+ EXPECT_TRUE(pc_->SetConfiguration(config).ok());
+ CreateOfferAsLocalDescription();
+
+ // Grab the new ufrags.
+ std::vector<std::string> subsequent_ufrags =
+ GetUfrags(pc_->local_description());
+
+ // Sanity check.
+ EXPECT_EQ(initial_ufrags.size(), subsequent_ufrags.size());
+ // Check that each ufrag is different (an ICE restart regenerates them).
+ for (int i = 0; i < static_cast<int>(initial_ufrags.size()); ++i) {
+ EXPECT_NE(initial_ufrags[i], subsequent_ufrags[i]);
+ }
+}
+
+// Test that when SetConfiguration is called and the configuration *isn't*
+// changing, the next offer does *not* cause an ICE restart.
+TEST_P(PeerConnectionInterfaceTest, SetConfigurationNotCausingIceRestart) {
+ PeerConnectionInterface::RTCConfiguration config;
+ config.sdp_semantics = sdp_semantics_;
+ config.type = PeerConnectionInterface::kRelay;
+ CreatePeerConnection(config);
+ // Use the PC's own view of the config so the later SetConfiguration is a
+ // true no-op.
+ config = pc_->GetConfiguration();
+ AddAudioTrack(kAudioTracks[0]);
+ AddVideoTrack(kVideoTracks[0]);
+
+ // Do initial offer/answer so there's something to restart.
+ CreateOfferAsLocalDescription();
+ CreateAnswerAsRemoteDescription(GetSdpStringWithStream1());
+
+ // Grab the ufrags.
+ std::vector<std::string> initial_ufrags = GetUfrags(pc_->local_description());
+
+ // Call SetConfiguration with a config identical to what the PC was
+ // constructed with.
+ EXPECT_TRUE(pc_->SetConfiguration(config).ok());
+ CreateOfferAsLocalDescription();
+
+ // Grab the new ufrags.
+ std::vector<std::string> subsequent_ufrags =
+ GetUfrags(pc_->local_description());
+
+ // Identical ufrags prove no ICE restart happened.
+ EXPECT_EQ(initial_ufrags, subsequent_ufrags);
+}
+
+// Test for a weird corner case scenario:
+// 1. Audio/video session established.
+// 2. SetConfiguration changes ICE config; ICE restart needed.
+// 3. ICE restart initiated by remote peer, but only for one m= section.
+// 4. Next createOffer should initiate an ICE restart, but only for the other
+// m= section; it would be pointless to do an ICE restart for the m= section
+// that was already restarted.
+TEST_P(PeerConnectionInterfaceTest, SetConfigurationCausingPartialIceRestart) {
+ PeerConnectionInterface::RTCConfiguration config;
+ config.sdp_semantics = sdp_semantics_;
+ config.type = PeerConnectionInterface::kRelay;
+ CreatePeerConnection(config);
+ config = pc_->GetConfiguration();
+ AddAudioTrack(kAudioTracks[0], {kStreamId1});
+ AddVideoTrack(kVideoTracks[0], {kStreamId1});
+
+ // Do initial offer/answer so there's something to restart.
+ CreateOfferAsLocalDescription();
+ CreateAnswerAsRemoteDescription(GetSdpStringWithStream1());
+
+ // Change ICE policy, which should set the "needs-ice-restart" flag.
+ config.type = PeerConnectionInterface::kAll;
+ EXPECT_TRUE(pc_->SetConfiguration(config).ok());
+
+ // Do ICE restart for the first m= section, initiated by remote peer.
+ std::unique_ptr<webrtc::SessionDescriptionInterface> remote_offer(
+ webrtc::CreateSessionDescription(SdpType::kOffer,
+ GetSdpStringWithStream1(), nullptr));
+ ASSERT_TRUE(remote_offer);
+ // Changing the remote ufrag on transport 0 is what signals the restart.
+ remote_offer->description()->transport_infos()[0].description.ice_ufrag =
+ "modified";
+ EXPECT_TRUE(DoSetRemoteDescription(std::move(remote_offer)));
+ CreateAnswerAsLocalDescription();
+
+ // Grab the ufrags.
+ std::vector<std::string> initial_ufrags = GetUfrags(pc_->local_description());
+ ASSERT_EQ(2U, initial_ufrags.size());
+
+ // Create offer and grab the new ufrags.
+ CreateOfferAsLocalDescription();
+ std::vector<std::string> subsequent_ufrags =
+ GetUfrags(pc_->local_description());
+ ASSERT_EQ(2U, subsequent_ufrags.size());
+
+ // Ensure that only the ufrag for the second m= section changed.
+ EXPECT_EQ(initial_ufrags[0], subsequent_ufrags[0]);
+ EXPECT_NE(initial_ufrags[1], subsequent_ufrags[1]);
+}
+
+// Tests that the methods to return current/pending descriptions work as
+// expected at different points in the offer/answer exchange. This test does
+// one offer/answer exchange as the offerer, then another as the answerer.
+// The same SDP string is reused for every role, which is only possible
+// because DTLS is disabled (see below).
+TEST_P(PeerConnectionInterfaceTest, CurrentAndPendingDescriptions) {
+ // This disables DTLS so we can apply an answer to ourselves.
+ CreatePeerConnection();
+
+ // Create initial local offer and get SDP (which will also be used as
+ // answer/pranswer);
+ std::unique_ptr<SessionDescriptionInterface> local_offer;
+ ASSERT_TRUE(DoCreateOffer(&local_offer, nullptr));
+ std::string sdp;
+ EXPECT_TRUE(local_offer->ToString(&sdp));
+
+ // Set local offer: it becomes the pending local description; nothing is
+ // current yet.
+ SessionDescriptionInterface* local_offer_ptr = local_offer.get();
+ EXPECT_TRUE(DoSetLocalDescription(std::move(local_offer)));
+ EXPECT_EQ(local_offer_ptr, pc_->pending_local_description());
+ EXPECT_EQ(nullptr, pc_->pending_remote_description());
+ EXPECT_EQ(nullptr, pc_->current_local_description());
+ EXPECT_EQ(nullptr, pc_->current_remote_description());
+
+ // Set remote pranswer: pending on the remote side, still nothing current.
+ std::unique_ptr<SessionDescriptionInterface> remote_pranswer(
+ webrtc::CreateSessionDescription(SdpType::kPrAnswer, sdp));
+ SessionDescriptionInterface* remote_pranswer_ptr = remote_pranswer.get();
+ EXPECT_TRUE(DoSetRemoteDescription(std::move(remote_pranswer)));
+ EXPECT_EQ(local_offer_ptr, pc_->pending_local_description());
+ EXPECT_EQ(remote_pranswer_ptr, pc_->pending_remote_description());
+ EXPECT_EQ(nullptr, pc_->current_local_description());
+ EXPECT_EQ(nullptr, pc_->current_remote_description());
+
+ // Set remote answer: the exchange completes, pending -> current.
+ std::unique_ptr<SessionDescriptionInterface> remote_answer(
+ webrtc::CreateSessionDescription(SdpType::kAnswer, sdp));
+ SessionDescriptionInterface* remote_answer_ptr = remote_answer.get();
+ EXPECT_TRUE(DoSetRemoteDescription(std::move(remote_answer)));
+ EXPECT_EQ(nullptr, pc_->pending_local_description());
+ EXPECT_EQ(nullptr, pc_->pending_remote_description());
+ EXPECT_EQ(local_offer_ptr, pc_->current_local_description());
+ EXPECT_EQ(remote_answer_ptr, pc_->current_remote_description());
+
+ // Set remote offer: a new exchange starts with us as answerer; the
+ // previous current descriptions remain in place.
+ std::unique_ptr<SessionDescriptionInterface> remote_offer(
+ webrtc::CreateSessionDescription(SdpType::kOffer, sdp));
+ SessionDescriptionInterface* remote_offer_ptr = remote_offer.get();
+ EXPECT_TRUE(DoSetRemoteDescription(std::move(remote_offer)));
+ EXPECT_EQ(remote_offer_ptr, pc_->pending_remote_description());
+ EXPECT_EQ(nullptr, pc_->pending_local_description());
+ EXPECT_EQ(local_offer_ptr, pc_->current_local_description());
+ EXPECT_EQ(remote_answer_ptr, pc_->current_remote_description());
+
+ // Set local pranswer.
+ std::unique_ptr<SessionDescriptionInterface> local_pranswer(
+ webrtc::CreateSessionDescription(SdpType::kPrAnswer, sdp));
+ SessionDescriptionInterface* local_pranswer_ptr = local_pranswer.get();
+ EXPECT_TRUE(DoSetLocalDescription(std::move(local_pranswer)));
+ EXPECT_EQ(remote_offer_ptr, pc_->pending_remote_description());
+ EXPECT_EQ(local_pranswer_ptr, pc_->pending_local_description());
+ EXPECT_EQ(local_offer_ptr, pc_->current_local_description());
+ EXPECT_EQ(remote_answer_ptr, pc_->current_remote_description());
+
+ // Set local answer: the second exchange completes, pending -> current.
+ std::unique_ptr<SessionDescriptionInterface> local_answer(
+ webrtc::CreateSessionDescription(SdpType::kAnswer, sdp));
+ SessionDescriptionInterface* local_answer_ptr = local_answer.get();
+ EXPECT_TRUE(DoSetLocalDescription(std::move(local_answer)));
+ EXPECT_EQ(nullptr, pc_->pending_remote_description());
+ EXPECT_EQ(nullptr, pc_->pending_local_description());
+ EXPECT_EQ(remote_offer_ptr, pc_->current_remote_description());
+ EXPECT_EQ(local_answer_ptr, pc_->current_local_description());
+}
+
+// Tests that it won't crash when calling StartRtcEventLog or StopRtcEventLog
+// after the PeerConnection is closed.
+// This version tests the StartRtcEventLog version that receives an object
+// of type `RtcEventLogOutput`.
+TEST_P(PeerConnectionInterfaceTest,
+ StartAndStopLoggingToOutputAfterPeerConnectionClosed) {
+ CreatePeerConnection();
+ // The RtcEventLog will be reset when the PeerConnection is closed.
+ pc_->Close();
+
+ // Starting must fail gracefully (return false) on a closed connection...
+ EXPECT_FALSE(
+ pc_->StartRtcEventLog(std::make_unique<webrtc::RtcEventLogOutputNull>(),
+ webrtc::RtcEventLog::kImmediateOutput));
+ // ...and stopping must be a harmless no-op rather than a crash.
+ pc_->StopRtcEventLog();
+}
+
+// Test that generated offers/answers include "ice-option:trickle".
+TEST_P(PeerConnectionInterfaceTest, OffersAndAnswersHaveTrickleIceOption) {
+ CreatePeerConnection();
+
+ // First, create an offer with audio/video.
+ RTCOfferAnswerOptions options;
+ options.offer_to_receive_audio = 1;
+ options.offer_to_receive_video = 1;
+ std::unique_ptr<SessionDescriptionInterface> offer;
+ ASSERT_TRUE(DoCreateOffer(&offer, &options));
+ cricket::SessionDescription* desc = offer->description();
+ // One transport per m= section (audio + video), each advertising trickle.
+ ASSERT_EQ(2u, desc->transport_infos().size());
+ EXPECT_TRUE(desc->transport_infos()[0].description.HasOption("trickle"));
+ EXPECT_TRUE(desc->transport_infos()[1].description.HasOption("trickle"));
+
+ // Apply the offer as a remote description, then create an answer.
+ // can_trickle_ice_candidates() is unset before any remote description is
+ // applied, and becomes true once the trickle-advertising offer is set.
+ EXPECT_FALSE(pc_->can_trickle_ice_candidates());
+ EXPECT_TRUE(DoSetRemoteDescription(std::move(offer)));
+ ASSERT_TRUE(pc_->can_trickle_ice_candidates());
+ EXPECT_TRUE(*(pc_->can_trickle_ice_candidates()));
+ std::unique_ptr<SessionDescriptionInterface> answer;
+ ASSERT_TRUE(DoCreateAnswer(&answer, &options));
+ desc = answer->description();
+ ASSERT_EQ(2u, desc->transport_infos().size());
+ EXPECT_TRUE(desc->transport_infos()[0].description.HasOption("trickle"));
+ EXPECT_TRUE(desc->transport_infos()[1].description.HasOption("trickle"));
+}
+
+// Test that ICE renomination isn't offered if it's not enabled in the PC's
+// RTCConfiguration.
+TEST_P(PeerConnectionInterfaceTest, IceRenominationNotOffered) {
+ PeerConnectionInterface::RTCConfiguration config;
+ config.sdp_semantics = sdp_semantics_;
+ config.enable_ice_renomination = false;
+ CreatePeerConnection(config);
+ AddAudioTrack("foo");
+
+ std::unique_ptr<SessionDescriptionInterface> offer;
+ ASSERT_TRUE(DoCreateOffer(&offer, nullptr));
+ cricket::SessionDescription* desc = offer->description();
+ // Single audio m= section, so a single transport; it must not carry the
+ // renomination ICE parameter.
+ EXPECT_EQ(1u, desc->transport_infos().size());
+ EXPECT_FALSE(
+ desc->transport_infos()[0].description.GetIceParameters().renomination);
+}
+
+// Test that the ICE renomination option is present in generated offers/answers
+// if it's enabled in the PC's RTCConfiguration.
+TEST_P(PeerConnectionInterfaceTest, IceRenominationOptionInOfferAndAnswer) {
+ PeerConnectionInterface::RTCConfiguration config;
+ config.sdp_semantics = sdp_semantics_;
+ config.enable_ice_renomination = true;
+ CreatePeerConnection(config);
+ AddAudioTrack("foo");
+
+ std::unique_ptr<SessionDescriptionInterface> offer;
+ ASSERT_TRUE(DoCreateOffer(&offer, nullptr));
+ cricket::SessionDescription* desc = offer->description();
+ EXPECT_EQ(1u, desc->transport_infos().size());
+ EXPECT_TRUE(
+ desc->transport_infos()[0].description.GetIceParameters().renomination);
+
+ // Set the offer as a remote description, then create an answer and ensure it
+ // has the renomination flag too.
+ EXPECT_TRUE(DoSetRemoteDescription(std::move(offer)));
+ std::unique_ptr<SessionDescriptionInterface> answer;
+ ASSERT_TRUE(DoCreateAnswer(&answer, nullptr));
+ desc = answer->description();
+ EXPECT_EQ(1u, desc->transport_infos().size());
+ EXPECT_TRUE(
+ desc->transport_infos()[0].description.GetIceParameters().renomination);
+}
+
+// Test that if CreateOffer is called with the deprecated "offer to receive
+// audio/video" constraints, they're processed and result in an offer with
+// audio/video sections just as if RTCOfferAnswerOptions had been used.
+TEST_P(PeerConnectionInterfaceTest, CreateOfferWithOfferToReceiveConstraints) {
+ CreatePeerConnection();
+
+ RTCOfferAnswerOptions options;
+ options.offer_to_receive_audio = 1;
+ options.offer_to_receive_video = 1;
+ std::unique_ptr<SessionDescriptionInterface> offer;
+ ASSERT_TRUE(DoCreateOffer(&offer, &options));
+
+ // Both m= sections must exist and be active (not rejected).
+ cricket::SessionDescription* desc = offer->description();
+ const cricket::ContentInfo* audio = cricket::GetFirstAudioContent(desc);
+ const cricket::ContentInfo* video = cricket::GetFirstVideoContent(desc);
+ ASSERT_NE(nullptr, audio);
+ ASSERT_NE(nullptr, video);
+ EXPECT_FALSE(audio->rejected);
+ EXPECT_FALSE(video->rejected);
+}
+
+// Test that if CreateAnswer is called with the deprecated "offer to receive
+// audio/video" constraints, they're processed and can be used to reject an
+// offered m= section just as can be done with RTCOfferAnswerOptions;
+// Don't run under Unified Plan since this behavior is not supported.
+TEST_F(PeerConnectionInterfaceTestPlanB,
+ CreateAnswerWithOfferToReceiveConstraints) {
+ CreatePeerConnection();
+
+ // First, create an offer with audio/video and apply it as a remote
+ // description.
+ RTCOfferAnswerOptions options;
+ options.offer_to_receive_audio = 1;
+ options.offer_to_receive_video = 1;
+ std::unique_ptr<SessionDescriptionInterface> offer;
+ ASSERT_TRUE(DoCreateOffer(&offer, &options));
+ EXPECT_TRUE(DoSetRemoteDescription(std::move(offer)));
+
+ // Now create answer that rejects audio/video.
+ options.offer_to_receive_audio = 0;
+ options.offer_to_receive_video = 0;
+ std::unique_ptr<SessionDescriptionInterface> answer;
+ ASSERT_TRUE(DoCreateAnswer(&answer, &options));
+
+ // The m= sections remain present in the answer but are marked rejected.
+ cricket::SessionDescription* desc = answer->description();
+ const cricket::ContentInfo* audio = cricket::GetFirstAudioContent(desc);
+ const cricket::ContentInfo* video = cricket::GetFirstVideoContent(desc);
+ ASSERT_NE(nullptr, audio);
+ ASSERT_NE(nullptr, video);
+ EXPECT_TRUE(audio->rejected);
+ EXPECT_TRUE(video->rejected);
+}
+
+// Test that negotiation can succeed with a data channel only, and with the max
+// bundle policy. Previously there was a bug that prevented this.
+// The test is compiled in (but DISABLED_) when SCTP support is absent, so the
+// test binary always links.
+#ifdef WEBRTC_HAVE_SCTP
+TEST_P(PeerConnectionInterfaceTest, DataChannelOnlyOfferWithMaxBundlePolicy) {
+#else
+TEST_P(PeerConnectionInterfaceTest,
+ DISABLED_DataChannelOnlyOfferWithMaxBundlePolicy) {
+#endif // WEBRTC_HAVE_SCTP
+ PeerConnectionInterface::RTCConfiguration config;
+ config.sdp_semantics = sdp_semantics_;
+ config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxBundle;
+ CreatePeerConnection(config);
+
+ // First, create an offer with only a data channel and apply it as a remote
+ // description.
+ pc_->CreateDataChannelOrError("test", nullptr);
+ std::unique_ptr<SessionDescriptionInterface> offer;
+ ASSERT_TRUE(DoCreateOffer(&offer, nullptr));
+ EXPECT_TRUE(DoSetRemoteDescription(std::move(offer)));
+
+ // Create and set answer as well.
+ std::unique_ptr<SessionDescriptionInterface> answer;
+ ASSERT_TRUE(DoCreateAnswer(&answer, nullptr));
+ EXPECT_TRUE(DoSetLocalDescription(std::move(answer)));
+}
+
+// The following suite exercises SetBitrate() argument validation: any
+// combination where min <= start <= max (for the fields that are set) and all
+// values are non-negative is accepted; everything else is rejected.
+TEST_P(PeerConnectionInterfaceTest, SetBitrateWithoutMinSucceeds) {
+ CreatePeerConnection();
+ BitrateSettings bitrate;
+ bitrate.start_bitrate_bps = 100000;
+ EXPECT_TRUE(pc_->SetBitrate(bitrate).ok());
+}
+
+TEST_P(PeerConnectionInterfaceTest, SetBitrateNegativeMinFails) {
+ CreatePeerConnection();
+ BitrateSettings bitrate;
+ bitrate.min_bitrate_bps = -1;
+ EXPECT_FALSE(pc_->SetBitrate(bitrate).ok());
+}
+
+TEST_P(PeerConnectionInterfaceTest, SetBitrateCurrentLessThanMinFails) {
+ CreatePeerConnection();
+ BitrateSettings bitrate;
+ bitrate.min_bitrate_bps = 5;
+ bitrate.start_bitrate_bps = 3;
+ EXPECT_FALSE(pc_->SetBitrate(bitrate).ok());
+}
+
+TEST_P(PeerConnectionInterfaceTest, SetBitrateCurrentNegativeFails) {
+ CreatePeerConnection();
+ BitrateSettings bitrate;
+ bitrate.start_bitrate_bps = -1;
+ EXPECT_FALSE(pc_->SetBitrate(bitrate).ok());
+}
+
+TEST_P(PeerConnectionInterfaceTest, SetBitrateMaxLessThanCurrentFails) {
+ CreatePeerConnection();
+ BitrateSettings bitrate;
+ bitrate.start_bitrate_bps = 10;
+ bitrate.max_bitrate_bps = 8;
+ EXPECT_FALSE(pc_->SetBitrate(bitrate).ok());
+}
+
+TEST_P(PeerConnectionInterfaceTest, SetBitrateMaxLessThanMinFails) {
+ CreatePeerConnection();
+ BitrateSettings bitrate;
+ bitrate.min_bitrate_bps = 10;
+ bitrate.max_bitrate_bps = 8;
+ EXPECT_FALSE(pc_->SetBitrate(bitrate).ok());
+}
+
+TEST_P(PeerConnectionInterfaceTest, SetBitrateMaxNegativeFails) {
+ CreatePeerConnection();
+ BitrateSettings bitrate;
+ bitrate.max_bitrate_bps = -1;
+ EXPECT_FALSE(pc_->SetBitrate(bitrate).ok());
+}
+
+// The current bitrate from BitrateSettings is currently clamped
+// by Call's BitrateConstraints, which comes from the SDP or a default value.
+// This test checks that a call to SetBitrate with a current bitrate that will
+// be clamped succeeds.
+TEST_P(PeerConnectionInterfaceTest, SetBitrateCurrentLessThanImplicitMin) {
+ CreatePeerConnection();
+ BitrateSettings bitrate;
+ bitrate.start_bitrate_bps = 1;
+ EXPECT_TRUE(pc_->SetBitrate(bitrate).ok());
+}
+
+// The following tests verify that the offer can be created correctly.
+TEST_P(PeerConnectionInterfaceTest,
+ CreateOfferFailsWithInvalidOfferToReceiveAudio) {
+ RTCOfferAnswerOptions rtc_options;
+
+ // Setting offer_to_receive_audio to a value lower than kUndefined or greater
+ // than kMaxOfferToReceiveMedia should be treated as invalid.
+ rtc_options.offer_to_receive_audio = RTCOfferAnswerOptions::kUndefined - 1;
+ CreatePeerConnection();
+ EXPECT_FALSE(CreateOfferWithOptions(rtc_options));
+
+ // Also check the out-of-range value on the high side.
+ rtc_options.offer_to_receive_audio =
+ RTCOfferAnswerOptions::kMaxOfferToReceiveMedia + 1;
+ EXPECT_FALSE(CreateOfferWithOptions(rtc_options));
+}
+
+TEST_P(PeerConnectionInterfaceTest,
+ CreateOfferFailsWithInvalidOfferToReceiveVideo) {
+ RTCOfferAnswerOptions rtc_options;
+
+ // Setting offer_to_receive_video to a value lower than kUndefined or greater
+ // than kMaxOfferToReceiveMedia should be treated as invalid.
+ rtc_options.offer_to_receive_video = RTCOfferAnswerOptions::kUndefined - 1;
+ CreatePeerConnection();
+ EXPECT_FALSE(CreateOfferWithOptions(rtc_options));
+
+ rtc_options.offer_to_receive_video =
+ RTCOfferAnswerOptions::kMaxOfferToReceiveMedia + 1;
+ EXPECT_FALSE(CreateOfferWithOptions(rtc_options));
+}
+
+// Test that the audio and video content will be added to an offer if both
+// `offer_to_receive_audio` and `offer_to_receive_video` options are 1.
+TEST_P(PeerConnectionInterfaceTest, CreateOfferWithAudioVideoOptions) {
+ RTCOfferAnswerOptions rtc_options;
+ rtc_options.offer_to_receive_audio = 1;
+ rtc_options.offer_to_receive_video = 1;
+
+ std::unique_ptr<SessionDescriptionInterface> offer;
+ CreatePeerConnection();
+ offer = CreateOfferWithOptions(rtc_options);
+ ASSERT_TRUE(offer);
+ EXPECT_NE(nullptr, GetFirstAudioContent(offer->description()));
+ EXPECT_NE(nullptr, GetFirstVideoContent(offer->description()));
+}
+
+// Test that only audio content will be added to the offer if only
+// `offer_to_receive_audio` options is 1.
+TEST_P(PeerConnectionInterfaceTest, CreateOfferWithAudioOnlyOptions) {
+ RTCOfferAnswerOptions rtc_options;
+ rtc_options.offer_to_receive_audio = 1;
+ rtc_options.offer_to_receive_video = 0;
+
+ std::unique_ptr<SessionDescriptionInterface> offer;
+ CreatePeerConnection();
+ offer = CreateOfferWithOptions(rtc_options);
+ ASSERT_TRUE(offer);
+ EXPECT_NE(nullptr, GetFirstAudioContent(offer->description()));
+ // Explicitly setting offer_to_receive_video to 0 must suppress the video
+ // m= section entirely.
+ EXPECT_EQ(nullptr, GetFirstVideoContent(offer->description()));
+}
+
+// Test that only video content will be added if only `offer_to_receive_video`
+// options is 1.
+TEST_P(PeerConnectionInterfaceTest, CreateOfferWithVideoOnlyOptions) {
+ RTCOfferAnswerOptions rtc_options;
+ rtc_options.offer_to_receive_audio = 0;
+ rtc_options.offer_to_receive_video = 1;
+
+ std::unique_ptr<SessionDescriptionInterface> offer;
+ CreatePeerConnection();
+ offer = CreateOfferWithOptions(rtc_options);
+ ASSERT_TRUE(offer);
+ EXPECT_EQ(nullptr, GetFirstAudioContent(offer->description()));
+ EXPECT_NE(nullptr, GetFirstVideoContent(offer->description()));
+}
+
+// Test that no media content will be added to the offer if using default
+// RTCOfferAnswerOptions.
+TEST_P(PeerConnectionInterfaceTest, CreateOfferWithDefaultOfferAnswerOptions) {
+ RTCOfferAnswerOptions rtc_options;
+
+ std::unique_ptr<SessionDescriptionInterface> offer;
+ CreatePeerConnection();
+ offer = CreateOfferWithOptions(rtc_options);
+ ASSERT_TRUE(offer);
+ // No tracks were added and no receive options were set, so the offer is
+ // expected to contain no audio or video m= sections at all.
+ EXPECT_EQ(nullptr, GetFirstAudioContent(offer->description()));
+ EXPECT_EQ(nullptr, GetFirstVideoContent(offer->description()));
+}
+
+// Test that if `ice_restart` is true, the ufrag/pwd will change, otherwise
+// ufrag/pwd will be the same in the new offer.
+TEST_P(PeerConnectionInterfaceTest, CreateOfferWithIceRestart) {
+ CreatePeerConnection();
+
+ RTCOfferAnswerOptions rtc_options;
+ rtc_options.ice_restart = false;
+ rtc_options.offer_to_receive_audio = 1;
+
+ // Baseline offer: capture the initial ICE credentials for the audio mid.
+ std::unique_ptr<SessionDescriptionInterface> offer;
+ CreateOfferWithOptionsAsLocalDescription(&offer, rtc_options);
+ std::string mid = cricket::GetFirstAudioContent(offer->description())->name;
+ auto ufrag1 =
+ offer->description()->GetTransportInfoByName(mid)->description.ice_ufrag;
+ auto pwd1 =
+ offer->description()->GetTransportInfoByName(mid)->description.ice_pwd;
+
+ // `ice_restart` is false, the ufrag/pwd shouldn't change.
+ CreateOfferWithOptionsAsLocalDescription(&offer, rtc_options);
+ auto ufrag2 =
+ offer->description()->GetTransportInfoByName(mid)->description.ice_ufrag;
+ auto pwd2 =
+ offer->description()->GetTransportInfoByName(mid)->description.ice_pwd;
+
+ // `ice_restart` is true, the ufrag/pwd should change.
+ rtc_options.ice_restart = true;
+ CreateOfferWithOptionsAsLocalDescription(&offer, rtc_options);
+ auto ufrag3 =
+ offer->description()->GetTransportInfoByName(mid)->description.ice_ufrag;
+ auto pwd3 =
+ offer->description()->GetTransportInfoByName(mid)->description.ice_pwd;
+
+ EXPECT_EQ(ufrag1, ufrag2);
+ EXPECT_EQ(pwd1, pwd2);
+ EXPECT_NE(ufrag2, ufrag3);
+ EXPECT_NE(pwd2, pwd3);
+}
+
+// Test that if `use_rtp_mux` is true, the bundling will be enabled in the
+// offer; if it is false, there won't be any bundle group in the offer.
+TEST_P(PeerConnectionInterfaceTest, CreateOfferWithRtpMux) {
+ RTCOfferAnswerOptions rtc_options;
+ rtc_options.offer_to_receive_audio = 1;
+ rtc_options.offer_to_receive_video = 1;
+
+ std::unique_ptr<SessionDescriptionInterface> offer;
+ CreatePeerConnection();
+
+ // With rtp mux enabled, the offer carries a BUNDLE group.
+ rtc_options.use_rtp_mux = true;
+ offer = CreateOfferWithOptions(rtc_options);
+ ASSERT_TRUE(offer);
+ EXPECT_NE(nullptr, GetFirstAudioContent(offer->description()));
+ EXPECT_NE(nullptr, GetFirstVideoContent(offer->description()));
+ EXPECT_TRUE(offer->description()->HasGroup(cricket::GROUP_TYPE_BUNDLE));
+
+ // With rtp mux disabled, both m= sections are still offered but no BUNDLE
+ // group is present.
+ rtc_options.use_rtp_mux = false;
+ offer = CreateOfferWithOptions(rtc_options);
+ ASSERT_TRUE(offer);
+ EXPECT_NE(nullptr, GetFirstAudioContent(offer->description()));
+ EXPECT_NE(nullptr, GetFirstVideoContent(offer->description()));
+ EXPECT_FALSE(offer->description()->HasGroup(cricket::GROUP_TYPE_BUNDLE));
+}
+
+// This test ensures OnRenegotiationNeeded is called when we add track with
+// MediaStream -> AddTrack in the same way it is called when we add track with
+// PeerConnection -> AddTrack.
+// The test can be removed once addStream is rewritten in terms of addTrack
+// https://bugs.chromium.org/p/webrtc/issues/detail?id=7815
+// Don't run under Unified Plan since the stream API is not available.
+TEST_F(PeerConnectionInterfaceTestPlanB,
+ MediaStreamAddTrackRemoveTrackRenegotiate) {
+ CreatePeerConnectionWithoutDtls();
+ rtc::scoped_refptr<MediaStreamInterface> stream(
+ pc_factory_->CreateLocalMediaStream(kStreamId1));
+ pc_->AddStream(stream.get());
+ rtc::scoped_refptr<AudioTrackInterface> audio_track(
+ CreateAudioTrack("audio_track"));
+ rtc::scoped_refptr<VideoTrackInterface> video_track(
+ CreateVideoTrack("video_track"));
+ // Each add/remove below must trigger OnRenegotiationNeeded; the flag is
+ // cleared after each check so every mutation is verified independently.
+ stream->AddTrack(audio_track);
+ EXPECT_TRUE_WAIT(observer_.renegotiation_needed_, kTimeout);
+ observer_.renegotiation_needed_ = false;
+
+ // Complete a negotiation cycle between each mutation so the next change is
+ // made against a stable signaling state.
+ CreateOfferReceiveAnswer();
+ stream->AddTrack(video_track);
+ EXPECT_TRUE_WAIT(observer_.renegotiation_needed_, kTimeout);
+ observer_.renegotiation_needed_ = false;
+
+ CreateOfferReceiveAnswer();
+ stream->RemoveTrack(audio_track);
+ EXPECT_TRUE_WAIT(observer_.renegotiation_needed_, kTimeout);
+ observer_.renegotiation_needed_ = false;
+
+ CreateOfferReceiveAnswer();
+ stream->RemoveTrack(video_track);
+ EXPECT_TRUE_WAIT(observer_.renegotiation_needed_, kTimeout);
+ observer_.renegotiation_needed_ = false;
+}
+
+// Tests that an error is returned if a description is applied that has fewer
+// media sections than the existing description.
+TEST_P(PeerConnectionInterfaceTest,
+ MediaSectionCountEnforcedForSubsequentOffer) {
+ CreatePeerConnection();
+ AddAudioTrack("audio_label");
+ AddVideoTrack("video_label");
+
+ std::unique_ptr<SessionDescriptionInterface> offer;
+ ASSERT_TRUE(DoCreateOffer(&offer, nullptr));
+ EXPECT_TRUE(DoSetRemoteDescription(std::move(offer)));
+
+ // A remote offer with fewer media sections should be rejected.
+ // Strip both m= sections (audio + video) before applying.
+ ASSERT_TRUE(DoCreateOffer(&offer, nullptr));
+ offer->description()->contents().pop_back();
+ offer->description()->contents().pop_back();
+ ASSERT_TRUE(offer->description()->contents().empty());
+ EXPECT_FALSE(DoSetRemoteDescription(std::move(offer)));
+
+ std::unique_ptr<SessionDescriptionInterface> answer;
+ ASSERT_TRUE(DoCreateAnswer(&answer, nullptr));
+ EXPECT_TRUE(DoSetLocalDescription(std::move(answer)));
+
+ // A subsequent local offer with fewer media sections should be rejected.
+ ASSERT_TRUE(DoCreateOffer(&offer, nullptr));
+ offer->description()->contents().pop_back();
+ offer->description()->contents().pop_back();
+ ASSERT_TRUE(offer->description()->contents().empty());
+ EXPECT_FALSE(DoSetLocalDescription(std::move(offer)));
+}
+
+// Verifies that `offer_extmap_allow_mixed` defaults to true and can be turned
+// off via RTCConfiguration, with the setting reflected in generated offers.
+TEST_P(PeerConnectionInterfaceTest, ExtmapAllowMixedIsConfigurable) {
+ RTCConfiguration config;
+ // Default behavior is true.
+ CreatePeerConnection(config);
+ std::unique_ptr<SessionDescriptionInterface> offer;
+ ASSERT_TRUE(DoCreateOffer(&offer, nullptr));
+ EXPECT_TRUE(offer->description()->extmap_allow_mixed());
+ // Possible to set to false.
+ config.offer_extmap_allow_mixed = false;
+ CreatePeerConnection(config);
+ offer = nullptr;
+ ASSERT_TRUE(DoCreateOffer(&offer, nullptr));
+ EXPECT_FALSE(offer->description()->extmap_allow_mixed());
+}
+
+// Verifies that a degradation preference set through SetParameters before any
+// encodings are negotiated survives applying the local description.
+TEST_P(PeerConnectionInterfaceTest,
+ RtpSenderSetDegradationPreferenceWithoutEncodings) {
+ CreatePeerConnection();
+ AddVideoTrack("video_label");
+
+ std::vector<rtc::scoped_refptr<RtpSenderInterface>> rtp_senders =
+ pc_->GetSenders();
+ ASSERT_EQ(rtp_senders.size(), 1u);
+ ASSERT_EQ(rtp_senders[0]->media_type(), cricket::MEDIA_TYPE_VIDEO);
+ rtc::scoped_refptr<RtpSenderInterface> video_rtp_sender = rtp_senders[0];
+ RtpParameters parameters = video_rtp_sender->GetParameters();
+ // Sanity check: the preference must start out different from the value the
+ // test sets, so the final assertion is meaningful.
+ ASSERT_NE(parameters.degradation_preference,
+ DegradationPreference::MAINTAIN_RESOLUTION);
+ parameters.degradation_preference =
+ DegradationPreference::MAINTAIN_RESOLUTION;
+ ASSERT_TRUE(video_rtp_sender->SetParameters(parameters).ok());
+
+ std::unique_ptr<SessionDescriptionInterface> local_offer;
+ ASSERT_TRUE(DoCreateOffer(&local_offer, nullptr));
+ ASSERT_TRUE(DoSetLocalDescription(std::move(local_offer)));
+
+ // The preference must still be in effect after negotiation.
+ RtpParameters parameters_new = video_rtp_sender->GetParameters();
+ ASSERT_EQ(parameters_new.degradation_preference,
+ DegradationPreference::MAINTAIN_RESOLUTION);
+}
+
+// Run every TEST_P above under both SDP semantics.
+INSTANTIATE_TEST_SUITE_P(PeerConnectionInterfaceTest,
+ PeerConnectionInterfaceTest,
+ Values(SdpSemantics::kPlanB_DEPRECATED,
+ SdpSemantics::kUnifiedPlan));
+
+// Fixture for verifying how RTCConfiguration options map onto the
+// cricket::MediaConfig of a freshly created PeerConnection.
+class PeerConnectionMediaConfigTest : public ::testing::Test {
+ protected:
+ void SetUp() override {
+ pcf_ = PeerConnectionFactoryForTest::CreatePeerConnectionFactoryForTest();
+ }
+ // Creates a PeerConnection from `config` and returns the resulting
+ // media_config snapshot taken from GetConfiguration().
+ const cricket::MediaConfig TestCreatePeerConnection(
+ const RTCConfiguration& config) {
+ PeerConnectionDependencies pc_dependencies(&observer_);
+ auto result =
+ pcf_->CreatePeerConnectionOrError(config, std::move(pc_dependencies));
+ EXPECT_TRUE(result.ok());
+ observer_.SetPeerConnectionInterface(result.value().get());
+ return result.value()->GetConfiguration().media_config;
+ }
+
+ rtc::scoped_refptr<PeerConnectionFactoryForTest> pcf_;
+ MockPeerConnectionObserver observer_;
+};
+
+// This sanity check validates the test infrastructure itself.
+TEST_F(PeerConnectionMediaConfigTest, TestCreateAndClose) {
+ PeerConnectionInterface::RTCConfiguration config;
+ config.sdp_semantics = SdpSemantics::kUnifiedPlan;
+ PeerConnectionDependencies pc_dependencies(&observer_);
+ auto result =
+ pcf_->CreatePeerConnectionOrError(config, std::move(pc_dependencies));
+ EXPECT_TRUE(result.ok());
+ observer_.SetPeerConnectionInterface(result.value().get());
+ result.value()->Close(); // No abort -> ok.
+ SUCCEED();
+}
+
+// This test verifies the default behaviour with no constraints and a
+// default RTCConfiguration.
+TEST_F(PeerConnectionMediaConfigTest, TestDefaults) {
+ PeerConnectionInterface::RTCConfiguration config;
+ config.sdp_semantics = SdpSemantics::kUnifiedPlan;
+
+ const cricket::MediaConfig& media_config = TestCreatePeerConnection(config);
+
+ // Defaults asserted here: dscp, cpu adaptation and prerenderer smoothing
+ // on; suspend-below-min-bitrate and the cpu load estimator experiment off.
+ EXPECT_TRUE(media_config.enable_dscp);
+ EXPECT_TRUE(media_config.video.enable_cpu_adaptation);
+ EXPECT_TRUE(media_config.video.enable_prerenderer_smoothing);
+ EXPECT_FALSE(media_config.video.suspend_below_min_bitrate);
+ EXPECT_FALSE(media_config.video.experiment_cpu_load_estimator);
+}
+
+// This test verifies that the enable_prerenderer_smoothing flag is
+// propagated from RTCConfiguration to the PeerConnection.
+TEST_F(PeerConnectionMediaConfigTest, TestDisablePrerendererSmoothingTrue) {
+ PeerConnectionInterface::RTCConfiguration config;
+ config.sdp_semantics = SdpSemantics::kUnifiedPlan;
+
+ config.set_prerenderer_smoothing(false);
+ const cricket::MediaConfig& media_config = TestCreatePeerConnection(config);
+
+ EXPECT_FALSE(media_config.video.enable_prerenderer_smoothing);
+}
+
+// This test verifies that the experiment_cpu_load_estimator flag is
+// propagated from RTCConfiguration to the PeerConnection.
+TEST_F(PeerConnectionMediaConfigTest, TestEnableExperimentCpuLoadEstimator) {
+ PeerConnectionInterface::RTCConfiguration config;
+ config.sdp_semantics = SdpSemantics::kUnifiedPlan;
+
+ config.set_experiment_cpu_load_estimator(true);
+ const cricket::MediaConfig& media_config = TestCreatePeerConnection(config);
+
+ EXPECT_TRUE(media_config.video.experiment_cpu_load_estimator);
+}
+
+// Tests a few random fields being different.
+// Each configuration below differs from the default in exactly one field, so
+// operator!= must detect each field independently.
+TEST(RTCConfigurationTest, ComparisonOperators) {
+ PeerConnectionInterface::RTCConfiguration a;
+ PeerConnectionInterface::RTCConfiguration b;
+ EXPECT_EQ(a, b);
+
+ PeerConnectionInterface::RTCConfiguration c;
+ c.servers.push_back(PeerConnectionInterface::IceServer());
+ EXPECT_NE(a, c);
+
+ PeerConnectionInterface::RTCConfiguration d;
+ d.type = PeerConnectionInterface::kRelay;
+ EXPECT_NE(a, d);
+
+ PeerConnectionInterface::RTCConfiguration e;
+ e.audio_jitter_buffer_max_packets = 5;
+ EXPECT_NE(a, e);
+
+ PeerConnectionInterface::RTCConfiguration f;
+ f.ice_connection_receiving_timeout = 1337;
+ EXPECT_NE(a, f);
+
+ // A non-default constructor tag must also compare unequal to the default.
+ PeerConnectionInterface::RTCConfiguration h(
+ PeerConnectionInterface::RTCConfigurationType::kAggressive);
+ EXPECT_NE(a, h);
+}
+
+} // namespace
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/peer_connection_internal.h b/third_party/libwebrtc/pc/peer_connection_internal.h
new file mode 100644
index 0000000000..6fc1222804
--- /dev/null
+++ b/third_party/libwebrtc/pc/peer_connection_internal.h
@@ -0,0 +1,192 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_PEER_CONNECTION_INTERNAL_H_
+#define PC_PEER_CONNECTION_INTERNAL_H_
+
+#include <map>
+#include <memory>
+#include <set>
+#include <string>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/peer_connection_interface.h"
+#include "call/call.h"
+#include "modules/audio_device/include/audio_device.h"
+#include "pc/jsep_transport_controller.h"
+#include "pc/peer_connection_message_handler.h"
+#include "pc/rtp_transceiver.h"
+#include "pc/rtp_transmission_manager.h"
+#include "pc/sctp_data_channel.h"
+
+namespace webrtc {
+
+class DataChannelController;
+class LegacyStatsCollector;
+
+// This interface defines the functions that are needed for
+// SdpOfferAnswerHandler to access PeerConnection internal state.
+class PeerConnectionSdpMethods {
+ public:
+ virtual ~PeerConnectionSdpMethods() = default;
+
+ // The SDP session ID as defined by RFC 3264.
+ virtual std::string session_id() const = 0;
+
+ // Returns true if the ICE restart flag above was set, and no ICE restart has
+ // occurred yet for this transport (by applying a local description with
+ // changed ufrag/password). If the transport has been deleted as a result of
+ // bundling, returns false.
+ virtual bool NeedsIceRestart(const std::string& content_name) const = 0;
+
+ // The mid of the data channel m= section, if one has been set.
+ virtual absl::optional<std::string> sctp_mid() const = 0;
+
+ // Functions below this comment are known to only be accessed
+ // from SdpOfferAnswerHandler.
+ // Return a pointer to the active configuration.
+ virtual const PeerConnectionInterface::RTCConfiguration* configuration()
+ const = 0;
+
+ // Report the UMA metric BundleUsage for the given remote description.
+ virtual void ReportSdpBundleUsage(
+ const SessionDescriptionInterface& remote_description) = 0;
+
+ // Accessors for internal collaborators used during offer/answer handling.
+ virtual PeerConnectionMessageHandler* message_handler() = 0;
+ virtual RtpTransmissionManager* rtp_manager() = 0;
+ virtual const RtpTransmissionManager* rtp_manager() const = 0;
+ virtual bool dtls_enabled() const = 0;
+ virtual const PeerConnectionFactoryInterface::Options* options() const = 0;
+
+ // Returns the CryptoOptions for this PeerConnection. This will always
+ // return the RTCConfiguration.crypto_options if set and will only default
+ // back to the PeerConnectionFactory settings if nothing was set.
+ virtual CryptoOptions GetCryptoOptions() = 0;
+ // NOTE(review): the `_s`/`_n` suffixes presumably distinguish signaling- vs
+ // network-thread access, matching the `_n` convention used by
+ // GetSctpSslRole_n below — confirm against the implementing class.
+ virtual JsepTransportController* transport_controller_s() = 0;
+ virtual JsepTransportController* transport_controller_n() = 0;
+ virtual DataChannelController* data_channel_controller() = 0;
+ virtual cricket::PortAllocator* port_allocator() = 0;
+ virtual LegacyStatsCollector* legacy_stats() = 0;
+ // Returns the observer. Will crash on CHECK if the observer is removed.
+ virtual PeerConnectionObserver* Observer() const = 0;
+ virtual absl::optional<rtc::SSLRole> GetSctpSslRole_n() = 0;
+ virtual PeerConnectionInterface::IceConnectionState
+ ice_connection_state_internal() = 0;
+ virtual void SetIceConnectionState(
+ PeerConnectionInterface::IceConnectionState new_state) = 0;
+ virtual void NoteUsageEvent(UsageEvent event) = 0;
+ virtual bool IsClosed() const = 0;
+ // Returns true if the PeerConnection is configured to use Unified Plan
+ // semantics for creating offers/answers and setting local/remote
+ // descriptions. If this is true the RtpTransceiver API will also be available
+ // to the user. If this is false, Plan B semantics are assumed.
+ // TODO(bugs.webrtc.org/8530): Flip the default to be Unified Plan once
+ // sufficient time has passed.
+ virtual bool IsUnifiedPlan() const = 0;
+ virtual bool ValidateBundleSettings(
+ const cricket::SessionDescription* desc,
+ const std::map<std::string, const cricket::ContentGroup*>&
+ bundle_groups_by_mid) = 0;
+
+ // Internal implementation for AddTransceiver family of methods. If
+ // `fire_callback` is set, fires OnRenegotiationNeeded callback if successful.
+ virtual RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>>
+ AddTransceiver(cricket::MediaType media_type,
+ rtc::scoped_refptr<MediaStreamTrackInterface> track,
+ const RtpTransceiverInit& init,
+ bool fire_callback = true) = 0;
+ // Asynchronously calls SctpTransport::Start() on the network thread for
+ // `sctp_mid()` if set. Called as part of setting the local description.
+ virtual void StartSctpTransport(int local_port,
+ int remote_port,
+ int max_message_size) = 0;
+
+ // Asynchronously adds a remote candidate on the network thread.
+ virtual void AddRemoteCandidate(const std::string& mid,
+ const cricket::Candidate& candidate) = 0;
+
+ virtual Call* call_ptr() = 0;
+ // Returns true if SRTP (either using DTLS-SRTP or SDES) is required by
+ // this session.
+ virtual bool SrtpRequired() const = 0;
+ // Initializes the data channel transport for the peerconnection instance.
+ // This will have the effect that `sctp_mid()` and `sctp_transport_name()`
+ // will return a set value (even though it might be an empty string) and the
+ // dc transport will be initialized on the network thread.
+ virtual bool CreateDataChannelTransport(absl::string_view mid) = 0;
+ // Tears down the data channel transport state and clears the `sctp_mid()` and
+ // `sctp_transport_name()` properties.
+ virtual void DestroyDataChannelTransport(RTCError error) = 0;
+ virtual const FieldTrialsView& trials() const = 0;
+
+ // Invalidates any cached stats so the next stats request recomputes them.
+ virtual void ClearStatsCache() = 0;
+};
+
+// Functions defined in this class are called by other objects,
+// but not by SdpOfferAnswerHandler.
+class PeerConnectionInternal : public PeerConnectionInterface,
+ public PeerConnectionSdpMethods {
+ public:
+ // Thread accessors for the network and worker threads owned by the factory.
+ virtual rtc::Thread* network_thread() const = 0;
+ virtual rtc::Thread* worker_thread() const = 0;
+
+ // Returns true if we were the initial offerer.
+ virtual bool initial_offerer() const = 0;
+
+ virtual std::vector<
+ rtc::scoped_refptr<RtpTransceiverProxyWithInternal<RtpTransceiver>>>
+ GetTransceiversInternal() const = 0;
+
+ // Call on the network thread to fetch stats for all the data channels.
+ // TODO(tommi): Make pure virtual after downstream updates.
+ virtual std::vector<DataChannelStats> GetDataChannelStats() const {
+ return {};
+ }
+
+ virtual absl::optional<std::string> sctp_transport_name() const = 0;
+
+ virtual cricket::CandidateStatsList GetPooledCandidateStats() const = 0;
+
+ // Returns a map from transport name to transport stats for all given
+ // transport names.
+ // Must be called on the network thread.
+ virtual std::map<std::string, cricket::TransportStats>
+ GetTransportStatsByNames(const std::set<std::string>& transport_names) = 0;
+
+ virtual Call::Stats GetCallStats() = 0;
+
+ virtual absl::optional<AudioDeviceModule::Stats> GetAudioDeviceStats() = 0;
+
+ // Certificate accessors for a named transport; used by stats collection.
+ virtual bool GetLocalCertificate(
+ const std::string& transport_name,
+ rtc::scoped_refptr<rtc::RTCCertificate>* certificate) = 0;
+ virtual std::unique_ptr<rtc::SSLCertChain> GetRemoteSSLCertChain(
+ const std::string& transport_name) = 0;
+
+ // Returns true if there was an ICE restart initiated by the remote offer.
+ virtual bool IceRestartPending(const std::string& content_name) const = 0;
+
+ // Get SSL role for an arbitrary m= section (handles bundling correctly).
+ virtual bool GetSslRole(const std::string& content_name,
+ rtc::SSLRole* role) = 0;
+ // Functions needed by DataChannelController
+ // Default no-op implementations keep downstream subclasses compiling.
+ virtual void NoteDataAddedEvent() {}
+ // Handler for sctp data channel state changes.
+ // The `channel_id` is the same unique identifier as used in
+ // `DataChannelStats::internal_id and
+ // `RTCDataChannelStats::data_channel_identifier`.
+ virtual void OnSctpDataChannelStateChanged(
+ int channel_id,
+ DataChannelInterface::DataState state) {}
+};
+
+} // namespace webrtc
+
+#endif // PC_PEER_CONNECTION_INTERNAL_H_
diff --git a/third_party/libwebrtc/pc/peer_connection_jsep_unittest.cc b/third_party/libwebrtc/pc/peer_connection_jsep_unittest.cc
new file mode 100644
index 0000000000..1369253ad6
--- /dev/null
+++ b/third_party/libwebrtc/pc/peer_connection_jsep_unittest.cc
@@ -0,0 +1,2421 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stddef.h>
+
+#include <algorithm>
+#include <map>
+#include <memory>
+#include <string>
+#include <tuple>
+#include <utility>
+#include <vector>
+
+#include "absl/strings/string_view.h"
+#include "absl/types/optional.h"
+#include "api/call/call_factory_interface.h"
+#include "api/field_trials_view.h"
+#include "api/jsep.h"
+#include "api/media_stream_interface.h"
+#include "api/media_types.h"
+#include "api/peer_connection_interface.h"
+#include "api/rtc_error.h"
+#include "api/rtp_parameters.h"
+#include "api/rtp_receiver_interface.h"
+#include "api/rtp_sender_interface.h"
+#include "api/rtp_transceiver_direction.h"
+#include "api/rtp_transceiver_interface.h"
+#include "api/scoped_refptr.h"
+#include "api/task_queue/default_task_queue_factory.h"
+#include "api/task_queue/task_queue_factory.h"
+#include "api/transport/field_trial_based_config.h"
+#include "api/transport/sctp_transport_factory_interface.h"
+#include "media/base/media_engine.h"
+#include "media/base/stream_params.h"
+#include "media/engine/webrtc_media_engine.h"
+#include "media/engine/webrtc_media_engine_defaults.h"
+#include "modules/audio_device/include/audio_device.h"
+#include "p2p/base/p2p_constants.h"
+#include "p2p/base/port_allocator.h"
+#include "p2p/base/transport_info.h"
+#include "pc/channel_interface.h"
+#include "pc/media_session.h"
+#include "pc/peer_connection_wrapper.h"
+#include "pc/sdp_utils.h"
+#include "pc/session_description.h"
+#include "pc/test/mock_peer_connection_observers.h"
+#include "rtc_base/rtc_certificate_generator.h"
+#include "rtc_base/thread.h"
+#include "test/gtest.h"
+#ifdef WEBRTC_ANDROID
+#include "pc/test/android_test_initializer.h"
+#endif
+#include "pc/test/fake_audio_capture_module.h"
+#include "rtc_base/virtual_socket_server.h"
+#include "test/gmock.h"
+#include "test/pc/sctp/fake_sctp_transport.h"
+
+// This file contains tests that ensure the PeerConnection's implementation of
+// CreateOffer/CreateAnswer/SetLocalDescription/SetRemoteDescription conform
+// to the JavaScript Session Establishment Protocol (JSEP).
+// For now these semantics are only available when configuring the
+// PeerConnection with Unified Plan, but eventually that will be the default.
+
+namespace webrtc {
+
+using cricket::MediaContentDescription;
+using RTCConfiguration = PeerConnectionInterface::RTCConfiguration;
+using ::testing::Combine;
+using ::testing::ElementsAre;
+using ::testing::UnorderedElementsAre;
+using ::testing::Values;
+
+PeerConnectionFactoryDependencies CreatePeerConnectionFactoryDependencies() {
+ PeerConnectionFactoryDependencies dependencies;
+ dependencies.worker_thread = rtc::Thread::Current();
+ dependencies.network_thread = rtc::Thread::Current();
+ dependencies.signaling_thread = rtc::Thread::Current();
+ dependencies.task_queue_factory = CreateDefaultTaskQueueFactory();
+ dependencies.trials = std::make_unique<FieldTrialBasedConfig>();
+ cricket::MediaEngineDependencies media_deps;
+ media_deps.task_queue_factory = dependencies.task_queue_factory.get();
+ media_deps.adm = FakeAudioCaptureModule::Create();
+ media_deps.trials = dependencies.trials.get();
+ SetMediaEngineDefaults(&media_deps);
+ dependencies.media_engine = cricket::CreateMediaEngine(std::move(media_deps));
+ dependencies.call_factory = CreateCallFactory();
+ dependencies.sctp_factory = std::make_unique<FakeSctpTransportFactory>();
+ return dependencies;
+}
+
+class PeerConnectionJsepTest : public ::testing::Test {
+ protected:
+ typedef std::unique_ptr<PeerConnectionWrapper> WrapperPtr;
+
+ PeerConnectionJsepTest()
+ : vss_(new rtc::VirtualSocketServer()), main_(vss_.get()) {
+#ifdef WEBRTC_ANDROID
+ InitializeAndroidObjects();
+#endif
+ }
+
+ WrapperPtr CreatePeerConnection() {
+ RTCConfiguration config;
+ config.sdp_semantics = SdpSemantics::kUnifiedPlan;
+ return CreatePeerConnection(config);
+ }
+
+ WrapperPtr CreatePeerConnection(const RTCConfiguration& config) {
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> pc_factory =
+ CreateModularPeerConnectionFactory(
+ CreatePeerConnectionFactoryDependencies());
+ auto observer = std::make_unique<MockPeerConnectionObserver>();
+ auto result = pc_factory->CreatePeerConnectionOrError(
+ config, PeerConnectionDependencies(observer.get()));
+ if (!result.ok()) {
+ return nullptr;
+ }
+
+ observer->SetPeerConnectionInterface(result.value().get());
+ return std::make_unique<PeerConnectionWrapper>(
+ pc_factory, result.MoveValue(), std::move(observer));
+ }
+
+ std::unique_ptr<rtc::VirtualSocketServer> vss_;
+ rtc::AutoSocketServerThread main_;
+};
+
+// Tests for JSEP initial offer generation.
+
+// Test that an offer created by a PeerConnection with no transceivers generates
+// no media sections.
+TEST_F(PeerConnectionJsepTest, EmptyInitialOffer) {
+ auto caller = CreatePeerConnection();
+
+ auto offer = caller->CreateOffer();
+ ASSERT_EQ(0u, offer->description()->contents().size());
+}
+
+// Test that an initial offer with one audio track generates one audio media
+// section.
+TEST_F(PeerConnectionJsepTest, AudioOnlyInitialOffer) {
+ auto caller = CreatePeerConnection();
+ caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+
+ auto offer = caller->CreateOffer();
+ auto contents = offer->description()->contents();
+ ASSERT_EQ(1u, contents.size());
+ EXPECT_EQ(cricket::MEDIA_TYPE_AUDIO, contents[0].media_description()->type());
+}
+
+// Test than an initial offer with one video track generates one video media
+// section
+TEST_F(PeerConnectionJsepTest, VideoOnlyInitialOffer) {
+ auto caller = CreatePeerConnection();
+ caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO);
+
+ auto offer = caller->CreateOffer();
+ auto contents = offer->description()->contents();
+ ASSERT_EQ(1u, contents.size());
+ EXPECT_EQ(cricket::MEDIA_TYPE_VIDEO, contents[0].media_description()->type());
+}
+
+// Test that an initial offer with one data channel generates one data media
+// section.
+TEST_F(PeerConnectionJsepTest, DataOnlyInitialOffer) {
+ auto caller = CreatePeerConnection();
+ caller->CreateDataChannel("dc");
+
+ auto offer = caller->CreateOffer();
+ auto contents = offer->description()->contents();
+ ASSERT_EQ(1u, contents.size());
+ EXPECT_EQ(cricket::MEDIA_TYPE_DATA, contents[0].media_description()->type());
+}
+
+// Test that creating multiple data channels only results in one data section
+// generated in the offer.
+TEST_F(PeerConnectionJsepTest, MultipleDataChannelsCreateOnlyOneDataSection) {
+ auto caller = CreatePeerConnection();
+ caller->CreateDataChannel("first");
+ caller->CreateDataChannel("second");
+ caller->CreateDataChannel("third");
+
+ auto offer = caller->CreateOffer();
+ ASSERT_EQ(1u, offer->description()->contents().size());
+}
+
+// Test that multiple media sections in the initial offer are ordered in the
+// order the transceivers were added to the PeerConnection. This is required by
+// JSEP section 5.2.1.
+TEST_F(PeerConnectionJsepTest, MediaSectionsInInitialOfferOrderedCorrectly) {
+ auto caller = CreatePeerConnection();
+ caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO);
+ caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+ RtpTransceiverInit init;
+ init.direction = RtpTransceiverDirection::kSendOnly;
+ caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO, init);
+
+ auto offer = caller->CreateOffer();
+ auto contents = offer->description()->contents();
+ ASSERT_EQ(3u, contents.size());
+
+ const MediaContentDescription* media_description1 =
+ contents[0].media_description();
+ EXPECT_EQ(cricket::MEDIA_TYPE_VIDEO, media_description1->type());
+ EXPECT_EQ(RtpTransceiverDirection::kSendRecv,
+ media_description1->direction());
+
+ const MediaContentDescription* media_description2 =
+ contents[1].media_description();
+ EXPECT_EQ(cricket::MEDIA_TYPE_AUDIO, media_description2->type());
+ EXPECT_EQ(RtpTransceiverDirection::kSendRecv,
+ media_description2->direction());
+
+ const MediaContentDescription* media_description3 =
+ contents[2].media_description();
+ EXPECT_EQ(cricket::MEDIA_TYPE_VIDEO, media_description3->type());
+ EXPECT_EQ(RtpTransceiverDirection::kSendOnly,
+ media_description3->direction());
+}
+
+// Test that media sections in the initial offer have different mids.
+TEST_F(PeerConnectionJsepTest, MediaSectionsInInitialOfferHaveDifferentMids) {
+ auto caller = CreatePeerConnection();
+ caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+ caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+
+ auto offer = caller->CreateOffer();
+ auto contents = offer->description()->contents();
+ ASSERT_EQ(2u, contents.size());
+ EXPECT_NE(contents[0].name, contents[1].name);
+}
+
+TEST_F(PeerConnectionJsepTest,
+ StoppedTransceiverHasNoMediaSectionInInitialOffer) {
+ auto caller = CreatePeerConnection();
+ auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+ transceiver->StopInternal();
+
+ auto offer = caller->CreateOffer();
+ EXPECT_EQ(0u, offer->description()->contents().size());
+}
+
+// Tests for JSEP SetLocalDescription with a local offer.
+
+TEST_F(PeerConnectionJsepTest, SetLocalEmptyOfferCreatesNoTransceivers) {
+ auto caller = CreatePeerConnection();
+
+ ASSERT_TRUE(caller->SetLocalDescription(caller->CreateOffer()));
+
+ EXPECT_THAT(caller->pc()->GetTransceivers(), ElementsAre());
+ EXPECT_THAT(caller->pc()->GetSenders(), ElementsAre());
+ EXPECT_THAT(caller->pc()->GetReceivers(), ElementsAre());
+}
+
+TEST_F(PeerConnectionJsepTest, SetLocalOfferSetsTransceiverMid) {
+ auto caller = CreatePeerConnection();
+ auto audio_transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+ auto video_transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO);
+
+ auto offer = caller->CreateOffer();
+ std::string audio_mid = offer->description()->contents()[0].name;
+ std::string video_mid = offer->description()->contents()[1].name;
+
+ ASSERT_TRUE(caller->SetLocalDescription(std::move(offer)));
+
+ EXPECT_EQ(audio_mid, audio_transceiver->mid());
+ EXPECT_EQ(video_mid, video_transceiver->mid());
+}
+
+// Tests for JSEP SetRemoteDescription with a remote offer.
+
+// Test that setting a remote offer with sendrecv audio and video creates two
+// transceivers, one for receiving audio and one for receiving video.
+TEST_F(PeerConnectionJsepTest, SetRemoteOfferCreatesTransceivers) {
+ auto caller = CreatePeerConnection();
+ auto caller_audio = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+ auto caller_video = caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO);
+ auto callee = CreatePeerConnection();
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+
+ auto transceivers = callee->pc()->GetTransceivers();
+ ASSERT_EQ(2u, transceivers.size());
+
+ EXPECT_EQ(cricket::MEDIA_TYPE_AUDIO, transceivers[0]->media_type());
+ EXPECT_EQ(caller_audio->mid(), transceivers[0]->mid());
+ EXPECT_EQ(RtpTransceiverDirection::kRecvOnly, transceivers[0]->direction());
+ EXPECT_EQ(0u, transceivers[0]->sender()->stream_ids().size());
+
+ EXPECT_EQ(cricket::MEDIA_TYPE_VIDEO, transceivers[1]->media_type());
+ EXPECT_EQ(caller_video->mid(), transceivers[1]->mid());
+ EXPECT_EQ(RtpTransceiverDirection::kRecvOnly, transceivers[1]->direction());
+ EXPECT_EQ(0u, transceivers[1]->sender()->stream_ids().size());
+}
+
+// Test that setting a remote offer with an audio track will reuse the
+// transceiver created for a local audio track added by AddTrack.
+// This is specified in JSEP section 5.10 (Applying a Remote Description). The
+// intent is to preserve backwards compatibility with clients who only use the
+// AddTrack API.
+TEST_F(PeerConnectionJsepTest, SetRemoteOfferReusesTransceiverFromAddTrack) {
+ auto caller = CreatePeerConnection();
+ caller->AddAudioTrack("a");
+ auto caller_audio = caller->pc()->GetTransceivers()[0];
+ auto callee = CreatePeerConnection();
+ callee->AddAudioTrack("a");
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+
+ auto transceivers = callee->pc()->GetTransceivers();
+ ASSERT_EQ(1u, transceivers.size());
+ EXPECT_EQ(MediaStreamTrackInterface::kAudioKind,
+ transceivers[0]->receiver()->track()->kind());
+ EXPECT_EQ(caller_audio->mid(), transceivers[0]->mid());
+}
+
+// Test that setting a remote offer with an audio track marked sendonly will not
+// reuse a transceiver created by AddTrack. JSEP only allows the transceiver to
+// be reused if the offer direction is sendrecv or recvonly.
+TEST_F(PeerConnectionJsepTest,
+ SetRemoteOfferDoesNotReuseTransceiverIfDirectionSendOnly) {
+ auto caller = CreatePeerConnection();
+ caller->AddAudioTrack("a");
+ auto caller_audio = caller->pc()->GetTransceivers()[0];
+ caller_audio->SetDirectionWithError(RtpTransceiverDirection::kSendOnly);
+ auto callee = CreatePeerConnection();
+ callee->AddAudioTrack("a");
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+
+ auto transceivers = callee->pc()->GetTransceivers();
+ ASSERT_EQ(2u, transceivers.size());
+ EXPECT_EQ(absl::nullopt, transceivers[0]->mid());
+ EXPECT_EQ(caller_audio->mid(), transceivers[1]->mid());
+}
+
+// Test that setting a remote offer with an audio track will not reuse a
+// transceiver added by AddTransceiver. The logic for reusing a transceiver is
+// specific to those added by AddTrack and is tested above.
+TEST_F(PeerConnectionJsepTest,
+ SetRemoteOfferDoesNotReuseTransceiverFromAddTransceiver) {
+ auto caller = CreatePeerConnection();
+ caller->AddAudioTrack("a");
+ auto callee = CreatePeerConnection();
+ auto transceiver = callee->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+
+ auto transceivers = callee->pc()->GetTransceivers();
+ ASSERT_EQ(2u, transceivers.size());
+ EXPECT_EQ(absl::nullopt, transceivers[0]->mid());
+ EXPECT_EQ(caller->pc()->GetTransceivers()[0]->mid(), transceivers[1]->mid());
+ EXPECT_EQ(MediaStreamTrackInterface::kAudioKind,
+ transceivers[1]->receiver()->track()->kind());
+}
+
+// Test that setting a remote offer with an audio track will not reuse a
+// transceiver created for a local video track added by AddTrack.
+TEST_F(PeerConnectionJsepTest,
+ SetRemoteOfferDoesNotReuseTransceiverOfWrongType) {
+ auto caller = CreatePeerConnection();
+ caller->AddAudioTrack("a");
+ auto callee = CreatePeerConnection();
+ auto video_sender = callee->AddVideoTrack("v");
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+
+ auto transceivers = callee->pc()->GetTransceivers();
+ ASSERT_EQ(2u, transceivers.size());
+ EXPECT_EQ(absl::nullopt, transceivers[0]->mid());
+ EXPECT_EQ(caller->pc()->GetTransceivers()[0]->mid(), transceivers[1]->mid());
+ EXPECT_EQ(MediaStreamTrackInterface::kAudioKind,
+ transceivers[1]->receiver()->track()->kind());
+}
+
+// Test that setting a remote offer with an audio track will not reuse a
+// stopped transceiver.
+TEST_F(PeerConnectionJsepTest, SetRemoteOfferDoesNotReuseStoppedTransceiver) {
+ auto caller = CreatePeerConnection();
+ caller->AddAudioTrack("a");
+ auto callee = CreatePeerConnection();
+ callee->AddAudioTrack("a");
+ callee->pc()->GetTransceivers()[0]->StopInternal();
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+
+ auto transceivers = callee->pc()->GetTransceivers();
+ ASSERT_EQ(2u, transceivers.size());
+ // The stopped transceiver is removed in SetLocalDescription(answer)
+ ASSERT_TRUE(callee->SetLocalDescription(callee->CreateAnswer()));
+ transceivers = callee->pc()->GetTransceivers();
+ ASSERT_EQ(1u, transceivers.size());
+ EXPECT_EQ(caller->pc()->GetTransceivers()[0]->mid(), transceivers[0]->mid());
+ EXPECT_FALSE(transceivers[0]->stopped());
+}
+
+// Test that audio and video transceivers created on the remote side with
+// AddTrack will all be reused if there is the same number of audio/video tracks
+// in the remote offer. Additionally, this tests that transceivers are
+// successfully matched even if they are in a different order on the remote
+// side.
+TEST_F(PeerConnectionJsepTest, SetRemoteOfferReusesTransceiversOfBothTypes) {
+ auto caller = CreatePeerConnection();
+ caller->AddVideoTrack("v");
+ caller->AddAudioTrack("a");
+ auto callee = CreatePeerConnection();
+ callee->AddAudioTrack("a");
+ callee->AddVideoTrack("v");
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+
+ auto caller_transceivers = caller->pc()->GetTransceivers();
+ auto callee_transceivers = callee->pc()->GetTransceivers();
+ ASSERT_EQ(2u, callee_transceivers.size());
+ EXPECT_EQ(caller_transceivers[0]->mid(), callee_transceivers[1]->mid());
+ EXPECT_EQ(caller_transceivers[1]->mid(), callee_transceivers[0]->mid());
+}
+
+// Tests for JSEP initial CreateAnswer.
+
+// Test that the answer to a remote offer creates media sections for each
+// offered media in the same order and with the same mids.
+TEST_F(PeerConnectionJsepTest, CreateAnswerHasSameMidsAsOffer) {
+ auto caller = CreatePeerConnection();
+ auto first_transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO);
+ auto second_transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+ auto third_transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO);
+ caller->CreateDataChannel("dc");
+ auto callee = CreatePeerConnection();
+
+ auto offer = caller->CreateOffer();
+ const auto* offer_data = cricket::GetFirstDataContent(offer->description());
+ ASSERT_TRUE(
+ caller->SetLocalDescription(CloneSessionDescription(offer.get())));
+ ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer)));
+
+ auto answer = callee->CreateAnswer();
+ auto contents = answer->description()->contents();
+ ASSERT_EQ(4u, contents.size());
+ EXPECT_EQ(cricket::MEDIA_TYPE_VIDEO, contents[0].media_description()->type());
+ EXPECT_EQ(first_transceiver->mid(), contents[0].name);
+ EXPECT_EQ(cricket::MEDIA_TYPE_AUDIO, contents[1].media_description()->type());
+ EXPECT_EQ(second_transceiver->mid(), contents[1].name);
+ EXPECT_EQ(cricket::MEDIA_TYPE_VIDEO, contents[2].media_description()->type());
+ EXPECT_EQ(third_transceiver->mid(), contents[2].name);
+ EXPECT_EQ(cricket::MEDIA_TYPE_DATA, contents[3].media_description()->type());
+ EXPECT_EQ(offer_data->name, contents[3].name);
+}
+
+// Test that an answering media section is marked as rejected if the underlying
+// transceiver has been stopped.
+TEST_F(PeerConnectionJsepTest, CreateAnswerRejectsStoppedTransceiver) {
+ auto caller = CreatePeerConnection();
+ caller->AddAudioTrack("a");
+ auto callee = CreatePeerConnection();
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+
+ callee->pc()->GetTransceivers()[0]->StopInternal();
+
+ auto answer = callee->CreateAnswer();
+ auto contents = answer->description()->contents();
+ ASSERT_EQ(1u, contents.size());
+ EXPECT_TRUE(contents[0].rejected);
+}
+
+// Test that CreateAnswer will generate media sections which will only send or
+// receive if the offer indicates it can do the reciprocating direction.
+// The full matrix is tested more extensively in MediaSession.
+TEST_F(PeerConnectionJsepTest, CreateAnswerNegotiatesDirection) {
+ auto caller = CreatePeerConnection();
+ RtpTransceiverInit init;
+ init.direction = RtpTransceiverDirection::kSendOnly;
+ caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO, init);
+ auto callee = CreatePeerConnection();
+ callee->AddAudioTrack("a");
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+
+ auto answer = callee->CreateAnswer();
+ auto contents = answer->description()->contents();
+ ASSERT_EQ(1u, contents.size());
+ EXPECT_EQ(RtpTransceiverDirection::kRecvOnly,
+ contents[0].media_description()->direction());
+}
+
+// Tests for JSEP SetLocalDescription with a local answer.
+// Note that these test only the additional behaviors not covered by
+// SetLocalDescription with a local offer.
+
+// Test that SetLocalDescription with an answer sets the current_direction
+// property of the transceivers mentioned in the session description.
+TEST_F(PeerConnectionJsepTest, SetLocalAnswerUpdatesCurrentDirection) {
+ auto caller = CreatePeerConnection();
+ auto caller_audio = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+ caller_audio->SetDirectionWithError(RtpTransceiverDirection::kRecvOnly);
+ auto callee = CreatePeerConnection();
+ callee->AddAudioTrack("a");
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ ASSERT_TRUE(callee->SetLocalDescription(callee->CreateAnswer()));
+
+ auto transceivers = callee->pc()->GetTransceivers();
+ ASSERT_EQ(1u, transceivers.size());
+ // Since the offer was recvonly and the transceiver direction is sendrecv,
+ // the negotiated direction will be sendonly.
+ EXPECT_EQ(RtpTransceiverDirection::kSendOnly,
+ transceivers[0]->current_direction());
+}
+
+// Tests for JSEP SetRemoteDescription with a remote answer.
+// Note that these test only the additional behaviors not covered by
+// SetRemoteDescription with a remote offer.
+
+TEST_F(PeerConnectionJsepTest, SetRemoteAnswerUpdatesCurrentDirection) {
+ auto caller = CreatePeerConnection();
+ caller->AddAudioTrack("a");
+ auto callee = CreatePeerConnection();
+ callee->AddAudioTrack("a");
+ auto callee_audio = callee->pc()->GetTransceivers()[0];
+ callee_audio->SetDirectionWithError(RtpTransceiverDirection::kSendOnly);
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ ASSERT_TRUE(
+ caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal()));
+
+ auto transceivers = caller->pc()->GetTransceivers();
+ ASSERT_EQ(1u, transceivers.size());
+ // Since the remote transceiver was set to sendonly, the negotiated direction
+ // in the answer would be sendonly which we apply as recvonly to the local
+ // transceiver.
+ EXPECT_EQ(RtpTransceiverDirection::kRecvOnly,
+ transceivers[0]->current_direction());
+}
+
+TEST_F(PeerConnectionJsepTest,
+ ChangeDirectionFromRecvOnlyToSendRecvDoesNotBreakVideoNegotiation) {
+ auto caller = CreatePeerConnection();
+ auto caller_transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO);
+ auto callee = CreatePeerConnection();
+ caller_transceiver->SetDirectionWithError(RtpTransceiverDirection::kRecvOnly);
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ ASSERT_TRUE(
+ caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal()));
+
+ caller_transceiver->SetDirectionWithError(RtpTransceiverDirection::kSendRecv);
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ ASSERT_TRUE(
+ caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal()));
+}
+
+TEST_F(PeerConnectionJsepTest,
+ ChangeDirectionFromRecvOnlyToSendRecvDoesNotBreakAudioNegotiation) {
+ auto caller = CreatePeerConnection();
+ auto caller_transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+ auto callee = CreatePeerConnection();
+ caller_transceiver->SetDirectionWithError(RtpTransceiverDirection::kRecvOnly);
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ ASSERT_TRUE(
+ caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal()));
+
+ caller_transceiver->SetDirectionWithError(RtpTransceiverDirection::kSendRecv);
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ ASSERT_TRUE(
+ caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal()));
+}
+
+// Tests for multiple round trips.
+
+// Test that setting a transceiver with the inactive direction does not stop it
+// on either the caller or the callee.
+TEST_F(PeerConnectionJsepTest, SettingTransceiverInactiveDoesNotStopIt) {
+ auto caller = CreatePeerConnection();
+ caller->AddAudioTrack("a");
+ auto callee = CreatePeerConnection();
+ callee->AddAudioTrack("a");
+ callee->pc()->GetTransceivers()[0]->SetDirectionWithError(
+ RtpTransceiverDirection::kInactive);
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ ASSERT_TRUE(
+ caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal()));
+
+ EXPECT_FALSE(caller->pc()->GetTransceivers()[0]->stopped());
+ EXPECT_FALSE(callee->pc()->GetTransceivers()[0]->stopped());
+}
+
+// Test that if a transceiver had been associated and later stopped, then a
+// media section is still generated for it and the media section is marked as
+// rejected.
+TEST_F(PeerConnectionJsepTest,
+ ReOfferMediaSectionForAssociatedStoppedTransceiverIsRejected) {
+ auto caller = CreatePeerConnection();
+ auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+ auto callee = CreatePeerConnection();
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ ASSERT_TRUE(
+ caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal()));
+
+ ASSERT_TRUE(transceiver->mid());
+ transceiver->StopInternal();
+
+ auto reoffer = caller->CreateOffer();
+ auto contents = reoffer->description()->contents();
+ ASSERT_EQ(1u, contents.size());
+ EXPECT_TRUE(contents[0].rejected);
+}
+
+// Test that stopping an associated transceiver on the caller side will stop the
+// corresponding transceiver on the remote side when the remote offer is
+// applied.
+TEST_F(PeerConnectionJsepTest,
+ StoppingTransceiverInOfferStopsTransceiverOnRemoteSide) {
+ auto caller = CreatePeerConnection();
+ auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+ auto callee = CreatePeerConnection();
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ ASSERT_TRUE(
+ caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal()));
+
+ transceiver->StopInternal();
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+
+ auto transceivers = callee->pc()->GetTransceivers();
+ EXPECT_EQ(1u, transceivers.size());
+ ASSERT_TRUE(callee->SetLocalDescription(callee->CreateAnswer()));
+ transceivers = callee->pc()->GetTransceivers();
+ EXPECT_EQ(0u, transceivers.size());
+}
+
+// Test that CreateOffer will only generate a recycled media section if the
+// transceiver to be recycled has been seen stopped by the other side first.
+TEST_F(PeerConnectionJsepTest,
+ CreateOfferDoesNotRecycleMediaSectionIfFirstStopped) {
+ auto caller = CreatePeerConnection();
+ auto first_transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+ auto callee = CreatePeerConnection();
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ ASSERT_TRUE(
+ caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal()));
+
+ auto second_transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+ first_transceiver->StopInternal();
+
+ auto reoffer = caller->CreateOffer();
+ auto contents = reoffer->description()->contents();
+ ASSERT_EQ(2u, contents.size());
+ EXPECT_TRUE(contents[0].rejected);
+ EXPECT_FALSE(contents[1].rejected);
+}
+
+// Test that the offer/answer and the transceivers are correctly generated and
+// updated when the media section is recycled after the callee stops a
+// transceiver and sends an answer with a 0 port.
+TEST_F(PeerConnectionJsepTest,
+ RecycleMediaSectionWhenStoppingTransceiverOnAnswerer) {
+ auto caller = CreatePeerConnection();
+ auto first_transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+ auto callee = CreatePeerConnection();
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ std::string first_mid = *first_transceiver->mid();
+ ASSERT_EQ(1u, callee->pc()->GetTransceivers().size());
+ callee->pc()->GetTransceivers()[0]->StopInternal();
+ ASSERT_EQ(1u, callee->pc()->GetTransceivers().size());
+ ASSERT_TRUE(
+ caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal()));
+ EXPECT_TRUE(first_transceiver->stopped());
+ // First transceivers are dissociated on caller side.
+ ASSERT_EQ(absl::nullopt, first_transceiver->mid());
+ // They are disassociated on callee side.
+ ASSERT_EQ(0u, callee->pc()->GetTransceivers().size());
+
+ // New offer exchange with new transceivers that recycles the m section
+ // correctly.
+ caller->AddAudioTrack("audio2");
+ callee->AddAudioTrack("audio2");
+ auto offer = caller->CreateOffer();
+ auto offer_contents = offer->description()->contents();
+ std::string second_mid = offer_contents[0].name;
+ ASSERT_EQ(1u, offer_contents.size());
+ EXPECT_FALSE(offer_contents[0].rejected);
+ EXPECT_NE(first_mid, second_mid);
+
+ // Setting the offer on each side will dissociate the first transceivers and
+ // associate the new transceivers.
+ ASSERT_TRUE(
+ caller->SetLocalDescription(CloneSessionDescription(offer.get())));
+ EXPECT_EQ(absl::nullopt, first_transceiver->mid());
+ ASSERT_EQ(1u, caller->pc()->GetTransceivers().size());
+ EXPECT_EQ(second_mid, caller->pc()->GetTransceivers()[0]->mid());
+ ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer)));
+ ASSERT_EQ(1u, callee->pc()->GetTransceivers().size());
+ EXPECT_EQ(second_mid, callee->pc()->GetTransceivers()[0]->mid());
+
+ // The new answer should also recycle the m section correctly.
+ auto answer = callee->CreateAnswer();
+ auto answer_contents = answer->description()->contents();
+ ASSERT_EQ(1u, answer_contents.size());
+ EXPECT_FALSE(answer_contents[0].rejected);
+ EXPECT_EQ(second_mid, answer_contents[0].name);
+
+ // Finishing the negotiation shouldn't add or dissociate any transceivers.
+ ASSERT_TRUE(
+ callee->SetLocalDescription(CloneSessionDescription(answer.get())));
+ ASSERT_TRUE(caller->SetRemoteDescription(std::move(answer)));
+ auto caller_transceivers = caller->pc()->GetTransceivers();
+ ASSERT_EQ(1u, caller_transceivers.size());
+ EXPECT_EQ(second_mid, caller_transceivers[0]->mid());
+ auto callee_transceivers = callee->pc()->GetTransceivers();
+ ASSERT_EQ(1u, callee_transceivers.size());
+ EXPECT_EQ(second_mid, callee_transceivers[0]->mid());
+}
+
+// Test that creating/setting a local offer that recycles an m= section is
+// idempotent.
+TEST_F(PeerConnectionJsepTest, CreateOfferRecyclesWhenOfferingTwice) {
+ // Do a negotiation with a port 0 for the media section.
+ auto caller = CreatePeerConnection();
+ auto first_transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+ auto callee = CreatePeerConnection();
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+ // Stopping the transceiver makes the m= section recycleable on the next
+ // negotiation.
+ first_transceiver->StopInternal();
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+ caller->AddAudioTrack("audio2");
+
+ // Create a new offer that recycles the media section and set it as a local
+ // description.
+ auto offer = caller->CreateOffer();
+ auto offer_contents = offer->description()->contents();
+ ASSERT_EQ(1u, offer_contents.size());
+ EXPECT_FALSE(offer_contents[0].rejected);
+ ASSERT_TRUE(caller->SetLocalDescription(std::move(offer)));
+ ASSERT_EQ(1u, caller->pc()->GetTransceivers().size());
+ EXPECT_FALSE(caller->pc()->GetTransceivers()[0]->stopped());
+ std::string second_mid = offer_contents[0].name;
+
+ // Create another new offer and set the local description again without the
+ // rest of any negotiation occurring.
+ auto second_offer = caller->CreateOffer();
+ auto second_offer_contents = second_offer->description()->contents();
+ ASSERT_EQ(1u, second_offer_contents.size());
+ EXPECT_FALSE(second_offer_contents[0].rejected);
+ // The mid shouldn't change.
+ EXPECT_EQ(second_mid, second_offer_contents[0].name);
+
+ ASSERT_TRUE(caller->SetLocalDescription(std::move(second_offer)));
+ // Make sure that the caller's transceivers are associated correctly.
+ auto caller_transceivers = caller->pc()->GetTransceivers();
+ ASSERT_EQ(1u, caller_transceivers.size());
+ EXPECT_EQ(second_mid, caller_transceivers[0]->mid());
+ EXPECT_FALSE(caller_transceivers[0]->stopped());
+}
+
+// Test that the offer/answer and transceivers for both the caller and callee
+// side are generated/updated correctly when recycling an audio/video media
+// section as a media section of either the same or opposite type.
+// Correct recycling works as follows:
+// - The m= section is re-offered with a new MID value and the new media type.
+// - The previously-associated transceiver is dissociated when the new offer is
+// set as a local description on the offerer or as a remote description on
+// the answerer.
+// - The new transceiver is associated with the new MID value.
+class RecycleMediaSectionTest
+ : public PeerConnectionJsepTest,
+ public ::testing::WithParamInterface<
+ std::tuple<cricket::MediaType, cricket::MediaType>> {
+ protected:
+ RecycleMediaSectionTest() {
+ first_type_ = std::get<0>(GetParam());
+ second_type_ = std::get<1>(GetParam());
+ }
+
+ // Media type of the original m= section (the one that gets recycled).
+ cricket::MediaType first_type_;
+ // Media type of the new transceiver that recycles the m= section.
+ cricket::MediaType second_type_;
+};
+
+// Test that recycling works properly when a new transceiver recycles an m=
+// section that was rejected in both the current local and remote descriptions.
+TEST_P(RecycleMediaSectionTest, CurrentLocalAndCurrentRemoteRejected) {
+ auto caller = CreatePeerConnection();
+ auto first_transceiver = caller->AddTransceiver(first_type_);
+ auto callee = CreatePeerConnection();
+
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+
+ std::string first_mid = *first_transceiver->mid();
+ // Stopping the transceiver causes the m= section to be rejected in the next
+ // full exchange, on both the local and remote sides.
+ first_transceiver->StopInternal();
+
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+
+ auto second_transceiver = caller->AddTransceiver(second_type_);
+
+ // The offer should reuse the previous media section but allocate a new MID
+ // and change the media type.
+ auto offer = caller->CreateOffer();
+ auto offer_contents = offer->description()->contents();
+ ASSERT_EQ(1u, offer_contents.size());
+ EXPECT_FALSE(offer_contents[0].rejected);
+ EXPECT_EQ(second_type_, offer_contents[0].media_description()->type());
+ std::string second_mid = offer_contents[0].name;
+ EXPECT_NE(first_mid, second_mid);
+
+ // Setting the local offer will dissociate the previous transceiver and set
+ // the MID for the new transceiver.
+ ASSERT_TRUE(
+ caller->SetLocalDescription(CloneSessionDescription(offer.get())));
+ EXPECT_EQ(absl::nullopt, first_transceiver->mid());
+ EXPECT_EQ(second_mid, second_transceiver->mid());
+
+ // Setting the remote offer will dissociate the previous transceiver and
+ // create a new transceiver for the media section.
+ ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer)));
+ auto callee_transceivers = callee->pc()->GetTransceivers();
+ ASSERT_EQ(1u, callee_transceivers.size());
+ EXPECT_EQ(second_mid, callee_transceivers[0]->mid());
+ EXPECT_EQ(second_type_, callee_transceivers[0]->media_type());
+
+ // The answer should have only one media section for the new transceiver.
+ auto answer = callee->CreateAnswer();
+ auto answer_contents = answer->description()->contents();
+ ASSERT_EQ(1u, answer_contents.size());
+ EXPECT_FALSE(answer_contents[0].rejected);
+ EXPECT_EQ(second_mid, answer_contents[0].name);
+ EXPECT_EQ(second_type_, answer_contents[0].media_description()->type());
+
+ // Setting the local answer should succeed.
+ ASSERT_TRUE(
+ callee->SetLocalDescription(CloneSessionDescription(answer.get())));
+
+ // Setting the remote answer should succeed and not create any new
+ // transceivers.
+ ASSERT_TRUE(caller->SetRemoteDescription(std::move(answer)));
+ ASSERT_EQ(1u, caller->pc()->GetTransceivers().size());
+ ASSERT_EQ(1u, callee->pc()->GetTransceivers().size());
+}
+
+// Test that recycling works properly when a new transceiver recycles an m=
+// section that was rejected in only the current remote description.
+TEST_P(RecycleMediaSectionTest, CurrentRemoteOnlyRejected) {
+ auto caller = CreatePeerConnection();
+ auto caller_first_transceiver = caller->AddTransceiver(first_type_);
+ auto callee = CreatePeerConnection();
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+
+ std::string first_mid = *caller_first_transceiver->mid();
+ ASSERT_EQ(1u, callee->pc()->GetTransceivers().size());
+ auto callee_first_transceiver = callee->pc()->GetTransceivers()[0];
+ // Only the callee stops its transceiver, so from the caller's perspective
+ // the rejection appears only in the (remote) answer it receives.
+ callee_first_transceiver->StopInternal();
+
+ // The answer will have a rejected m= section.
+ ASSERT_TRUE(
+ caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal()));
+
+ // The offer should reuse the previous media section but allocate a new MID
+ // and change the media type.
+ auto caller_second_transceiver = caller->AddTransceiver(second_type_);
+ auto offer = caller->CreateOffer();
+ const auto& offer_contents = offer->description()->contents();
+ ASSERT_EQ(1u, offer_contents.size());
+ EXPECT_FALSE(offer_contents[0].rejected);
+ EXPECT_EQ(second_type_, offer_contents[0].media_description()->type());
+ std::string second_mid = offer_contents[0].name;
+ EXPECT_NE(first_mid, second_mid);
+
+ // Setting the local offer will dissociate the previous transceiver and set
+ // the MID for the new transceiver.
+ ASSERT_TRUE(
+ caller->SetLocalDescription(CloneSessionDescription(offer.get())));
+ EXPECT_EQ(absl::nullopt, caller_first_transceiver->mid());
+ EXPECT_EQ(second_mid, caller_second_transceiver->mid());
+
+ // Setting the remote offer will dissociate the previous transceiver and
+ // create a new transceiver for the media section.
+ ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer)));
+ auto callee_transceivers = callee->pc()->GetTransceivers();
+ ASSERT_EQ(1u, callee_transceivers.size());
+ EXPECT_EQ(second_mid, callee_transceivers[0]->mid());
+ EXPECT_EQ(second_type_, callee_transceivers[0]->media_type());
+
+ // The answer should have only one media section for the new transceiver.
+ auto answer = callee->CreateAnswer();
+ auto answer_contents = answer->description()->contents();
+ ASSERT_EQ(1u, answer_contents.size());
+ EXPECT_FALSE(answer_contents[0].rejected);
+ EXPECT_EQ(second_mid, answer_contents[0].name);
+ EXPECT_EQ(second_type_, answer_contents[0].media_description()->type());
+
+ // Setting the local answer should succeed.
+ ASSERT_TRUE(
+ callee->SetLocalDescription(CloneSessionDescription(answer.get())));
+
+ // Setting the remote answer should succeed and not create any new
+ // transceivers.
+ ASSERT_TRUE(caller->SetRemoteDescription(std::move(answer)));
+ ASSERT_EQ(1u, caller->pc()->GetTransceivers().size());
+ ASSERT_EQ(1u, callee->pc()->GetTransceivers().size());
+}
+
+// Test that recycling works properly when a new transceiver recycles an m=
+// section that was rejected only in the current local description.
+TEST_P(RecycleMediaSectionTest, CurrentLocalOnlyRejected) {
+ auto caller = CreatePeerConnection();
+ auto caller_first_transceiver = caller->AddTransceiver(first_type_);
+ auto callee = CreatePeerConnection();
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+
+ std::string first_mid = *caller_first_transceiver->mid();
+ ASSERT_EQ(1u, callee->pc()->GetTransceivers().size());
+ auto callee_first_transceiver = callee->pc()->GetTransceivers()[0];
+ // The callee stops its transceiver and will itself reoffer, so the
+ // rejection appears only in the callee's current local description.
+ callee_first_transceiver->StopInternal();
+
+ // The answer will have a rejected m= section.
+ ASSERT_TRUE(
+ caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal()));
+
+ // The offer should reuse the previous media section but allocate a new MID
+ // and change the media type.
+ auto callee_second_transceiver = callee->AddTransceiver(second_type_);
+ auto offer = callee->CreateOffer();
+ const auto& offer_contents = offer->description()->contents();
+ ASSERT_EQ(1u, offer_contents.size());
+ EXPECT_FALSE(offer_contents[0].rejected);
+ EXPECT_EQ(second_type_, offer_contents[0].media_description()->type());
+ std::string second_mid = offer_contents[0].name;
+ EXPECT_NE(first_mid, second_mid);
+
+ // Setting the local offer will dissociate the previous transceiver and set
+ // the MID for the new transceiver.
+ ASSERT_TRUE(
+ callee->SetLocalDescription(CloneSessionDescription(offer.get())));
+ EXPECT_EQ(absl::nullopt, callee_first_transceiver->mid());
+ EXPECT_EQ(second_mid, callee_second_transceiver->mid());
+
+ // Setting the remote offer will dissociate the previous transceiver and
+ // create a new transceiver for the media section.
+ ASSERT_TRUE(caller->SetRemoteDescription(std::move(offer)));
+ auto caller_transceivers = caller->pc()->GetTransceivers();
+ ASSERT_EQ(1u, caller_transceivers.size());
+ EXPECT_EQ(second_mid, caller_transceivers[0]->mid());
+ EXPECT_EQ(second_type_, caller_transceivers[0]->media_type());
+
+ // The answer should have only one media section for the new transceiver.
+ auto answer = caller->CreateAnswer();
+ auto answer_contents = answer->description()->contents();
+ ASSERT_EQ(1u, answer_contents.size());
+ EXPECT_FALSE(answer_contents[0].rejected);
+ EXPECT_EQ(second_mid, answer_contents[0].name);
+ EXPECT_EQ(second_type_, answer_contents[0].media_description()->type());
+
+ // Setting the local answer should succeed.
+ ASSERT_TRUE(
+ caller->SetLocalDescription(CloneSessionDescription(answer.get())));
+
+ // Setting the remote answer should succeed and not create any new
+ // transceivers.
+ ASSERT_TRUE(callee->SetRemoteDescription(std::move(answer)));
+ ASSERT_EQ(1u, callee->pc()->GetTransceivers().size());
+ ASSERT_EQ(1u, caller->pc()->GetTransceivers().size());
+}
+
+// Test that an m= section is *not* recycled if the media section is only
+// rejected in the pending local description and there is no current remote
+// description.
+TEST_P(RecycleMediaSectionTest, PendingLocalRejectedAndNoRemote) {
+ auto caller = CreatePeerConnection();
+ auto caller_first_transceiver = caller->AddTransceiver(first_type_);
+
+ ASSERT_TRUE(caller->SetLocalDescription(caller->CreateOffer()));
+
+ std::string first_mid = *caller_first_transceiver->mid();
+ caller_first_transceiver->StopInternal();
+
+ // The reoffer will have a rejected m= section.
+ ASSERT_TRUE(caller->SetLocalDescription(caller->CreateOffer()));
+
+ auto caller_second_transceiver = caller->AddTransceiver(second_type_);
+
+ // The reoffer should not recycle the existing m= section since it is not
+ // rejected in either the *current* local or *current* remote description.
+ auto reoffer = caller->CreateOffer();
+ auto reoffer_contents = reoffer->description()->contents();
+ ASSERT_EQ(2u, reoffer_contents.size());
+ EXPECT_TRUE(reoffer_contents[0].rejected);
+ EXPECT_EQ(first_type_, reoffer_contents[0].media_description()->type());
+ EXPECT_EQ(first_mid, reoffer_contents[0].name);
+ EXPECT_FALSE(reoffer_contents[1].rejected);
+ EXPECT_EQ(second_type_, reoffer_contents[1].media_description()->type());
+ std::string second_mid = reoffer_contents[1].name;
+ EXPECT_NE(first_mid, second_mid);
+
+ ASSERT_TRUE(caller->SetLocalDescription(std::move(reoffer)));
+
+ // Both RtpTransceivers are associated.
+ EXPECT_EQ(first_mid, caller_first_transceiver->mid());
+ EXPECT_EQ(second_mid, caller_second_transceiver->mid());
+}
+
+// Test that an m= section is *not* recycled if the media section is only
+// rejected in the pending local description and not rejected in the current
+// remote description.
+TEST_P(RecycleMediaSectionTest, PendingLocalRejectedAndNotRejectedRemote) {
+ auto caller = CreatePeerConnection();
+ auto caller_first_transceiver = caller->AddTransceiver(first_type_);
+ auto callee = CreatePeerConnection();
+
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+
+ std::string first_mid = *caller_first_transceiver->mid();
+ caller_first_transceiver->StopInternal();
+
+ // The reoffer will have a rejected m= section.
+ ASSERT_TRUE(caller->SetLocalDescription(caller->CreateOffer()));
+
+ auto caller_second_transceiver = caller->AddTransceiver(second_type_);
+
+ // The reoffer should not recycle the existing m= section since it is not
+ // rejected in either the *current* local or *current* remote description.
+ auto reoffer = caller->CreateOffer();
+ auto reoffer_contents = reoffer->description()->contents();
+ ASSERT_EQ(2u, reoffer_contents.size());
+ EXPECT_TRUE(reoffer_contents[0].rejected);
+ EXPECT_EQ(first_type_, reoffer_contents[0].media_description()->type());
+ EXPECT_EQ(first_mid, reoffer_contents[0].name);
+ EXPECT_FALSE(reoffer_contents[1].rejected);
+ EXPECT_EQ(second_type_, reoffer_contents[1].media_description()->type());
+ std::string second_mid = reoffer_contents[1].name;
+ EXPECT_NE(first_mid, second_mid);
+
+ ASSERT_TRUE(caller->SetLocalDescription(std::move(reoffer)));
+
+ // Both RtpTransceivers are associated.
+ EXPECT_EQ(first_mid, caller_first_transceiver->mid());
+ EXPECT_EQ(second_mid, caller_second_transceiver->mid());
+}
+
+// Test that an m= section is *not* recycled if the media section is only
+// rejected in the pending remote description and there is no current local
+// description.
+TEST_P(RecycleMediaSectionTest, PendingRemoteRejectedAndNoLocal) {
+ auto caller = CreatePeerConnection();
+ auto caller_first_transceiver = caller->AddTransceiver(first_type_);
+ auto callee = CreatePeerConnection();
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+
+ ASSERT_EQ(1u, callee->pc()->GetTransceivers().size());
+ auto callee_first_transceiver = callee->pc()->GetTransceivers()[0];
+ std::string first_mid = *callee_first_transceiver->mid();
+ // The caller stops its transceiver; the rejection will only appear in the
+ // callee's *pending* remote description below.
+ caller_first_transceiver->StopInternal();
+
+ // The reoffer will have a rejected m= section.
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+
+ auto callee_second_transceiver = callee->AddTransceiver(second_type_);
+
+ // The reoffer should not recycle the existing m= section since it is not
+ // rejected in either the *current* local or *current* remote description.
+ auto reoffer = callee->CreateOffer();
+ auto reoffer_contents = reoffer->description()->contents();
+ ASSERT_EQ(2u, reoffer_contents.size());
+ EXPECT_TRUE(reoffer_contents[0].rejected);
+ EXPECT_EQ(first_type_, reoffer_contents[0].media_description()->type());
+ EXPECT_EQ(first_mid, reoffer_contents[0].name);
+ EXPECT_FALSE(reoffer_contents[1].rejected);
+ EXPECT_EQ(second_type_, reoffer_contents[1].media_description()->type());
+ std::string second_mid = reoffer_contents[1].name;
+ EXPECT_NE(first_mid, second_mid);
+
+ // Note: Cannot actually set the reoffer since the callee is in the signaling
+ // state 'have-remote-offer'.
+}
+
+// Test that an m= section is *not* recycled if the media section is only
+// rejected in the pending remote description and not rejected in the current
+// local description.
+TEST_P(RecycleMediaSectionTest, PendingRemoteRejectedAndNotRejectedLocal) {
+ auto caller = CreatePeerConnection();
+ auto caller_first_transceiver = caller->AddTransceiver(first_type_);
+ auto callee = CreatePeerConnection();
+
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+
+ ASSERT_EQ(1u, callee->pc()->GetTransceivers().size());
+ auto callee_first_transceiver = callee->pc()->GetTransceivers()[0];
+ std::string first_mid = *callee_first_transceiver->mid();
+ // The caller stops its transceiver after a full exchange; the rejection
+ // will only appear in the callee's *pending* remote description below.
+ caller_first_transceiver->StopInternal();
+
+ // The reoffer will have a rejected m= section.
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+
+ auto callee_second_transceiver = callee->AddTransceiver(second_type_);
+
+ // The reoffer should not recycle the existing m= section since it is not
+ // rejected in either the *current* local or *current* remote description.
+ auto reoffer = callee->CreateOffer();
+ auto reoffer_contents = reoffer->description()->contents();
+ ASSERT_EQ(2u, reoffer_contents.size());
+ EXPECT_TRUE(reoffer_contents[0].rejected);
+ EXPECT_EQ(first_type_, reoffer_contents[0].media_description()->type());
+ EXPECT_EQ(first_mid, reoffer_contents[0].name);
+ EXPECT_FALSE(reoffer_contents[1].rejected);
+ EXPECT_EQ(second_type_, reoffer_contents[1].media_description()->type());
+ std::string second_mid = reoffer_contents[1].name;
+ EXPECT_NE(first_mid, second_mid);
+
+ // Note: Cannot actually set the reoffer since the callee is in the signaling
+ // state 'have-remote-offer'.
+}
+
+// Test all combinations of audio and video as the first and second media type
+// for the media section. This is needed for full test coverage because
+// MediaSession has separate functions for processing audio and video media
+// sections.
+// The tuple is (first media type, second media type) as consumed by the
+// RecycleMediaSectionTest fixture constructor.
+INSTANTIATE_TEST_SUITE_P(
+ PeerConnectionJsepTest,
+ RecycleMediaSectionTest,
+ Combine(Values(cricket::MEDIA_TYPE_AUDIO, cricket::MEDIA_TYPE_VIDEO),
+ Values(cricket::MEDIA_TYPE_AUDIO, cricket::MEDIA_TYPE_VIDEO)));
+
+// Test that a new data channel section will not reuse a recycleable audio or
+// video media section. Additionally, tests that the new section is added to the
+// end of the session description.
+TEST_F(PeerConnectionJsepTest, DataChannelDoesNotRecycleMediaSection) {
+ auto caller = CreatePeerConnection();
+ auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+ auto callee = CreatePeerConnection();
+
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+
+ transceiver->StopInternal();
+
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+
+ // The audio m= section is now recycleable, but the data channel must not
+ // take it over.
+ caller->CreateDataChannel("dc");
+
+ auto offer = caller->CreateOffer();
+ auto offer_contents = offer->description()->contents();
+ ASSERT_EQ(2u, offer_contents.size());
+ EXPECT_EQ(cricket::MEDIA_TYPE_AUDIO,
+ offer_contents[0].media_description()->type());
+ EXPECT_EQ(cricket::MEDIA_TYPE_DATA,
+ offer_contents[1].media_description()->type());
+
+ ASSERT_TRUE(
+ caller->SetLocalDescription(CloneSessionDescription(offer.get())));
+ ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer)));
+
+ auto answer = callee->CreateAnswer();
+ auto answer_contents = answer->description()->contents();
+ ASSERT_EQ(2u, answer_contents.size());
+ EXPECT_EQ(cricket::MEDIA_TYPE_AUDIO,
+ answer_contents[0].media_description()->type());
+ EXPECT_EQ(cricket::MEDIA_TYPE_DATA,
+ answer_contents[1].media_description()->type());
+}
+
+// Test that if a new track is added to an existing session that has a data
+// section, the new section comes at the end of the new offer, after the
+// existing data section.
+TEST_F(PeerConnectionJsepTest, AudioTrackAddedAfterDataSectionInReoffer) {
+ auto caller = CreatePeerConnection();
+ caller->CreateDataChannel("dc");
+ auto callee = CreatePeerConnection();
+
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+
+ caller->AddAudioTrack("a");
+
+ auto offer = caller->CreateOffer();
+ auto contents = offer->description()->contents();
+ ASSERT_EQ(2u, contents.size());
+ EXPECT_EQ(cricket::MEDIA_TYPE_DATA, contents[0].media_description()->type());
+ EXPECT_EQ(cricket::MEDIA_TYPE_AUDIO, contents[1].media_description()->type());
+}
+
+// Tests for MID properties.
+
+// Helper that renames the m= section at `mline_index` in `sdesc` to
+// `new_mid`, keeping the corresponding transport info and the BUNDLE group
+// (if present) consistent with the new name.
+static void RenameSection(size_t mline_index,
+ const std::string& new_mid,
+ SessionDescriptionInterface* sdesc) {
+ cricket::SessionDescription* desc = sdesc->description();
+ std::string old_mid = desc->contents()[mline_index].name;
+ desc->contents()[mline_index].name = new_mid;
+ desc->transport_infos()[mline_index].content_name = new_mid;
+ const cricket::ContentGroup* bundle =
+ desc->GetGroupByName(cricket::GROUP_TYPE_BUNDLE);
+ if (bundle) {
+ // Rebuild the BUNDLE group with the renamed content; the group must be
+ // removed and re-added since it is held by value in the description.
+ cricket::ContentGroup new_bundle = *bundle;
+ if (new_bundle.RemoveContentName(old_mid)) {
+ new_bundle.AddContentName(new_mid);
+ }
+ desc->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE);
+ desc->AddGroup(new_bundle);
+ }
+}
+
+// Test that two PeerConnections can have a successful offer/answer exchange if
+// the MIDs are changed from the defaults.
+TEST_F(PeerConnectionJsepTest, OfferAnswerWithChangedMids) {
+ constexpr char kFirstMid[] = "nondefaultmid";
+ constexpr char kSecondMid[] = "randommid";
+
+ auto caller = CreatePeerConnection();
+ caller->AddAudioTrack("a");
+ caller->AddAudioTrack("b");
+ auto callee = CreatePeerConnection();
+
+ // Rename both m= sections before the offer is applied on either side.
+ auto offer = caller->CreateOffer();
+ RenameSection(0, kFirstMid, offer.get());
+ RenameSection(1, kSecondMid, offer.get());
+
+ ASSERT_TRUE(
+ caller->SetLocalDescription(CloneSessionDescription(offer.get())));
+ auto caller_transceivers = caller->pc()->GetTransceivers();
+ EXPECT_EQ(kFirstMid, caller_transceivers[0]->mid());
+ EXPECT_EQ(kSecondMid, caller_transceivers[1]->mid());
+
+ ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer)));
+ auto callee_transceivers = callee->pc()->GetTransceivers();
+ EXPECT_EQ(kFirstMid, callee_transceivers[0]->mid());
+ EXPECT_EQ(kSecondMid, callee_transceivers[1]->mid());
+
+ // The answer must echo the offered MIDs back.
+ auto answer = callee->CreateAnswer();
+ auto answer_contents = answer->description()->contents();
+ EXPECT_EQ(kFirstMid, answer_contents[0].name);
+ EXPECT_EQ(kSecondMid, answer_contents[1].name);
+
+ ASSERT_TRUE(
+ callee->SetLocalDescription(CloneSessionDescription(answer.get())));
+ ASSERT_TRUE(caller->SetRemoteDescription(std::move(answer)));
+}
+
+// Test that CreateOffer will generate a MID that is not already used if the
+// default it would have picked is already taken. This is tested by using a
+// third PeerConnection to determine what the default would be for the second
+// media section then setting that as the first media section's MID.
+TEST_F(PeerConnectionJsepTest, CreateOfferGeneratesUniqueMidIfAlreadyTaken) {
+ // First, find what the default MID is for the second media section.
+ auto pc = CreatePeerConnection();
+ pc->AddAudioTrack("a");
+ pc->AddAudioTrack("b");
+ auto default_offer = pc->CreateOffer();
+ std::string default_second_mid =
+ default_offer->description()->contents()[1].name;
+
+ // Now, do an offer/answer with one track which has the MID set to the default
+ // second MID.
+ auto caller = CreatePeerConnection();
+ caller->AddAudioTrack("a");
+ auto callee = CreatePeerConnection();
+
+ auto offer = caller->CreateOffer();
+ RenameSection(0, default_second_mid, offer.get());
+
+ ASSERT_TRUE(
+ caller->SetLocalDescription(CloneSessionDescription(offer.get())));
+ ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer)));
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+
+ // Add a second track and ensure that the MID is different.
+ caller->AddAudioTrack("b");
+
+ // The second section's default MID is taken, so a unique one is generated.
+ auto reoffer = caller->CreateOffer();
+ auto reoffer_contents = reoffer->description()->contents();
+ EXPECT_EQ(default_second_mid, reoffer_contents[0].name);
+ EXPECT_NE(reoffer_contents[0].name, reoffer_contents[1].name);
+}
+
+// Test that if an audio or video section has the default data section MID, then
+// CreateOffer will generate a unique MID for the newly added data section.
+TEST_F(PeerConnectionJsepTest,
+ CreateOfferGeneratesUniqueMidForDataSectionIfAlreadyTaken) {
+ // First, find what the default MID is for the data channel.
+ auto pc = CreatePeerConnection();
+ pc->CreateDataChannel("dc");
+ auto default_offer = pc->CreateOffer();
+ std::string default_data_mid =
+ default_offer->description()->contents()[0].name;
+
+ // Now do an offer/answer with one audio track which has a MID set to the
+ // default data MID.
+ auto caller = CreatePeerConnection();
+ caller->AddAudioTrack("a");
+ auto callee = CreatePeerConnection();
+
+ auto offer = caller->CreateOffer();
+ RenameSection(0, default_data_mid, offer.get());
+
+ ASSERT_TRUE(
+ caller->SetLocalDescription(CloneSessionDescription(offer.get())));
+ ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer)));
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+
+ // Add a data channel and ensure that the MID is different.
+ caller->CreateDataChannel("dc");
+
+ // The default data MID is taken by the audio section, so a unique one is
+ // generated for the data section.
+ auto reoffer = caller->CreateOffer();
+ auto reoffer_contents = reoffer->description()->contents();
+ EXPECT_EQ(default_data_mid, reoffer_contents[0].name);
+ EXPECT_NE(reoffer_contents[0].name, reoffer_contents[1].name);
+}
+
+// Test that a reoffer initiated by the callee adds a new track to the caller.
+TEST_F(PeerConnectionJsepTest, CalleeDoesReoffer) {
+ auto caller = CreatePeerConnection();
+ caller->AddAudioTrack("a");
+ auto callee = CreatePeerConnection();
+ callee->AddAudioTrack("a");
+ callee->AddVideoTrack("v");
+
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+
+ // After the first exchange only the caller's audio section was negotiated,
+ // so the caller has not yet seen the callee's video track.
+ EXPECT_EQ(1u, caller->pc()->GetTransceivers().size());
+ EXPECT_EQ(2u, callee->pc()->GetTransceivers().size());
+
+ ASSERT_TRUE(callee->ExchangeOfferAnswerWith(caller.get()));
+
+ EXPECT_EQ(2u, caller->pc()->GetTransceivers().size());
+ EXPECT_EQ(2u, callee->pc()->GetTransceivers().size());
+}
+
+// Tests for MSID properties.
+
+// Test that adding a track with AddTrack results in an offer that signals the
+// track's ID.
+TEST_F(PeerConnectionJsepTest, AddingTrackWithAddTrackSpecifiesTrackId) {
+ const std::string kTrackId = "audio_track";
+
+ auto caller = CreatePeerConnection();
+ caller->AddAudioTrack(kTrackId);
+
+ auto offer = caller->CreateOffer();
+ auto contents = offer->description()->contents();
+ ASSERT_EQ(1u, contents.size());
+ // The signaled track ID is carried in the stream params of the m= section.
+ auto streams = contents[0].media_description()->streams();
+ ASSERT_EQ(1u, streams.size());
+ EXPECT_EQ(kTrackId, streams[0].id);
+}
+
+// Test that adding a track by calling AddTransceiver then SetTrack results in
+// an offer that does not signal the track's ID and signals a random ID.
+TEST_F(PeerConnectionJsepTest,
+ AddingTrackWithAddTransceiverSpecifiesRandomTrackId) {
+ const std::string kTrackId = "audio_track";
+
+ auto caller = CreatePeerConnection();
+ auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+ transceiver->sender()->SetTrack(caller->CreateAudioTrack(kTrackId).get());
+
+ auto offer = caller->CreateOffer();
+ auto contents = offer->description()->contents();
+ ASSERT_EQ(1u, contents.size());
+ auto streams = contents[0].media_description()->streams();
+ ASSERT_EQ(1u, streams.size());
+ // The stream ID was generated when the transceiver was created, not taken
+ // from the track set afterwards.
+ EXPECT_NE(kTrackId, streams[0].id);
+}
+
+// Test that if the transceiver is recvonly or inactive, then no MSID
+// information is included in the offer.
+TEST_F(PeerConnectionJsepTest, NoMsidInOfferIfTransceiverDirectionHasNoSend) {
+ auto caller = CreatePeerConnection();
+
+ RtpTransceiverInit init_recvonly;
+ init_recvonly.direction = RtpTransceiverDirection::kRecvOnly;
+ ASSERT_TRUE(caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO, init_recvonly));
+
+ RtpTransceiverInit init_inactive;
+ init_inactive.direction = RtpTransceiverDirection::kInactive;
+ ASSERT_TRUE(caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO, init_inactive));
+
+ // Neither transceiver sends, so neither m= section should carry streams.
+ auto offer = caller->CreateOffer();
+ auto contents = offer->description()->contents();
+ ASSERT_EQ(2u, contents.size());
+ // MSID is specified in the first stream, so no streams means no MSID.
+ EXPECT_EQ(0u, contents[0].media_description()->streams().size());
+ EXPECT_EQ(0u, contents[1].media_description()->streams().size());
+}
+
+// Test that if an answer negotiates transceiver directions of recvonly or
+// inactive, then no MSID information is included in the answer.
+TEST_F(PeerConnectionJsepTest, NoMsidInAnswerIfNoRespondingTracks) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+
+ // recvonly transceiver will get negotiated to inactive since the callee has
+ // no tracks to send in response.
+ RtpTransceiverInit init_recvonly;
+ init_recvonly.direction = RtpTransceiverDirection::kRecvOnly;
+ caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO, init_recvonly);
+
+ // sendrecv transceiver will get negotiated to recvonly since the callee has
+ // no tracks to send in response.
+ RtpTransceiverInit init_sendrecv;
+ init_sendrecv.direction = RtpTransceiverDirection::kSendRecv;
+ caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO, init_sendrecv);
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+
+ // The callee sends nothing in either section, so its answer carries no
+ // streams.
+ auto answer = callee->CreateAnswer();
+ auto contents = answer->description()->contents();
+ ASSERT_EQ(2u, contents.size());
+ // MSID is specified in the first stream, so no streams means no MSID.
+ EXPECT_EQ(0u, contents[0].media_description()->streams().size());
+ EXPECT_EQ(0u, contents[1].media_description()->streams().size());
+}
+
+// Test that the MSID is included even if the transceiver direction has changed
+// to inactive if the transceiver had previously sent media.
+TEST_F(PeerConnectionJsepTest, IncludeMsidEvenIfDirectionHasChanged) {
+ auto caller = CreatePeerConnection();
+ caller->AddAudioTrack("audio");
+ auto callee = CreatePeerConnection();
+ callee->AddAudioTrack("audio");
+
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+
+ caller->pc()->GetTransceivers()[0]->SetDirectionWithError(
+ RtpTransceiverDirection::kInactive);
+
+ // The transceiver direction on both sides will turn to inactive.
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+
+ // Inspect what the callee actually received as the (re)offer.
+ auto* offer = callee->pc()->remote_description();
+ auto offer_contents = offer->description()->contents();
+ ASSERT_EQ(1u, offer_contents.size());
+ // MSID is specified in the first stream. If it is present, assume that MSID
+ // is there.
+ EXPECT_EQ(1u, offer_contents[0].media_description()->streams().size());
+
+ auto* answer = caller->pc()->remote_description();
+ auto answer_contents = answer->description()->contents();
+ ASSERT_EQ(1u, answer_contents.size());
+ EXPECT_EQ(1u, answer_contents[0].media_description()->streams().size());
+}
+
+// Test that stopping a RtpTransceiver will cause future offers to not include
+// any MSID information for that section.
+TEST_F(PeerConnectionJsepTest, RemoveMsidIfTransceiverStopped) {
+ auto caller = CreatePeerConnection();
+ auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+ auto callee = CreatePeerConnection();
+
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+
+ transceiver->StopInternal();
+
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+
+ // Inspect the reoffer the callee received after the transceiver stopped.
+ auto* offer = callee->pc()->remote_description();
+ auto offer_contents = offer->description()->contents();
+ ASSERT_EQ(1u, offer_contents.size());
+ // MSID is specified in the first stream, so no streams means no MSID.
+ EXPECT_EQ(0u, offer_contents[0].media_description()->streams().size());
+}
+
+// Test that the callee RtpReceiver created by a call to SetRemoteDescription
+// has its ID set to the signaled track ID.
+TEST_F(PeerConnectionJsepTest,
+ RtpReceiverCreatedBySetRemoteDescriptionHasSignaledTrackId) {
+ const std::string kTrackId = "audio_track";
+
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+ caller->AddAudioTrack(kTrackId);
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+
+ // The receiver was created by SetRemoteDescription and should adopt the
+ // track ID signaled in the offer.
+ ASSERT_EQ(1u, callee->pc()->GetReceivers().size());
+ auto receiver = callee->pc()->GetReceivers()[0];
+ EXPECT_EQ(kTrackId, receiver->id());
+}
+
+// Test that if the callee RtpReceiver is reused by a call to
+// SetRemoteDescription, its ID does not change.
+TEST_F(PeerConnectionJsepTest,
+ RtpReceiverCreatedBeforeSetRemoteDescriptionKeepsId) {
+ const std::string kTrackId = "audio_track";
+
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+ caller->AddAudioTrack(kTrackId);
+ // Adding a local track on the callee creates a receiver before any remote
+ // description is applied.
+ callee->AddAudioTrack("dummy_track");
+
+ ASSERT_EQ(1u, callee->pc()->GetReceivers().size());
+ auto receiver = callee->pc()->GetReceivers()[0];
+ std::string receiver_id = receiver->id();
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+
+ // The pre-existing receiver keeps its original ID, not the signaled one.
+ EXPECT_EQ(receiver_id, receiver->id());
+}
+
+// Test that setting a remote offer with one track that has no streams fires off
+// the correct OnAddTrack event.
+TEST_F(PeerConnectionJsepTest,
+ SetRemoteOfferWithOneTrackNoStreamFiresOnAddTrack) {
+ const std::string kTrackLabel = "audio_track";
+
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+ ASSERT_TRUE(caller->AddAudioTrack(kTrackLabel));
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+
+ // Exactly one OnAddTrack event fires, carrying the signaled track ID and an
+ // empty stream list.
+ const auto& track_events = callee->observer()->add_track_events_;
+ ASSERT_EQ(1u, track_events.size());
+ const auto& event = track_events[0];
+ EXPECT_EQ(kTrackLabel, event.receiver->track()->id());
+ EXPECT_EQ(0u, event.streams.size());
+}
+
// Test that setting a remote offer with one track that has one stream fires off
// the correct OnAddTrack event.
TEST_F(PeerConnectionJsepTest,
       SetRemoteOfferWithOneTrackOneStreamFiresOnAddTrack) {
  const std::string kTrackLabel = "audio_track";
  const std::string kStreamId = "audio_stream";

  auto caller = CreatePeerConnection();
  auto callee = CreatePeerConnection();
  ASSERT_TRUE(caller->AddAudioTrack(kTrackLabel, {kStreamId}));

  ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));

  const auto& track_events = callee->observer()->add_track_events_;
  ASSERT_EQ(1u, track_events.size());
  const auto& event = track_events[0];
  ASSERT_EQ(1u, event.streams.size());
  auto stream = event.streams[0];
  EXPECT_EQ(kStreamId, stream->id());
  // At the time the event fired, the stream must already have contained the
  // receiver's track, and the receiver's stream list must match the event's.
  EXPECT_THAT(track_events[0].snapshotted_stream_tracks.at(stream),
              ElementsAre(event.receiver->track()));
  EXPECT_EQ(event.receiver->streams(), track_events[0].streams);
}
+
// Test that setting a remote offer with two tracks that share the same stream
// fires off two OnAddTrack events, both with the same stream that has both
// tracks present at the time of firing. This is to ensure that track events are
// not fired until SetRemoteDescription has finished processing all the media
// sections.
TEST_F(PeerConnectionJsepTest,
       SetRemoteOfferWithTwoTracksSameStreamFiresOnAddTrack) {
  const std::string kTrack1Label = "audio_track1";
  const std::string kTrack2Label = "audio_track2";
  const std::string kSharedStreamId = "stream";

  auto caller = CreatePeerConnection();
  auto callee = CreatePeerConnection();
  ASSERT_TRUE(caller->AddAudioTrack(kTrack1Label, {kSharedStreamId}));
  ASSERT_TRUE(caller->AddAudioTrack(kTrack2Label, {kSharedStreamId}));

  ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));

  const auto& track_events = callee->observer()->add_track_events_;
  ASSERT_EQ(2u, track_events.size());
  const auto& event1 = track_events[0];
  const auto& event2 = track_events[1];
  ASSERT_EQ(1u, event1.streams.size());
  auto stream = event1.streams[0];
  // Both events must reference the same stream object.
  ASSERT_THAT(event2.streams, ElementsAre(stream));
  auto track1 = event1.receiver->track();
  auto track2 = event2.receiver->track();
  // Both snapshots must show the stream containing both tracks, proving the
  // events were deferred until all media sections were processed.
  EXPECT_THAT(event1.snapshotted_stream_tracks.at(stream),
              UnorderedElementsAre(track1, track2));
  EXPECT_THAT(event2.snapshotted_stream_tracks.at(stream),
              UnorderedElementsAre(track1, track2));
}
+
// Test that setting a remote offer with one track that has two streams fires
// off the correct OnAddTrack event.
TEST_F(PeerConnectionJsepTest,
       SetRemoteOfferWithOneTrackTwoStreamFiresOnAddTrack) {
  const std::string kTrackLabel = "audio_track";
  const std::string kStreamId1 = "audio_stream1";
  const std::string kStreamId2 = "audio_stream2";

  auto caller = CreatePeerConnection();
  auto callee = CreatePeerConnection();
  ASSERT_TRUE(caller->AddAudioTrack(kTrackLabel, {kStreamId1, kStreamId2}));

  ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));

  // One event carrying both signaled streams, in the signaled order.
  const auto& track_events = callee->observer()->add_track_events_;
  ASSERT_EQ(1u, track_events.size());
  const auto& event = track_events[0];
  ASSERT_EQ(2u, event.streams.size());
  EXPECT_EQ(kStreamId1, event.streams[0]->id());
  EXPECT_EQ(kStreamId2, event.streams[1]->id());
}
+
// Test that if an RtpTransceiver with a current_direction set is stopped, then
// current_direction changes to kStopped. (NOTE(review): the old comment said
// "changed to null", but the assertion below checks kStopped.)
TEST_F(PeerConnectionJsepTest, CurrentDirectionResetWhenRtpTransceiverStopped) {
  auto caller = CreatePeerConnection();
  auto callee = CreatePeerConnection();

  auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);

  // A full offer/answer exchange gives the transceiver a current_direction.
  ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));

  ASSERT_TRUE(transceiver->current_direction());
  transceiver->StopInternal();
  EXPECT_EQ(transceiver->current_direction(),
            RtpTransceiverDirection::kStopped);
}
+
// Test that you can't set an answer on a PeerConnection before setting the
// offer.
TEST_F(PeerConnectionJsepTest, AnswerBeforeOfferFails) {
  auto caller = CreatePeerConnection();
  auto callee = CreatePeerConnection();
  caller->AddAudioTrack("audio");

  ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOffer()));

  // The caller never set its own offer as local, so applying a remote answer
  // is an invalid state transition.
  RTCError error;
  ASSERT_FALSE(caller->SetRemoteDescription(callee->CreateAnswer(), &error));
  EXPECT_EQ(RTCErrorType::INVALID_STATE, error.type());
}
+
// Test that a Unified Plan PeerConnection fails to set a Plan B offer if it has
// two video tracks.
TEST_F(PeerConnectionJsepTest, TwoVideoPlanBToUnifiedPlanFails) {
  RTCConfiguration config_planb;
  config_planb.sdp_semantics = SdpSemantics::kPlanB_DEPRECATED;
  auto caller = CreatePeerConnection(config_planb);
  auto callee = CreatePeerConnection();
  caller->AddVideoTrack("video1");
  caller->AddVideoTrack("video2");

  // A Plan B offer puts both video tracks in one m= section, which the
  // Unified Plan callee must reject.
  RTCError error;
  ASSERT_FALSE(callee->SetRemoteDescription(caller->CreateOffer(), &error));
  EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, error.type());
}
+
+// Test that a Unified Plan PeerConnection fails to set a Plan B answer if it
+// has two video tracks.
+TEST_F(PeerConnectionJsepTest, OneVideoUnifiedPlanToTwoVideoPlanBFails) {
+ auto caller = CreatePeerConnection();
+ RTCConfiguration config_planb;
+ config_planb.sdp_semantics = SdpSemantics::kPlanB_DEPRECATED;
+ auto callee = CreatePeerConnection(config_planb);
+ caller->AddVideoTrack("video");
+ callee->AddVideoTrack("video1");
+ callee->AddVideoTrack("video2");
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+
+ RTCError error;
+ ASSERT_FALSE(caller->SetRemoteDescription(caller->CreateAnswer(), &error));
+ EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, error.type());
+}
+
+// Removes the RTP header extension associated with the given URI from the media
+// description.
+static void RemoveRtpHeaderExtensionByUri(
+ MediaContentDescription* media_description,
+ absl::string_view uri) {
+ std::vector<RtpExtension> header_extensions =
+ media_description->rtp_header_extensions();
+ header_extensions.erase(std::remove_if(
+ header_extensions.begin(), header_extensions.end(),
+ [uri](const RtpExtension& extension) { return extension.uri == uri; }));
+ media_description->set_rtp_header_extensions(header_extensions);
+}
+
// Transforms a session description to emulate a legacy endpoint which does not
// support a=mid, BUNDLE, and the MID header extension.
static void ClearMids(SessionDescriptionInterface* sdesc) {
  cricket::SessionDescription* desc = sdesc->description();
  // Drop the BUNDLE group entirely, since it references sections by MID.
  desc->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE);
  // For each of the (at most one) audio and video sections: blank out the
  // content name (the MID), keep the transport info consistent with the new
  // empty name, and strip the MID RTP header extension.
  cricket::ContentInfo* audio_content = cricket::GetFirstAudioContent(desc);
  if (audio_content) {
    desc->GetTransportInfoByName(audio_content->name)->content_name = "";
    audio_content->name = "";
    RemoveRtpHeaderExtensionByUri(audio_content->media_description(),
                                  RtpExtension::kMidUri);
  }
  cricket::ContentInfo* video_content = cricket::GetFirstVideoContent(desc);
  if (video_content) {
    desc->GetTransportInfoByName(video_content->name)->content_name = "";
    video_content->name = "";
    RemoveRtpHeaderExtensionByUri(video_content->media_description(),
                                  RtpExtension::kMidUri);
  }
}
+
// Test that negotiation works with legacy endpoints which do not support a=mid.
TEST_F(PeerConnectionJsepTest, LegacyNoMidAudioOnlyOffer) {
  auto caller = CreatePeerConnection();
  caller->AddAudioTrack("audio");
  auto callee = CreatePeerConnection();
  callee->AddAudioTrack("audio");

  // Strip MIDs/BUNDLE from the offer to simulate a legacy remote endpoint.
  auto offer = caller->CreateOffer();
  ClearMids(offer.get());

  ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer)));
  EXPECT_TRUE(callee->SetLocalDescription(callee->CreateAnswer()));
}
// Same as LegacyNoMidAudioOnlyOffer, but with both audio and video sections.
TEST_F(PeerConnectionJsepTest, LegacyNoMidAudioVideoOffer) {
  auto caller = CreatePeerConnection();
  caller->AddAudioTrack("audio");
  caller->AddVideoTrack("video");
  auto callee = CreatePeerConnection();
  callee->AddAudioTrack("audio");
  callee->AddVideoTrack("video");

  auto offer = caller->CreateOffer();
  ClearMids(offer.get());

  ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer)));
  EXPECT_TRUE(callee->SetLocalDescription(callee->CreateAnswer()));
}
// Test that negotiation works when the legacy (MID-less) description is the
// answer rather than the offer.
TEST_F(PeerConnectionJsepTest, LegacyNoMidAudioOnlyAnswer) {
  auto caller = CreatePeerConnection();
  caller->AddAudioTrack("audio");
  auto callee = CreatePeerConnection();
  callee->AddAudioTrack("audio");

  ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));

  auto answer = callee->CreateAnswer();
  ClearMids(answer.get());

  EXPECT_TRUE(caller->SetRemoteDescription(std::move(answer)));
}
// Same as LegacyNoMidAudioOnlyAnswer, but with both audio and video sections.
TEST_F(PeerConnectionJsepTest, LegacyNoMidAudioVideoAnswer) {
  auto caller = CreatePeerConnection();
  caller->AddAudioTrack("audio");
  caller->AddVideoTrack("video");
  auto callee = CreatePeerConnection();
  callee->AddAudioTrack("audio");
  callee->AddVideoTrack("video");

  ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));

  auto answer = callee->CreateAnswer();
  ClearMids(answer.get());

  ASSERT_TRUE(caller->SetRemoteDescription(std::move(answer)));
}
+
// Test that negotiation works with legacy endpoints which do not support a=mid
// when setting two remote descriptions without setting a local description in
// between.
TEST_F(PeerConnectionJsepTest, LegacyNoMidTwoRemoteOffers) {
  auto caller = CreatePeerConnection();
  caller->AddAudioTrack("audio");
  auto callee = CreatePeerConnection();
  callee->AddAudioTrack("audio");

  auto offer = caller->CreateOffer();
  ClearMids(offer.get());

  // Apply the same MID-less offer twice in a row (clone first, then the
  // original) before producing an answer.
  ASSERT_TRUE(
      callee->SetRemoteDescription(CloneSessionDescription(offer.get())));
  ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer)));
  EXPECT_TRUE(callee->SetLocalDescription(callee->CreateAnswer()));
}
+
// Test that SetLocalDescription fails if a=mid lines are missing.
TEST_F(PeerConnectionJsepTest, SetLocalDescriptionFailsMissingMid) {
  auto caller = CreatePeerConnection();
  caller->AddAudioTrack("audio");

  auto offer = caller->CreateOffer();
  ClearMids(offer.get());

  // Unlike remote descriptions (where missing MIDs are tolerated for legacy
  // interop), a local description without MIDs must be rejected with this
  // exact error message.
  std::string error;
  ASSERT_FALSE(caller->SetLocalDescription(std::move(offer), &error));
  EXPECT_EQ(
      "Failed to set local offer sdp: A media section is missing a MID "
      "attribute.",
      error);
}
+
// Rollback (explicit via CreateRollback and implicit via an incoming offer)
// must succeed under Unified Plan semantics.
TEST_F(PeerConnectionJsepTest, RollbackSupportedInUnifiedPlan) {
  RTCConfiguration config;
  config.sdp_semantics = SdpSemantics::kUnifiedPlan;
  config.enable_implicit_rollback = true;
  auto caller = CreatePeerConnection(config);
  auto callee = CreatePeerConnection(config);
  // Explicit rollback of a local offer, via SetLocalDescription...
  EXPECT_TRUE(caller->CreateOfferAndSetAsLocal());
  EXPECT_TRUE(caller->SetLocalDescription(caller->CreateRollback()));
  // ...and via SetRemoteDescription.
  EXPECT_TRUE(caller->CreateOfferAndSetAsLocal());
  EXPECT_TRUE(caller->SetRemoteDescription(caller->CreateRollback()));
  // Implicit rollback: a remote offer arriving in have-local-offer succeeds.
  EXPECT_TRUE(caller->CreateOfferAndSetAsLocal());
  EXPECT_TRUE(caller->SetRemoteDescription(callee->CreateOffer()));
}
+
// The same three rollback paths must all fail under Plan B semantics.
TEST_F(PeerConnectionJsepTest, RollbackNotSupportedInPlanB) {
  RTCConfiguration config;
  config.sdp_semantics = SdpSemantics::kPlanB_DEPRECATED;
  config.enable_implicit_rollback = true;
  auto caller = CreatePeerConnection(config);
  auto callee = CreatePeerConnection(config);
  EXPECT_TRUE(caller->CreateOfferAndSetAsLocal());
  EXPECT_FALSE(caller->SetLocalDescription(caller->CreateRollback()));
  EXPECT_FALSE(caller->SetRemoteDescription(caller->CreateRollback()));
  EXPECT_FALSE(caller->SetRemoteDescription(callee->CreateOffer()));
}
+
// Rollback is only meaningful with a pending offer; in stable it must fail.
TEST_F(PeerConnectionJsepTest, RollbackFailsInStableState) {
  auto caller = CreatePeerConnection();
  EXPECT_FALSE(caller->SetLocalDescription(caller->CreateRollback()));
  EXPECT_FALSE(caller->SetRemoteDescription(caller->CreateRollback()));
}
+
// Rolling back a local offer (via either SetLocalDescription or
// SetRemoteDescription) returns to stable and clears the pending local
// description.
TEST_F(PeerConnectionJsepTest, RollbackToStableStateAndClearLocalOffer) {
  auto caller = CreatePeerConnection();
  EXPECT_TRUE(caller->CreateOfferAndSetAsLocal());
  EXPECT_TRUE(caller->SetLocalDescription(caller->CreateRollback()));
  EXPECT_EQ(caller->signaling_state(), PeerConnectionInterface::kStable);
  EXPECT_EQ(caller->pc()->pending_local_description(), nullptr);

  EXPECT_TRUE(caller->CreateOfferAndSetAsLocal());
  EXPECT_TRUE(caller->SetRemoteDescription(caller->CreateRollback()));
  EXPECT_EQ(caller->signaling_state(), PeerConnectionInterface::kStable);
  EXPECT_EQ(caller->pc()->pending_local_description(), nullptr);
}
+
// Rolling back a remote offer (via either SetRemoteDescription or
// SetLocalDescription) returns to stable and clears the pending remote
// description.
TEST_F(PeerConnectionJsepTest, RollbackToStableStateAndClearRemoteOffer) {
  auto caller = CreatePeerConnection();
  auto callee = CreatePeerConnection();
  EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOffer()));
  EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateRollback()));
  EXPECT_EQ(callee->signaling_state(), PeerConnectionInterface::kStable);
  EXPECT_EQ(callee->pc()->pending_remote_description(), nullptr);

  EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOffer()));
  EXPECT_TRUE(callee->SetLocalDescription(caller->CreateRollback()));
  EXPECT_EQ(callee->signaling_state(), PeerConnectionInterface::kStable);
  EXPECT_EQ(callee->pc()->pending_remote_description(), nullptr);
}
+
// With enable_implicit_rollback, a remote offer arriving in have-local-offer
// implicitly rolls back the local offer and is applied; no renegotiation is
// signaled afterwards.
TEST_F(PeerConnectionJsepTest, RollbackImplicitly) {
  RTCConfiguration config;
  config.sdp_semantics = SdpSemantics::kUnifiedPlan;
  config.enable_implicit_rollback = true;
  auto caller = CreatePeerConnection(config);
  auto callee = CreatePeerConnection(config);
  EXPECT_TRUE(callee->CreateOfferAndSetAsLocal());
  EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOffer()));
  EXPECT_EQ(callee->signaling_state(),
            PeerConnectionInterface::kHaveRemoteOffer);
  EXPECT_TRUE(callee->CreateAnswerAndSetAsLocal());
  EXPECT_FALSE(callee->observer()->legacy_renegotiation_needed());
  EXPECT_FALSE(callee->observer()->has_negotiation_needed_event());
}
+
// Implicit rollback where both sides have an audio track: the callee's track
// is attached in the answer, so no negotiation-needed events should fire and
// no tracks should be removed.
TEST_F(PeerConnectionJsepTest, RollbackImplicitlyNegotatiationNotNeeded) {
  RTCConfiguration config;
  config.sdp_semantics = SdpSemantics::kUnifiedPlan;
  config.enable_implicit_rollback = true;
  auto caller = CreatePeerConnection(config);
  auto callee = CreatePeerConnection(config);
  caller->AddAudioTrack("a");
  callee->AddAudioTrack("b");
  EXPECT_TRUE(callee->CreateOfferAndSetAsLocal());
  // Reset the observer so only events caused by the implicit rollback and the
  // subsequent answer are counted.
  callee->observer()->clear_legacy_renegotiation_needed();
  callee->observer()->clear_latest_negotiation_needed_event();
  EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOffer()));
  EXPECT_EQ(callee->signaling_state(),
            PeerConnectionInterface::kHaveRemoteOffer);
  EXPECT_TRUE(callee->CreateAnswerAndSetAsLocal());
  // No negotiation needed as track got attached in the answer.
  EXPECT_FALSE(callee->observer()->legacy_renegotiation_needed());
  EXPECT_FALSE(callee->observer()->has_negotiation_needed_event());
  EXPECT_EQ(callee->observer()->remove_track_events_.size(), 0u);
}
+
// Implicit rollback where only the callee has a track: after answering the
// caller's track-less offer, the callee's own track still needs negotiation,
// so negotiation-needed must fire (but only once the exchange completes).
TEST_F(PeerConnectionJsepTest, RollbackImplicitlyAndNegotiationNeeded) {
  RTCConfiguration config;
  config.sdp_semantics = SdpSemantics::kUnifiedPlan;
  config.enable_implicit_rollback = true;
  auto caller = CreatePeerConnection(config);
  auto callee = CreatePeerConnection(config);
  callee->AddAudioTrack("a");
  EXPECT_TRUE(callee->CreateOfferAndSetAsLocal());
  callee->observer()->clear_legacy_renegotiation_needed();
  callee->observer()->clear_latest_negotiation_needed_event();
  EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOffer()));
  EXPECT_EQ(callee->signaling_state(),
            PeerConnectionInterface::kHaveRemoteOffer);
  // Not yet: negotiation-needed is suppressed while not in stable.
  EXPECT_FALSE(callee->observer()->legacy_renegotiation_needed());
  EXPECT_FALSE(callee->observer()->has_negotiation_needed_event());
  EXPECT_TRUE(callee->CreateAnswerAndSetAsLocal());
  EXPECT_TRUE(callee->observer()->legacy_renegotiation_needed());
  EXPECT_TRUE(callee->observer()->has_negotiation_needed_event());
  EXPECT_EQ(callee->observer()->remove_track_events_.size(), 0u);
}
+
// If the incoming remote offer is unparsable, the implicit rollback must not
// happen: the connection stays in have-local-offer.
TEST_F(PeerConnectionJsepTest, AttemptToRollbackImplicitly) {
  RTCConfiguration config;
  config.sdp_semantics = SdpSemantics::kUnifiedPlan;
  config.enable_implicit_rollback = true;
  auto caller = CreatePeerConnection(config);
  auto callee = CreatePeerConnection(config);
  EXPECT_TRUE(callee->CreateOfferAndSetAsLocal());
  EXPECT_FALSE(callee->SetRemoteDescription(
      CreateSessionDescription(SdpType::kOffer, "invalid sdp")));
  EXPECT_EQ(callee->signaling_state(),
            PeerConnectionInterface::kHaveLocalOffer);
}
+
+TEST_F(PeerConnectionJsepTest, RollbackRemovesTransceiver) {
+ auto caller = CreatePeerConnection();
+ caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+ auto callee = CreatePeerConnection();
+ EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOffer()));
+ ASSERT_EQ(callee->pc()->GetTransceivers().size(), 1u);
+ auto transceiver = callee->pc()->GetTransceivers()[0];
+ EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateRollback()));
+ EXPECT_EQ(callee->pc()->GetTransceivers().size(), 0u);
+ EXPECT_EQ(callee->observer()->remove_track_events_.size(), 1u);
+ // The removed transceiver should be stopped and its receiver track should be
+ // ended.
+ EXPECT_TRUE(transceiver->stopping());
+ EXPECT_TRUE(transceiver->stopping());
+ EXPECT_EQ(transceiver->receiver()->track()->state(),
+ MediaStreamTrackInterface::kEnded);
+}
+
+TEST_F(PeerConnectionJsepTest, RollbackKeepsTransceiverAndClearsMid) {
+ auto caller = CreatePeerConnection();
+ caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+ auto callee = CreatePeerConnection();
+ EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOffer()));
+ callee->AddAudioTrack("a");
+ EXPECT_EQ(callee->pc()->GetTransceivers().size(), 1u);
+ EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateRollback()));
+ // Transceiver can't be removed as track was added to it.
+ EXPECT_EQ(callee->pc()->GetTransceivers().size(), 1u);
+ // Mid got cleared to make it reusable.
+ EXPECT_EQ(callee->pc()->GetTransceivers()[0]->mid(), absl::nullopt);
+ // Transceiver should be counted as addTrack-created after rollback.
+ EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOffer()));
+ EXPECT_EQ(callee->pc()->GetTransceivers().size(), 1u);
+ EXPECT_EQ(callee->observer()->remove_track_events_.size(), 1u);
+ // Because the transceiver is reusable, it must not be stopped and its
+ // receiver track must still be live.
+ auto transceiver = callee->pc()->GetTransceivers()[0];
+ EXPECT_FALSE(transceiver->stopping());
+ EXPECT_FALSE(transceiver->stopping());
+ EXPECT_EQ(transceiver->receiver()->track()->state(),
+ MediaStreamTrackInterface::kLive);
+}
+
// Same as RollbackKeepsTransceiverAndClearsMid, but the attached track is set
// back to null before the rollback; the transceiver must still be kept.
TEST_F(PeerConnectionJsepTest,
       RollbackKeepsTransceiverAfterAddTrackEvenWhenTrackIsNulled) {
  auto caller = CreatePeerConnection();
  caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
  auto callee = CreatePeerConnection();
  EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOffer()));
  callee->AddAudioTrack("a");
  // Null out the sender's track again before rolling back.
  callee->pc()->GetTransceivers()[0]->sender()->SetTrack(nullptr);
  EXPECT_EQ(callee->pc()->GetTransceivers()[0]->sender()->track(), nullptr);
  EXPECT_EQ(callee->pc()->GetTransceivers().size(), 1u);
  EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateRollback()));
  // Transceiver can't be removed as track was added to it.
  EXPECT_EQ(callee->pc()->GetTransceivers().size(), 1u);
  // Mid got cleared to make it reusable.
  EXPECT_EQ(callee->pc()->GetTransceivers()[0]->mid(), absl::nullopt);
  // Transceiver should be counted as addTrack-created after rollback.
  EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOffer()));
  EXPECT_EQ(callee->pc()->GetTransceivers().size(), 1u);
  EXPECT_EQ(callee->observer()->remove_track_events_.size(), 1u);
}
+
// After a rollback clears the mid assigned by the remote offer, a previously
// created local offer (with the transceiver's original mid) can still be
// applied.
TEST_F(PeerConnectionJsepTest, RollbackRestoresMid) {
  auto caller = CreatePeerConnection();
  caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
  auto callee = CreatePeerConnection();
  callee->AddAudioTrack("a");
  // Create (but don't apply) a local offer before the remote offer arrives.
  auto offer = callee->CreateOffer();
  EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOffer()));
  EXPECT_EQ(callee->pc()->GetTransceivers().size(), 1u);
  EXPECT_NE(callee->pc()->GetTransceivers()[0]->mid(), absl::nullopt);
  EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateRollback()));
  EXPECT_EQ(callee->pc()->GetTransceivers()[0]->mid(), absl::nullopt);
  EXPECT_TRUE(callee->SetLocalDescription(std::move(offer)));
}
+
// Applying a local offer alters the sender's init_send_encodings; rolling the
// offer back must restore the original encodings.
TEST_F(PeerConnectionJsepTest, RollbackRestoresInitSendEncodings) {
  auto caller = CreatePeerConnection();
  RtpTransceiverInit init;
  init.direction = RtpTransceiverDirection::kSendRecv;
  // Three simulcast layers identified by rid.
  RtpEncodingParameters encoding;
  encoding.rid = "hi";
  init.send_encodings.push_back(encoding);
  encoding.rid = "mid";
  init.send_encodings.push_back(encoding);
  encoding.rid = "lo";
  init.send_encodings.push_back(encoding);
  caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO, init);
  auto encodings =
      caller->pc()->GetTransceivers()[0]->sender()->init_send_encodings();
  EXPECT_TRUE(caller->SetLocalDescription(caller->CreateOffer()));
  EXPECT_NE(caller->pc()->GetTransceivers()[0]->sender()->init_send_encodings(),
            encodings);
  EXPECT_TRUE(caller->SetLocalDescription(caller->CreateRollback()));
  EXPECT_EQ(caller->pc()->GetTransceivers()[0]->sender()->init_send_encodings(),
            encodings);
}
+
// Changes made to the sender's parameters (deactivating an encoding) after
// negotiation must survive a rollback, and the regenerated offer must carry
// the identical a=simulcast line.
TEST_F(PeerConnectionJsepTest, RollbackDoesNotAffectSendEncodings) {
  auto caller = CreatePeerConnection();
  auto callee = CreatePeerConnection();
  RtpTransceiverInit init;
  init.direction = RtpTransceiverDirection::kSendOnly;
  RtpEncodingParameters encoding;
  encoding.rid = "hi";
  init.send_encodings.push_back(encoding);
  encoding.rid = "mid";
  init.send_encodings.push_back(encoding);
  encoding.rid = "lo";
  init.send_encodings.push_back(encoding);
  caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO, init);
  callee->AddTransceiver(cricket::MEDIA_TYPE_VIDEO);
  // Complete a full negotiation, then deactivate the first encoding.
  callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal());
  caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal());
  auto params = caller->pc()->GetTransceivers()[0]->sender()->GetParameters();
  EXPECT_TRUE(params.encodings[0].active);
  params.encodings[0].active = false;
  caller->pc()->GetTransceivers()[0]->sender()->SetParameters(params);
  // Capture the a=simulcast line of a fresh offer for later comparison.
  auto offer = caller->CreateOffer();
  std::string offer_string;
  EXPECT_TRUE(offer.get()->ToString(&offer_string));
  std::string simulcast_line =
      offer_string.substr(offer_string.find("a=simulcast"));
  EXPECT_FALSE(simulcast_line.empty());
  EXPECT_TRUE(caller->SetLocalDescription(std::move(offer)));
  EXPECT_TRUE(caller->SetLocalDescription(caller->CreateRollback()));
  // The deactivated encoding must still be inactive after rollback.
  EXPECT_FALSE(caller->pc()
                   ->GetTransceivers()[0]
                   ->sender()
                   ->GetParameters()
                   .encodings[0]
                   .active);
  offer = caller->CreateOffer();
  EXPECT_TRUE(offer.get()->ToString(&offer_string));
  EXPECT_EQ(offer_string.substr(offer_string.find("a=simulcast")),
            simulcast_line);
}
+
// A remote offer that both re-mids an existing (video) transceiver and creates
// a new (audio) one: rollback must remove the created transceiver and restore
// the original mid on the surviving one.
TEST_F(PeerConnectionJsepTest, RollbackRestoresMidAndRemovesTransceiver) {
  auto callee = CreatePeerConnection();
  callee->AddVideoTrack("a");
  auto offer = callee->CreateOffer();
  auto caller = CreatePeerConnection();
  caller->AddAudioTrack("b");
  caller->AddVideoTrack("c");
  auto mid = callee->pc()->GetTransceivers()[0]->mid();
  EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOffer()));
  EXPECT_EQ(callee->pc()->GetTransceivers().size(), 2u);
  EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateRollback()));
  EXPECT_EQ(callee->pc()->GetTransceivers().size(), 1u);
  EXPECT_EQ(callee->pc()->GetTransceivers()[0]->mid(), mid);
  EXPECT_EQ(callee->pc()->GetTransceivers()[0]->media_type(),
            cricket::MEDIA_TYPE_VIDEO);
  // The pre-rollback local offer is still applicable afterwards.
  EXPECT_TRUE(callee->SetLocalDescription(std::move(offer)));
  // Every add-track event from the rolled-back offer got a matching remove.
  EXPECT_EQ(callee->observer()->remove_track_events_.size(),
            callee->observer()->add_track_events_.size());
}
+
// Rolling back an offer that neither added nor removed anything must leave
// fully negotiated (stable) transceivers untouched and fire no events.
TEST_F(PeerConnectionJsepTest, RollbackHasNoEffectOnStableTransceivers) {
  auto callee = CreatePeerConnection();
  callee->AddVideoTrack("a");
  auto caller = CreatePeerConnection();
  caller->AddAudioTrack("b");
  caller->AddVideoTrack("c");
  EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
  EXPECT_TRUE(
      caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal()));
  // In stable don't add or remove anything.
  callee->observer()->clear_legacy_renegotiation_needed();
  callee->observer()->clear_latest_negotiation_needed_event();
  size_t transceiver_count = callee->pc()->GetTransceivers().size();
  auto mid_0 = callee->pc()->GetTransceivers()[0]->mid();
  auto mid_1 = callee->pc()->GetTransceivers()[1]->mid();
  EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOffer()));
  EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateRollback()));
  EXPECT_EQ(callee->pc()->GetTransceivers().size(), transceiver_count);
  EXPECT_EQ(callee->pc()->GetTransceivers()[0]->mid(), mid_0);
  EXPECT_EQ(callee->pc()->GetTransceivers()[1]->mid(), mid_1);
  EXPECT_EQ(callee->observer()->remove_track_events_.size(), 0u);
  EXPECT_FALSE(callee->observer()->legacy_renegotiation_needed());
  EXPECT_FALSE(callee->observer()->has_negotiation_needed_event());
}
+
// When an implicit rollback disassociates the callee's transceiver and the
// incoming offer uses the same mid, the remote mid goes to a new transceiver,
// and the next local offer picks a fresh mid for the original one.
TEST_F(PeerConnectionJsepTest, ImplicitlyRollbackTransceiversWithSameMids) {
  RTCConfiguration config;
  config.sdp_semantics = SdpSemantics::kUnifiedPlan;
  config.enable_implicit_rollback = true;
  auto caller = CreatePeerConnection(config);
  caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO);
  auto callee = CreatePeerConnection(config);
  callee->AddTransceiver(cricket::MEDIA_TYPE_VIDEO);
  EXPECT_TRUE(callee->CreateOfferAndSetAsLocal());
  auto initial_mid = callee->pc()->GetTransceivers()[0]->mid();
  EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
  EXPECT_EQ(callee->pc()->GetTransceivers().size(), 2u);
  // The implicitly rolled-back transceiver lost its mid; the offer's mid is
  // now held by a newly created transceiver.
  EXPECT_EQ(callee->pc()->GetTransceivers()[0]->mid(), absl::nullopt);
  EXPECT_EQ(callee->pc()->GetTransceivers()[1]->mid(),
            caller->pc()->GetTransceivers()[0]->mid());
  EXPECT_TRUE(callee->CreateAnswerAndSetAsLocal());  // Go to stable.
  EXPECT_TRUE(callee->CreateOfferAndSetAsLocal());
  EXPECT_NE(callee->pc()->GetTransceivers()[0]->mid(), initial_mid);
}
+
// With max-bundle, rolling back an offer that added a video section must keep
// the already-negotiated audio transport working while tearing the video
// section's transport back down.
TEST_F(PeerConnectionJsepTest, RollbackToNegotiatedStableState) {
  RTCConfiguration config;
  config.sdp_semantics = SdpSemantics::kUnifiedPlan;
  config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxBundle;
  auto caller = CreatePeerConnection(config);
  caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
  auto callee = CreatePeerConnection(config);
  EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
  EXPECT_TRUE(callee->CreateAnswerAndSetAsLocal());
  caller->AddVideoTrack("a");
  callee->AddVideoTrack("b");
  EXPECT_TRUE(callee->CreateOfferAndSetAsLocal());
  EXPECT_EQ(callee->pc()->GetTransceivers().size(), 2u);
  auto audio_transport =
      callee->pc()->GetTransceivers()[0]->sender()->dtls_transport();
  // Max-bundle: both senders share the same (non-null) DTLS transport.
  EXPECT_EQ(callee->pc()->GetTransceivers()[0]->sender()->dtls_transport(),
            callee->pc()->GetTransceivers()[1]->sender()->dtls_transport());
  EXPECT_NE(callee->pc()->GetTransceivers()[1]->sender()->dtls_transport(),
            nullptr);
  EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateRollback()));
  EXPECT_EQ(callee->pc()->GetTransceivers()[0]->sender()->dtls_transport(),
            audio_transport);  // Audio must remain working after rollback.
  EXPECT_EQ(callee->pc()->GetTransceivers()[1]->sender()->dtls_transport(),
            nullptr);
  EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));

  EXPECT_EQ(callee->pc()->GetTransceivers()[0]->sender()->dtls_transport(),
            audio_transport);  // Audio transport is still the same.
}
+
// Rolling back the only (never remotely answered) offer must destroy its
// transport: re-offering afterwards creates a different DTLS transport.
TEST_F(PeerConnectionJsepTest, RollbackHasToDestroyTransport) {
  RTCConfiguration config;
  config.sdp_semantics = SdpSemantics::kUnifiedPlan;
  config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxBundle;
  auto pc = CreatePeerConnection(config);
  pc->AddAudioTrack("a");
  pc->AddVideoTrack("b");
  EXPECT_TRUE(pc->CreateOfferAndSetAsLocal());
  auto offer = pc->CreateOffer();
  EXPECT_EQ(pc->pc()->GetTransceivers().size(), 2u);
  auto audio_transport =
      pc->pc()->GetTransceivers()[0]->sender()->dtls_transport();
  EXPECT_EQ(pc->pc()->GetTransceivers()[0]->sender()->dtls_transport(),
            pc->pc()->GetTransceivers()[1]->sender()->dtls_transport());
  EXPECT_NE(pc->pc()->GetTransceivers()[1]->sender()->dtls_transport(),
            nullptr);
  EXPECT_TRUE(pc->SetRemoteDescription(pc->CreateRollback()));
  EXPECT_TRUE(pc->SetLocalDescription(std::move(offer)));
  // A transport exists again, but it is a new instance, not the rolled-back
  // one.
  EXPECT_NE(pc->pc()->GetTransceivers()[0]->sender()->dtls_transport(),
            nullptr);
  EXPECT_NE(pc->pc()->GetTransceivers()[0]->sender()->dtls_transport(),
            audio_transport);
}
+
// Rolling back a local offer that changed the transceiver direction must keep
// the direction attribute (it only takes effect after a full negotiation) and
// keep the existing receive transport alive.
TEST_F(PeerConnectionJsepTest, RollbackLocalDirectionChange) {
  auto caller = CreatePeerConnection();
  caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
  auto callee = CreatePeerConnection();
  EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
  EXPECT_TRUE(
      caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal()));
  callee->AddAudioTrack("a");
  callee->pc()->GetTransceivers()[0]->SetDirectionWithError(
      RtpTransceiverDirection::kSendOnly);
  EXPECT_TRUE(callee->CreateOfferAndSetAsLocal());
  EXPECT_EQ(callee->pc()->GetTransceivers().size(), 1u);
  auto audio_transport =
      callee->pc()->GetTransceivers()[0]->receiver()->dtls_transport();
  EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateRollback()));
  EXPECT_EQ(callee->pc()->GetTransceivers()[0]->direction(),
            RtpTransceiverDirection::kSendOnly);
  // One way audio must remain working after rollback as local direction change
  // comes in effect after completing full negotiation round.
  EXPECT_EQ(callee->pc()->GetTransceivers()[0]->receiver()->dtls_transport(),
            audio_transport);
}
+
// Rolling back a remote offer that made the remote side receive-only must
// restore the transceiver's state and keep the send transport alive, without
// firing an extra remove-track event.
TEST_F(PeerConnectionJsepTest, RollbackRemoteDirectionChange) {
  auto caller = CreatePeerConnection();
  auto caller_transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
  auto callee = CreatePeerConnection();
  callee->AddAudioTrack("a");
  EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
  EXPECT_TRUE(
      caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal()));
  // In stable make remote audio receive only.
  caller_transceiver->SetDirectionWithError(RtpTransceiverDirection::kRecvOnly);
  EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
  EXPECT_EQ(callee->pc()->GetTransceivers().size(), 1u);
  // The direction attribute is not modified by the offer.
  EXPECT_EQ(callee->pc()->GetTransceivers()[0]->direction(),
            RtpTransceiverDirection::kSendRecv);
  auto audio_transport =
      callee->pc()->GetTransceivers()[0]->sender()->dtls_transport();
  // The recv-only offer already triggered one remove-track event.
  EXPECT_EQ(callee->observer()->add_track_events_.size(), 1u);
  EXPECT_EQ(callee->observer()->remove_track_events_.size(), 1u);
  EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateRollback()));
  EXPECT_EQ(callee->pc()->GetTransceivers().size(), 1u);
  EXPECT_EQ(callee->pc()->GetTransceivers()[0]->direction(),
            RtpTransceiverDirection::kSendRecv);
  // One way audio must remain working after rollback.
  EXPECT_EQ(callee->pc()->GetTransceivers()[0]->sender()->dtls_transport(),
            audio_transport);
  EXPECT_EQ(callee->observer()->remove_track_events_.size(), 1u);
}
+
// Rolling back a remote offer clears the transceiver's fired_direction, so
// re-applying the same offer makes OnTrack fire a second time on the same
// (reused) transceiver.
TEST_F(PeerConnectionJsepTest,
       RollbackRestoresFiredDirectionAndOnTrackCanFireAgain) {
  auto caller = CreatePeerConnection();
  auto caller_transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
  auto callee = CreatePeerConnection();
  callee->AddAudioTrack("a");
  ASSERT_EQ(callee->pc()->GetTransceivers().size(), 1u);
  auto callee_transceiver = callee->pc()->GetTransceivers()[0];
  EXPECT_FALSE(callee_transceiver->fired_direction().has_value());
  EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
  EXPECT_TRUE(callee_transceiver->fired_direction().has_value());
  EXPECT_EQ(callee->observer()->add_track_events_.size(), 1u);
  // The existing transceiver becomes associated. Because it already exists,
  // rolling it back does not remove the transceiver, so if ontrack fires again
  // later it will be because the transceiver's internal states were restored
  // rather than due to the creation of a new transceiver.
  EXPECT_EQ(callee->pc()->GetTransceivers().size(), 1u);

  // Rollback: the transceiver is no longer receiving.
  EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateRollback()));
  EXPECT_FALSE(callee_transceiver->fired_direction().has_value());
  EXPECT_EQ(callee->pc()->GetTransceivers().size(), 1u);

  // Set the remote offer again: ontrack should fire on the same transceiver.
  EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
  EXPECT_TRUE(callee_transceiver->fired_direction().has_value());
  EXPECT_EQ(callee->observer()->add_track_events_.size(), 2u);
  EXPECT_EQ(callee->pc()->GetTransceivers().size(), 1u);
}
+
+TEST_F(PeerConnectionJsepTest,
+ RollbackFromInactiveToReceivingMakesOnTrackFire) {
+ auto caller = CreatePeerConnection();
+ auto caller_transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+ auto callee = CreatePeerConnection();
+ // Perform full O/A so that transceiver is associated. Ontrack fires.
+ EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ EXPECT_EQ(callee->observer()->add_track_events_.size(), 1u);
+ EXPECT_EQ(callee->observer()->remove_track_events_.size(), 0u);
+ ASSERT_TRUE(
+ caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal()));
+
+ // Start negotiating to make the transceiver inactive. Onremovetrack fires.
+ caller_transceiver->SetDirectionWithError(RtpTransceiverDirection::kInactive);
+ EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ EXPECT_EQ(callee->observer()->add_track_events_.size(), 1u);
+ EXPECT_EQ(callee->observer()->remove_track_events_.size(), 1u);
+
+ // Rollback the inactivation. Ontrack should fire again.
+ EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateRollback()));
+ EXPECT_EQ(callee->observer()->add_track_events_.size(), 2u);
+ EXPECT_EQ(callee->observer()->remove_track_events_.size(), 1u);
+}
+
+TEST_F(PeerConnectionJsepTest, RollbackAfterMultipleSLD) {
+ auto callee = CreatePeerConnection();
+ callee->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+ EXPECT_TRUE(callee->CreateOfferAndSetAsLocal());
+ callee->AddTransceiver(cricket::MEDIA_TYPE_VIDEO);
+ EXPECT_TRUE(callee->CreateOfferAndSetAsLocal());
+ callee->observer()->clear_legacy_renegotiation_needed();
+ callee->observer()->clear_latest_negotiation_needed_event();
+ EXPECT_TRUE(callee->SetRemoteDescription(callee->CreateRollback()));
+ EXPECT_TRUE(callee->observer()->legacy_renegotiation_needed());
+ EXPECT_TRUE(callee->observer()->has_negotiation_needed_event());
+ EXPECT_EQ(callee->pc()->GetTransceivers().size(), 2u);
+ EXPECT_EQ(callee->pc()->GetTransceivers()[0]->mid(), absl::nullopt);
+ EXPECT_EQ(callee->pc()->GetTransceivers()[1]->mid(), absl::nullopt);
+}
+
+TEST_F(PeerConnectionJsepTest, NoRollbackNeeded) {
+ auto caller = CreatePeerConnection();
+ caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+ auto callee = CreatePeerConnection();
+ callee->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+ EXPECT_TRUE(caller->CreateOfferAndSetAsLocal());
+ EXPECT_TRUE(caller->CreateOfferAndSetAsLocal());
+ EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOffer()));
+ EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOffer()));
+}
+
+TEST_F(PeerConnectionJsepTest, RollbackMultipleStreamChanges) {
+ auto callee = CreatePeerConnection();
+ auto caller = CreatePeerConnection();
+ caller->AddAudioTrack("a_1", {"id_1"});
+ caller->AddVideoTrack("v_0", {"id_0"}); // Provide an extra stream id.
+ EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ EXPECT_TRUE(
+ caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal()));
+ caller->pc()->GetTransceivers()[0]->sender()->SetStreams({"id_2"});
+ EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ caller->pc()->GetTransceivers()[0]->sender()->SetStreams({"id_3"});
+ EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ EXPECT_EQ(callee->pc()->GetTransceivers()[0]->receiver()->stream_ids()[0],
+ "id_3");
+ EXPECT_TRUE(callee->SetRemoteDescription(callee->CreateRollback()));
+ EXPECT_EQ(callee->pc()->GetTransceivers()[0]->receiver()->stream_ids().size(),
+ 1u);
+ EXPECT_EQ(callee->pc()->GetTransceivers()[0]->receiver()->stream_ids()[0],
+ "id_1");
+}
+
+TEST_F(PeerConnectionJsepTest, DataChannelImplicitRollback) {
+ RTCConfiguration config;
+ config.sdp_semantics = SdpSemantics::kUnifiedPlan;
+ config.enable_implicit_rollback = true;
+ auto caller = CreatePeerConnection(config);
+ caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO);
+ auto callee = CreatePeerConnection(config);
+ callee->CreateDataChannel("dummy");
+ EXPECT_TRUE(callee->CreateOfferAndSetAsLocal());
+ EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOffer()));
+ EXPECT_TRUE(callee->CreateAnswerAndSetAsLocal());
+ EXPECT_TRUE(callee->observer()->legacy_renegotiation_needed());
+ EXPECT_TRUE(callee->observer()->has_negotiation_needed_event());
+ EXPECT_TRUE(callee->CreateOfferAndSetAsLocal());
+}
+
+TEST_F(PeerConnectionJsepTest, RollbackRemoteDataChannelThenAddTransceiver) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+ caller->CreateDataChannel("dummy");
+ EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOffer()));
+ EXPECT_TRUE(callee->SetRemoteDescription(callee->CreateRollback()));
+ callee->AddTransceiver(cricket::MEDIA_TYPE_VIDEO);
+ EXPECT_TRUE(callee->CreateOfferAndSetAsLocal());
+}
+
+TEST_F(PeerConnectionJsepTest,
+ RollbackRemoteDataChannelThenAddTransceiverAndDataChannel) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+ caller->CreateDataChannel("dummy");
+ EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOffer()));
+ EXPECT_TRUE(callee->SetRemoteDescription(callee->CreateRollback()));
+ callee->AddTransceiver(cricket::MEDIA_TYPE_VIDEO);
+ callee->CreateDataChannel("dummy");
+ EXPECT_TRUE(callee->CreateOfferAndSetAsLocal());
+}
+
+TEST_F(PeerConnectionJsepTest, RollbackRemoteDataChannelThenAddDataChannel) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+ caller->CreateDataChannel("dummy");
+ EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOffer()));
+ EXPECT_TRUE(callee->SetRemoteDescription(callee->CreateRollback()));
+ callee->CreateDataChannel("dummy");
+ EXPECT_TRUE(callee->CreateOfferAndSetAsLocal());
+}
+
+TEST_F(PeerConnectionJsepTest, RollbackRemoteTransceiverThenAddDataChannel) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+ caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO);
+ EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOffer()));
+ EXPECT_TRUE(callee->SetRemoteDescription(callee->CreateRollback()));
+ callee->CreateDataChannel("dummy");
+ EXPECT_TRUE(callee->CreateOfferAndSetAsLocal());
+}
+
+TEST_F(PeerConnectionJsepTest,
+ RollbackRemoteTransceiverThenAddDataChannelAndTransceiver) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+ caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO);
+ EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOffer()));
+ EXPECT_TRUE(callee->SetRemoteDescription(callee->CreateRollback()));
+ callee->CreateDataChannel("dummy");
+ callee->AddTransceiver(cricket::MEDIA_TYPE_VIDEO);
+ EXPECT_TRUE(callee->CreateOfferAndSetAsLocal());
+}
+
+TEST_F(PeerConnectionJsepTest, BundleOnlySectionDoesNotNeedRtcpMux) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+ caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+ caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO);
+ auto offer = caller->CreateOffer();
+ // Remove rtcp-mux and set bundle-only on the second content.
+ offer->description()->contents()[1].media_description()->set_rtcp_mux(false);
+ offer->description()->contents()[1].bundle_only = true;
+
+ EXPECT_TRUE(callee->SetRemoteDescription(std::move(offer)));
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/peer_connection_media_unittest.cc b/third_party/libwebrtc/pc/peer_connection_media_unittest.cc
new file mode 100644
index 0000000000..485541981e
--- /dev/null
+++ b/third_party/libwebrtc/pc/peer_connection_media_unittest.cc
@@ -0,0 +1,2137 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file contains tests that check the interaction between the
+// PeerConnection and the underlying media engine, as well as tests that check
+// the media-related aspects of SDP.
+
+#include <algorithm>
+#include <functional>
+#include <iterator>
+#include <map>
+#include <memory>
+#include <set>
+#include <string>
+#include <tuple>
+#include <type_traits>
+#include <utility>
+#include <vector>
+
+#include "absl/algorithm/container.h"
+#include "absl/types/optional.h"
+#include "api/audio_options.h"
+#include "api/call/call_factory_interface.h"
+#include "api/jsep.h"
+#include "api/media_types.h"
+#include "api/peer_connection_interface.h"
+#include "api/rtc_error.h"
+#include "api/rtc_event_log/rtc_event_log_factory.h"
+#include "api/rtc_event_log/rtc_event_log_factory_interface.h"
+#include "api/rtp_parameters.h"
+#include "api/rtp_sender_interface.h"
+#include "api/rtp_transceiver_direction.h"
+#include "api/rtp_transceiver_interface.h"
+#include "api/scoped_refptr.h"
+#include "api/task_queue/default_task_queue_factory.h"
+#include "api/task_queue/task_queue_factory.h"
+#include "media/base/codec.h"
+#include "media/base/fake_media_engine.h"
+#include "media/base/media_channel.h"
+#include "media/base/media_constants.h"
+#include "media/base/media_engine.h"
+#include "media/base/stream_params.h"
+#include "p2p/base/fake_port_allocator.h"
+#include "p2p/base/p2p_constants.h"
+#include "p2p/base/port_allocator.h"
+#include "p2p/base/transport_info.h"
+#include "pc/channel_interface.h"
+#include "pc/media_session.h"
+#include "pc/peer_connection_wrapper.h"
+#include "pc/rtp_media_utils.h"
+#include "pc/rtp_transceiver.h"
+#include "pc/session_description.h"
+#include "pc/test/mock_peer_connection_observers.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/rtc_certificate_generator.h"
+#include "rtc_base/thread.h"
+#include "test/gtest.h"
+#include "test/scoped_key_value_config.h"
+#ifdef WEBRTC_ANDROID
+#include "pc/test/android_test_initializer.h"
+#endif
+#include "rtc_base/gunit.h"
+#include "rtc_base/virtual_socket_server.h"
+#include "test/gmock.h"
+
+namespace webrtc {
+
+using cricket::FakeMediaEngine;
+using RTCConfiguration = PeerConnectionInterface::RTCConfiguration;
+using RTCOfferAnswerOptions = PeerConnectionInterface::RTCOfferAnswerOptions;
+using ::testing::Bool;
+using ::testing::Combine;
+using ::testing::ElementsAre;
+using ::testing::NotNull;
+using ::testing::Values;
+
+cricket::MediaSendChannelInterface* SendChannelInternal(
+ rtc::scoped_refptr<webrtc::RtpTransceiverInterface> transceiver) {
+ auto transceiver_with_internal = static_cast<rtc::RefCountedObject<
+ webrtc::RtpTransceiverProxyWithInternal<webrtc::RtpTransceiver>>*>(
+ transceiver.get());
+ auto transceiver_internal =
+ static_cast<RtpTransceiver*>(transceiver_with_internal->internal());
+ return transceiver_internal->channel()->media_send_channel();
+}
+
+cricket::MediaReceiveChannelInterface* ReceiveChannelInternal(
+ rtc::scoped_refptr<webrtc::RtpTransceiverInterface> transceiver) {
+ auto transceiver_with_internal = static_cast<rtc::RefCountedObject<
+ webrtc::RtpTransceiverProxyWithInternal<webrtc::RtpTransceiver>>*>(
+ transceiver.get());
+ auto transceiver_internal =
+ static_cast<RtpTransceiver*>(transceiver_with_internal->internal());
+ return transceiver_internal->channel()->media_receive_channel();
+}
+
+cricket::FakeVideoMediaSendChannel* VideoMediaSendChannel(
+ rtc::scoped_refptr<webrtc::RtpTransceiverInterface> transceiver) {
+ return static_cast<cricket::FakeVideoMediaSendChannel*>(
+ SendChannelInternal(transceiver));
+}
+cricket::FakeVideoMediaReceiveChannel* VideoMediaReceiveChannel(
+ rtc::scoped_refptr<webrtc::RtpTransceiverInterface> transceiver) {
+ return static_cast<cricket::FakeVideoMediaReceiveChannel*>(
+ ReceiveChannelInternal(transceiver));
+}
+cricket::FakeVoiceMediaSendChannel* VoiceMediaSendChannel(
+ rtc::scoped_refptr<webrtc::RtpTransceiverInterface> transceiver) {
+ return static_cast<cricket::FakeVoiceMediaSendChannel*>(
+ SendChannelInternal(transceiver));
+}
+cricket::FakeVoiceMediaReceiveChannel* VoiceMediaReceiveChannel(
+ rtc::scoped_refptr<webrtc::RtpTransceiverInterface> transceiver) {
+ return static_cast<cricket::FakeVoiceMediaReceiveChannel*>(
+ ReceiveChannelInternal(transceiver));
+}
+
+class PeerConnectionWrapperForMediaTest : public PeerConnectionWrapper {
+ public:
+ using PeerConnectionWrapper::PeerConnectionWrapper;
+
+ FakeMediaEngine* media_engine() { return media_engine_; }
+ void set_media_engine(FakeMediaEngine* media_engine) {
+ media_engine_ = media_engine;
+ }
+
+ private:
+ FakeMediaEngine* media_engine_;
+};
+
+class PeerConnectionMediaBaseTest : public ::testing::Test {
+ protected:
+ typedef std::unique_ptr<PeerConnectionWrapperForMediaTest> WrapperPtr;
+
+ explicit PeerConnectionMediaBaseTest(SdpSemantics sdp_semantics)
+ : vss_(new rtc::VirtualSocketServer()),
+ main_(vss_.get()),
+ sdp_semantics_(sdp_semantics) {
+#ifdef WEBRTC_ANDROID
+ InitializeAndroidObjects();
+#endif
+ }
+
+ WrapperPtr CreatePeerConnection() {
+ return CreatePeerConnection(RTCConfiguration());
+ }
+
+ WrapperPtr CreatePeerConnection(const RTCConfiguration& config) {
+ return CreatePeerConnection(config, std::make_unique<FakeMediaEngine>());
+ }
+
+ WrapperPtr CreatePeerConnection(
+ std::unique_ptr<FakeMediaEngine> media_engine) {
+ return CreatePeerConnection(RTCConfiguration(), std::move(media_engine));
+ }
+
+ // Creates PeerConnectionFactory and PeerConnection for given configuration.
+ WrapperPtr CreatePeerConnection(
+ const RTCConfiguration& config,
+ std::unique_ptr<FakeMediaEngine> media_engine) {
+ auto* media_engine_ptr = media_engine.get();
+
+ PeerConnectionFactoryDependencies factory_dependencies;
+
+ factory_dependencies.network_thread = rtc::Thread::Current();
+ factory_dependencies.worker_thread = rtc::Thread::Current();
+ factory_dependencies.signaling_thread = rtc::Thread::Current();
+ factory_dependencies.task_queue_factory = CreateDefaultTaskQueueFactory();
+ factory_dependencies.media_engine = std::move(media_engine);
+ factory_dependencies.call_factory = CreateCallFactory();
+ factory_dependencies.event_log_factory =
+ std::make_unique<RtcEventLogFactory>(
+ factory_dependencies.task_queue_factory.get());
+
+ auto pc_factory =
+ CreateModularPeerConnectionFactory(std::move(factory_dependencies));
+
+ auto fake_port_allocator = std::make_unique<cricket::FakePortAllocator>(
+ rtc::Thread::Current(),
+ std::make_unique<rtc::BasicPacketSocketFactory>(vss_.get()),
+ &field_trials_);
+ auto observer = std::make_unique<MockPeerConnectionObserver>();
+ auto modified_config = config;
+ modified_config.sdp_semantics = sdp_semantics_;
+ PeerConnectionDependencies pc_dependencies(observer.get());
+ pc_dependencies.allocator = std::move(fake_port_allocator);
+ auto result = pc_factory->CreatePeerConnectionOrError(
+ modified_config, std::move(pc_dependencies));
+ if (!result.ok()) {
+ return nullptr;
+ }
+
+ auto pc = result.MoveValue();
+ observer->SetPeerConnectionInterface(pc.get());
+ auto wrapper = std::make_unique<PeerConnectionWrapperForMediaTest>(
+ pc_factory, pc, std::move(observer));
+ wrapper->set_media_engine(media_engine_ptr);
+ return wrapper;
+ }
+
+ // Accepts the same arguments as CreatePeerConnection and adds default audio
+ // track (but no video).
+ template <typename... Args>
+ WrapperPtr CreatePeerConnectionWithAudio(Args&&... args) {
+ auto wrapper = CreatePeerConnection(std::forward<Args>(args)...);
+ if (!wrapper) {
+ return nullptr;
+ }
+ wrapper->AddAudioTrack("a");
+ return wrapper;
+ }
+
+ // Accepts the same arguments as CreatePeerConnection and adds default video
+ // track (but no audio).
+ template <typename... Args>
+ WrapperPtr CreatePeerConnectionWithVideo(Args&&... args) {
+ auto wrapper = CreatePeerConnection(std::forward<Args>(args)...);
+ if (!wrapper) {
+ return nullptr;
+ }
+ wrapper->AddVideoTrack("v");
+ return wrapper;
+ }
+
+ // Accepts the same arguments as CreatePeerConnection and adds default audio
+ // and video tracks.
+ template <typename... Args>
+ WrapperPtr CreatePeerConnectionWithAudioVideo(Args&&... args) {
+ auto wrapper = CreatePeerConnection(std::forward<Args>(args)...);
+ if (!wrapper) {
+ return nullptr;
+ }
+ wrapper->AddAudioTrack("a");
+ wrapper->AddVideoTrack("v");
+ return wrapper;
+ }
+
+ RtpTransceiverDirection GetMediaContentDirection(
+ const SessionDescriptionInterface* sdesc,
+ cricket::MediaType media_type) {
+ auto* content =
+ cricket::GetFirstMediaContent(sdesc->description(), media_type);
+ RTC_DCHECK(content);
+ return content->media_description()->direction();
+ }
+
+ bool IsUnifiedPlan() const {
+ return sdp_semantics_ == SdpSemantics::kUnifiedPlan;
+ }
+
+ webrtc::test::ScopedKeyValueConfig field_trials_;
+ std::unique_ptr<rtc::VirtualSocketServer> vss_;
+ rtc::AutoSocketServerThread main_;
+ const SdpSemantics sdp_semantics_;
+};
+
+class PeerConnectionMediaTest
+ : public PeerConnectionMediaBaseTest,
+ public ::testing::WithParamInterface<SdpSemantics> {
+ protected:
+ PeerConnectionMediaTest() : PeerConnectionMediaBaseTest(GetParam()) {}
+};
+
+class PeerConnectionMediaTestUnifiedPlan : public PeerConnectionMediaBaseTest {
+ protected:
+ PeerConnectionMediaTestUnifiedPlan()
+ : PeerConnectionMediaBaseTest(SdpSemantics::kUnifiedPlan) {}
+};
+
+class PeerConnectionMediaTestPlanB : public PeerConnectionMediaBaseTest {
+ protected:
+ PeerConnectionMediaTestPlanB()
+ : PeerConnectionMediaBaseTest(SdpSemantics::kPlanB_DEPRECATED) {}
+};
+
+TEST_P(PeerConnectionMediaTest,
+ FailToSetRemoteDescriptionIfCreateMediaChannelFails) {
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnectionWithAudioVideo();
+ callee->media_engine()->set_fail_create_channel(true);
+
+ std::string error;
+ ASSERT_FALSE(callee->SetRemoteDescription(caller->CreateOffer(), &error));
+ EXPECT_PRED_FORMAT2(AssertStartsWith, error,
+ "Failed to set remote offer sdp: Failed to create");
+}
+
+TEST_P(PeerConnectionMediaTest,
+ FailToSetLocalDescriptionIfCreateMediaChannelFails) {
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ caller->media_engine()->set_fail_create_channel(true);
+
+ std::string error;
+ ASSERT_FALSE(caller->SetLocalDescription(caller->CreateOffer(), &error));
+ EXPECT_PRED_FORMAT2(AssertStartsWith, error,
+ "Failed to set local offer sdp: Failed to create");
+}
+
+std::vector<std::string> GetIds(
+ const std::vector<cricket::StreamParams>& streams) {
+ std::vector<std::string> ids;
+ ids.reserve(streams.size());
+ for (const auto& stream : streams) {
+ ids.push_back(stream.id);
+ }
+ return ids;
+}
+
+// Test that exchanging an offer and answer with each side having an audio and
+// video stream creates the appropriate send/recv streams in the underlying
+// media engine on both sides.
+TEST_F(PeerConnectionMediaTestUnifiedPlan,
+ AudioVideoOfferAnswerCreateSendRecvStreams) {
+ const std::string kCallerAudioId = "caller_a";
+ const std::string kCallerVideoId = "caller_v";
+ const std::string kCalleeAudioId = "callee_a";
+ const std::string kCalleeVideoId = "callee_v";
+
+ auto caller = CreatePeerConnection();
+ caller->AddAudioTrack(kCallerAudioId);
+ caller->AddVideoTrack(kCallerVideoId);
+
+ auto callee = CreatePeerConnection();
+ callee->AddAudioTrack(kCalleeAudioId);
+ callee->AddVideoTrack(kCalleeVideoId);
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ ASSERT_TRUE(
+ caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal()));
+
+ auto* caller_voice_send =
+ VoiceMediaSendChannel(caller->pc()->GetTransceivers()[0]);
+ auto* caller_voice_receive =
+ VoiceMediaReceiveChannel(caller->pc()->GetTransceivers()[0]);
+ EXPECT_THAT(GetIds(caller_voice_receive->recv_streams()),
+ ElementsAre(kCalleeAudioId));
+ EXPECT_THAT(GetIds(caller_voice_send->send_streams()),
+ ElementsAre(kCallerAudioId));
+
+ auto* caller_video_send =
+ VideoMediaSendChannel(caller->pc()->GetTransceivers()[1]);
+ auto* caller_video_receive =
+ VideoMediaReceiveChannel(caller->pc()->GetTransceivers()[1]);
+ EXPECT_THAT(GetIds(caller_video_receive->recv_streams()),
+ ElementsAre(kCalleeVideoId));
+ EXPECT_THAT(GetIds(caller_video_send->send_streams()),
+ ElementsAre(kCallerVideoId));
+
+ auto* callee_voice_send =
+ VoiceMediaSendChannel(callee->pc()->GetTransceivers()[0]);
+ auto* callee_voice_receive =
+ VoiceMediaReceiveChannel(callee->pc()->GetTransceivers()[0]);
+ EXPECT_THAT(GetIds(callee_voice_receive->recv_streams()),
+ ElementsAre(kCallerAudioId));
+ EXPECT_THAT(GetIds(callee_voice_send->send_streams()),
+ ElementsAre(kCalleeAudioId));
+
+ auto* callee_video_send =
+ VideoMediaSendChannel(callee->pc()->GetTransceivers()[1]);
+ auto* callee_video_receive =
+ VideoMediaReceiveChannel(callee->pc()->GetTransceivers()[1]);
+ EXPECT_THAT(GetIds(callee_video_receive->recv_streams()),
+ ElementsAre(kCallerVideoId));
+ EXPECT_THAT(GetIds(callee_video_send->send_streams()),
+ ElementsAre(kCalleeVideoId));
+}
+
+// Test that stopping the caller transceivers causes the media channels on the
+// callee to be destroyed after calling SetRemoteDescription on the generated
+// offer.
+// See next test for equivalent behavior with Plan B semantics.
+TEST_F(PeerConnectionMediaTestUnifiedPlan,
+ StoppedRemoteTransceiversRemovesMediaChannels) {
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnection();
+
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+
+ // Stop both audio and video transceivers on the caller.
+ auto transceivers = caller->pc()->GetTransceivers();
+ ASSERT_EQ(2u, transceivers.size());
+ transceivers[0]->StopInternal();
+ transceivers[1]->StopInternal();
+
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+
+ ASSERT_TRUE(callee->pc()->GetTransceivers().empty());
+}
+
+// Test that removing streams from a subsequent offer causes the receive streams
+// on the callee to be removed.
+// See previous test for equivalent behavior with Unified Plan semantics.
+TEST_F(PeerConnectionMediaTestPlanB, EmptyRemoteOfferRemovesRecvStreams) {
+ auto caller = CreatePeerConnection();
+ auto caller_audio_track = caller->AddAudioTrack("a");
+ auto caller_video_track = caller->AddVideoTrack("v");
+ auto callee = CreatePeerConnectionWithAudioVideo();
+
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+
+ // Remove both tracks from caller.
+ caller->pc()->RemoveTrackOrError(caller_audio_track);
+ caller->pc()->RemoveTrackOrError(caller_video_track);
+
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+
+ EXPECT_TRUE(callee->pc()->GetReceivers().empty());
+ EXPECT_EQ(2u, callee->pc()->GetSenders().size());
+}
+
+// Test enabling of simulcast with Plan B semantics.
+// This test creates an offer.
+TEST_F(PeerConnectionMediaTestPlanB, SimulcastOffer) {
+ auto caller = CreatePeerConnection();
+ auto caller_video_track = caller->AddVideoTrack("v");
+ RTCOfferAnswerOptions options;
+ options.num_simulcast_layers = 3;
+ auto offer = caller->CreateOffer(options);
+ auto* description = cricket::GetFirstMediaContent(offer->description(),
+ cricket::MEDIA_TYPE_VIDEO)
+ ->media_description();
+ ASSERT_EQ(1u, description->streams().size());
+ ASSERT_TRUE(description->streams()[0].get_ssrc_group("SIM"));
+ EXPECT_EQ(3u, description->streams()[0].get_ssrc_group("SIM")->ssrcs.size());
+
+  // Check that it actually creates simulcast as well.
+ caller->SetLocalDescription(std::move(offer));
+ auto senders = caller->pc()->GetSenders();
+ ASSERT_EQ(1u, senders.size());
+ EXPECT_EQ(cricket::MediaType::MEDIA_TYPE_VIDEO, senders[0]->media_type());
+ EXPECT_EQ(3u, senders[0]->GetParameters().encodings.size());
+}
+
+// Test enabling of simulcast with Plan B semantics.
+// This test creates an answer.
+TEST_F(PeerConnectionMediaTestPlanB, SimulcastAnswer) {
+ auto caller = CreatePeerConnection();
+ caller->AddVideoTrack("v0");
+ auto offer = caller->CreateOffer();
+ auto callee = CreatePeerConnection();
+ auto callee_video_track = callee->AddVideoTrack("v1");
+ ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer)));
+ RTCOfferAnswerOptions options;
+ options.num_simulcast_layers = 3;
+ auto answer = callee->CreateAnswer(options);
+ auto* description = cricket::GetFirstMediaContent(answer->description(),
+ cricket::MEDIA_TYPE_VIDEO)
+ ->media_description();
+ ASSERT_EQ(1u, description->streams().size());
+ ASSERT_TRUE(description->streams()[0].get_ssrc_group("SIM"));
+ EXPECT_EQ(3u, description->streams()[0].get_ssrc_group("SIM")->ssrcs.size());
+
+  // Check that it actually creates simulcast as well.
+ callee->SetLocalDescription(std::move(answer));
+ auto senders = callee->pc()->GetSenders();
+ ASSERT_EQ(1u, senders.size());
+ EXPECT_EQ(cricket::MediaType::MEDIA_TYPE_VIDEO, senders[0]->media_type());
+ EXPECT_EQ(3u, senders[0]->GetParameters().encodings.size());
+}
+
+// Test that stopping the callee transceivers causes the media channels to be
+// destroyed on the callee after calling SetLocalDescription on the local
+// answer.
+// See next test for equivalent behavior with Plan B semantics.
+TEST_F(PeerConnectionMediaTestUnifiedPlan,
+ StoppedLocalTransceiversRemovesMediaChannels) {
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnectionWithAudioVideo();
+
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+
+ // Stop both audio and video transceivers on the callee.
+ auto transceivers = callee->pc()->GetTransceivers();
+ ASSERT_EQ(2u, transceivers.size());
+ transceivers[0]->StopInternal();
+ transceivers[1]->StopInternal();
+
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+
+ EXPECT_TRUE(callee->pc()->GetTransceivers().empty());
+}
+
+// Test that removing streams from a subsequent answer causes the send streams
+// on the callee to be removed when applied locally.
+// See previous test for equivalent behavior with Unified Plan semantics.
+TEST_F(PeerConnectionMediaTestPlanB, EmptyLocalAnswerRemovesSendStreams) {
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnection();
+ auto callee_audio_track = callee->AddAudioTrack("a");
+ auto callee_video_track = callee->AddVideoTrack("v");
+
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+
+ // Remove both tracks from callee.
+ callee->pc()->RemoveTrackOrError(callee_audio_track);
+ callee->pc()->RemoveTrackOrError(callee_video_track);
+
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+
+ EXPECT_TRUE(callee->pc()->GetSenders().empty());
+ EXPECT_EQ(2u, callee->pc()->GetReceivers().size());
+}
+
+// Test that a new stream in a subsequent offer causes a new receive stream to
+// be created on the callee.
+TEST_P(PeerConnectionMediaTest, NewStreamInRemoteOfferAddsRecvStreams) {
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnection();
+
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+
+ // Add second set of tracks to the caller.
+ caller->AddAudioTrack("a2");
+ caller->AddVideoTrack("v2");
+
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+
+ if (IsUnifiedPlan()) {
+ auto a1 = VoiceMediaReceiveChannel(callee->pc()->GetTransceivers()[0]);
+ auto a2 = VoiceMediaReceiveChannel(callee->pc()->GetTransceivers()[2]);
+ auto v1 = VideoMediaReceiveChannel(callee->pc()->GetTransceivers()[1]);
+ auto v2 = VideoMediaReceiveChannel(callee->pc()->GetTransceivers()[3]);
+
+ ASSERT_TRUE(a1);
+ EXPECT_EQ(1u, a1->recv_streams().size());
+ ASSERT_TRUE(a2);
+ EXPECT_EQ(1u, a2->recv_streams().size());
+ ASSERT_TRUE(v1);
+ EXPECT_EQ(1u, v1->recv_streams().size());
+ ASSERT_TRUE(v2);
+ EXPECT_EQ(1u, v2->recv_streams().size());
+ } else {
+ EXPECT_EQ(4u, callee->pc()->GetReceivers().size());
+ }
+}
+
+// Test that a new stream in a subsequent answer causes a new send stream to be
+// created on the callee when added locally.
+TEST_P(PeerConnectionMediaTest, NewStreamInLocalAnswerAddsSendStreams) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnectionWithAudioVideo();
+
+ RTCOfferAnswerOptions offer_options;
+ offer_options.offer_to_receive_audio =
+ RTCOfferAnswerOptions::kOfferToReceiveMediaTrue;
+ offer_options.offer_to_receive_video =
+ RTCOfferAnswerOptions::kOfferToReceiveMediaTrue;
+ RTCOfferAnswerOptions answer_options;
+
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get(), offer_options,
+ answer_options));
+
+ // Add second set of tracks to the callee.
+ callee->AddAudioTrack("a2");
+ callee->AddVideoTrack("v2");
+
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get(), offer_options,
+ answer_options));
+
+ if (IsUnifiedPlan()) {
+ auto callee_voice =
+ VoiceMediaSendChannel(callee->pc()->GetTransceivers()[0]);
+ ASSERT_TRUE(callee_voice);
+ auto callee_video =
+ VideoMediaSendChannel(callee->pc()->GetTransceivers()[1]);
+ ASSERT_TRUE(callee_video);
+
+ EXPECT_EQ(1u, callee_voice->send_streams().size());
+ EXPECT_EQ(1u, callee_video->send_streams().size());
+ } else {
+ EXPECT_EQ(4u, callee->pc()->GetSenders().size());
+ }
+}
+
+// A PeerConnection with no local streams and no explicit answer constraints
+// should not reject any offered media sections.
+TEST_P(PeerConnectionMediaTest,
+ CreateAnswerWithNoStreamsAndDefaultOptionsDoesNotReject) {
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnection();
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ auto answer = callee->CreateAnswer();
+
+ const auto* audio_content =
+ cricket::GetFirstAudioContent(answer->description());
+ ASSERT_TRUE(audio_content);
+ EXPECT_FALSE(audio_content->rejected);
+
+ const auto* video_content =
+ cricket::GetFirstVideoContent(answer->description());
+ ASSERT_TRUE(video_content);
+ EXPECT_FALSE(video_content->rejected);
+}
+
+// Test that raw packetization is not set in the offer by default.
+TEST_P(PeerConnectionMediaTest, RawPacketizationNotSetInOffer) {
+ std::vector<cricket::VideoCodec> fake_codecs;
+ fake_codecs.push_back(cricket::CreateVideoCodec(111, cricket::kVp8CodecName));
+ fake_codecs.push_back(cricket::CreateVideoRtxCodec(112, 111));
+ fake_codecs.push_back(cricket::CreateVideoCodec(113, cricket::kVp9CodecName));
+ fake_codecs.push_back(
+ cricket::CreateVideoCodec(114, cricket::kH264CodecName));
+ fake_codecs.push_back(cricket::CreateVideoCodec(115, "HEVC"));
+ auto caller_fake_engine = std::make_unique<FakeMediaEngine>();
+ caller_fake_engine->SetVideoCodecs(fake_codecs);
+
+ auto caller = CreatePeerConnectionWithVideo(std::move(caller_fake_engine));
+ auto offer = caller->CreateOfferAndSetAsLocal();
+ auto* offer_description =
+ cricket::GetFirstVideoContentDescription(offer->description());
+ for (const auto& codec : offer_description->codecs()) {
+ EXPECT_EQ(codec.packetization, absl::nullopt);
+ }
+}
+
+// Test that raw packetization is set in the offer and answer for all
+// video payload when raw_packetization_for_video is true.
TEST_P(PeerConnectionMediaTest, RawPacketizationSetInOfferAndAnswer) {
  // Both endpoints advertise the same codec mix so the answer can accept
  // everything the offer proposes.
  std::vector<cricket::VideoCodec> fake_codecs;
  fake_codecs.push_back(cricket::CreateVideoCodec(111, cricket::kVp8CodecName));
  fake_codecs.push_back(cricket::CreateVideoRtxCodec(112, 111));
  fake_codecs.push_back(cricket::CreateVideoCodec(113, cricket::kVp9CodecName));
  fake_codecs.push_back(
      cricket::CreateVideoCodec(114, cricket::kH264CodecName));
  fake_codecs.push_back(cricket::CreateVideoCodec(115, "HEVC"));
  auto caller_fake_engine = std::make_unique<FakeMediaEngine>();
  caller_fake_engine->SetVideoCodecs(fake_codecs);
  auto callee_fake_engine = std::make_unique<FakeMediaEngine>();
  callee_fake_engine->SetVideoCodecs(fake_codecs);

  RTCOfferAnswerOptions options;
  options.raw_packetization_for_video = true;

  auto caller = CreatePeerConnectionWithVideo(std::move(caller_fake_engine));
  auto offer = caller->CreateOfferAndSetAsLocal(options);
  auto* offer_description =
      cricket::GetFirstVideoContentDescription(offer->description());
  // Only media codecs (not RTX/FEC-style helpers) are expected to carry the
  // raw packetization parameter.
  for (const auto& codec : offer_description->codecs()) {
    if (codec.IsMediaCodec()) {
      EXPECT_EQ(codec.packetization, cricket::kPacketizationParamRaw);
    }
  }

  auto callee = CreatePeerConnectionWithVideo(std::move(callee_fake_engine));
  ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer)));
  auto answer = callee->CreateAnswerAndSetAsLocal(options);
  auto* answer_description =
      cricket::GetFirstVideoContentDescription(answer->description());
  for (const auto& codec : answer_description->codecs()) {
    if (codec.IsMediaCodec()) {
      EXPECT_EQ(codec.packetization, cricket::kPacketizationParamRaw);
    }
  }

  // Completing the exchange must succeed with raw packetization negotiated.
  ASSERT_TRUE(caller->SetRemoteDescription(std::move(answer)));
}
+
+// Test that raw packetization is not set in the answer when
+// raw_packetization_for_video is true if it was not set in the offer.
TEST_P(PeerConnectionMediaTest,
       RawPacketizationNotSetInAnswerWhenNotSetInOffer) {
  std::vector<cricket::VideoCodec> fake_codecs;
  fake_codecs.push_back(cricket::CreateVideoCodec(111, cricket::kVp8CodecName));
  fake_codecs.push_back(cricket::CreateVideoRtxCodec(112, 111));
  fake_codecs.push_back(cricket::CreateVideoCodec(113, cricket::kVp9CodecName));
  fake_codecs.push_back(
      cricket::CreateVideoCodec(114, cricket::kH264CodecName));
  fake_codecs.push_back(cricket::CreateVideoCodec(115, "HEVC"));
  auto caller_fake_engine = std::make_unique<FakeMediaEngine>();
  caller_fake_engine->SetVideoCodecs(fake_codecs);
  auto callee_fake_engine = std::make_unique<FakeMediaEngine>();
  callee_fake_engine->SetVideoCodecs(fake_codecs);

  // Only the callee asks for raw packetization; the caller's offer omits it.
  RTCOfferAnswerOptions caller_options;
  caller_options.raw_packetization_for_video = false;
  RTCOfferAnswerOptions callee_options;
  callee_options.raw_packetization_for_video = true;

  auto caller = CreatePeerConnectionWithVideo(std::move(caller_fake_engine));
  auto offer = caller->CreateOfferAndSetAsLocal(caller_options);

  auto callee = CreatePeerConnectionWithVideo(std::move(callee_fake_engine));
  ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer)));
  auto answer = callee->CreateAnswerAndSetAsLocal(callee_options);

  // Since the offer did not propose raw packetization, the answer must not
  // enable it either.
  auto* answer_description =
      cricket::GetFirstVideoContentDescription(answer->description());
  for (const auto& codec : answer_description->codecs()) {
    EXPECT_EQ(codec.packetization, absl::nullopt);
  }

  ASSERT_TRUE(caller->SetRemoteDescription(std::move(answer)));
}
+
// Fixture parameterized on (SdpSemantics, (send_media, offer_to_receive,
// expected_direction)); the inner tuple drives VerifyDirection below.
class PeerConnectionMediaOfferDirectionTest
    : public PeerConnectionMediaBaseTest,
      public ::testing::WithParamInterface<
          std::tuple<SdpSemantics,
                     std::tuple<bool, int, RtpTransceiverDirection>>> {
 protected:
  PeerConnectionMediaOfferDirectionTest()
      : PeerConnectionMediaBaseTest(std::get<0>(GetParam())) {
    auto param = std::get<1>(GetParam());
    send_media_ = std::get<0>(param);
    offer_to_receive_ = std::get<1>(param);
    expected_direction_ = std::get<2>(param);
  }

  // Whether the caller adds a local audio track before creating the offer.
  bool send_media_;
  // Value assigned to RTCOfferAnswerOptions::offer_to_receive_audio.
  int offer_to_receive_;
  // Direction the resulting audio m= section is expected to declare;
  // kInactive means no audio section is expected at all.
  RtpTransceiverDirection expected_direction_;
};
+
+// Tests that the correct direction is set on the media description according
+// to the presence of a local media track and the offer_to_receive setting.
// Tests that the correct direction is set on the media description according
// to the presence of a local media track and the offer_to_receive setting.
TEST_P(PeerConnectionMediaOfferDirectionTest, VerifyDirection) {
  auto caller = CreatePeerConnection();
  if (send_media_) {
    caller->AddAudioTrack("a");
  }

  RTCOfferAnswerOptions options;
  options.offer_to_receive_audio = offer_to_receive_;
  auto offer = caller->CreateOffer(options);

  auto* content = cricket::GetFirstMediaContent(offer->description(),
                                                cricket::MEDIA_TYPE_AUDIO);
  // In this parameterization, kInactive encodes "no audio m= section at
  // all", not an audio section with direction inactive.
  if (expected_direction_ == RtpTransceiverDirection::kInactive) {
    EXPECT_FALSE(content);
  } else {
    EXPECT_EQ(expected_direction_, content->media_description()->direction());
  }
}
+
// Note that in these tests, kInactive indicates that no media section is
// included in the offer, not that the media direction is inactive.
INSTANTIATE_TEST_SUITE_P(
    PeerConnectionMediaTest,
    PeerConnectionMediaOfferDirectionTest,
    Combine(
        Values(SdpSemantics::kPlanB_DEPRECATED, SdpSemantics::kUnifiedPlan),
        // (send_media, offer_to_receive_audio, expected direction).
        Values(std::make_tuple(false, -1, RtpTransceiverDirection::kInactive),
               std::make_tuple(false, 0, RtpTransceiverDirection::kInactive),
               std::make_tuple(false, 1, RtpTransceiverDirection::kRecvOnly),
               std::make_tuple(true, -1, RtpTransceiverDirection::kSendRecv),
               std::make_tuple(true, 0, RtpTransceiverDirection::kSendOnly),
               std::make_tuple(true, 1, RtpTransceiverDirection::kSendRecv))));
+
// Fixture parameterized on (SdpSemantics, offered direction, whether the
// answerer has a local track, offer_to_receive_audio value).
class PeerConnectionMediaAnswerDirectionTest
    : public PeerConnectionMediaBaseTest,
      public ::testing::WithParamInterface<
          std::tuple<SdpSemantics, RtpTransceiverDirection, bool, int>> {
 protected:
  PeerConnectionMediaAnswerDirectionTest()
      : PeerConnectionMediaBaseTest(std::get<0>(GetParam())) {
    offer_direction_ = std::get<1>(GetParam());
    send_media_ = std::get<2>(GetParam());
    offer_to_receive_ = std::get<3>(GetParam());
  }

  // Direction forced onto the audio section of the caller's offer.
  RtpTransceiverDirection offer_direction_;
  // Whether the callee adds a local audio track before answering.
  bool send_media_;
  // Value assigned to RTCOfferAnswerOptions::offer_to_receive_audio.
  int offer_to_receive_;
};
+
+// Tests that the direction in an answer is correct according to direction sent
+// in the offer, the presence of a local media track on the receive side and the
+// offer_to_receive setting.
// Tests that the direction in an answer is correct according to direction sent
// in the offer, the presence of a local media track on the receive side and the
// offer_to_receive setting.
TEST_P(PeerConnectionMediaAnswerDirectionTest, VerifyDirection) {
  if (IsUnifiedPlan() &&
      offer_to_receive_ != RTCOfferAnswerOptions::kUndefined) {
    // offer_to_receive_ is not implemented when creating answers with Unified
    // Plan semantics specified.
    return;
  }

  auto caller = CreatePeerConnection();
  caller->AddAudioTrack("a");

  // Create the offer with an audio section and set its direction.
  auto offer = caller->CreateOffer();
  cricket::GetFirstAudioContentDescription(offer->description())
      ->set_direction(offer_direction_);

  auto callee = CreatePeerConnection();
  if (send_media_) {
    callee->AddAudioTrack("a");
  }
  ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer)));

  // Create the answer according to the test parameters.
  RTCOfferAnswerOptions options;
  options.offer_to_receive_audio = offer_to_receive_;
  auto answer = callee->CreateAnswer(options);

  // The expected direction in the answer is the intersection of each side's
  // capability to send/recv media.
  // For the offerer, the direction is given in the offer (offer_direction_).
  // For the answerer, the direction has two components:
  // 1. Send if the answerer has a local track to send.
  // 2. Receive if the answerer has explicitly set the offer_to_receive to 1 or
  //    if it has been left as default.
  bool offer_send = RtpTransceiverDirectionHasSend(offer_direction_);
  bool offer_recv = RtpTransceiverDirectionHasRecv(offer_direction_);

  // The negotiated components determine the direction set in the answer.
  bool negotiate_send = (send_media_ && offer_recv);
  bool negotiate_recv = ((offer_to_receive_ != 0) && offer_send);

  auto expected_direction =
      RtpTransceiverDirectionFromSendRecv(negotiate_send, negotiate_recv);
  EXPECT_EQ(expected_direction,
            GetMediaContentDirection(answer.get(), cricket::MEDIA_TYPE_AUDIO));
}
+
+// Tests that the media section is rejected if and only if the callee has no
+// local media track and has set offer_to_receive to 0, no matter which
+// direction the caller indicated in the offer.
// Tests that the media section is rejected if and only if the callee has no
// local media track and has set offer_to_receive to 0, no matter which
// direction the caller indicated in the offer.
TEST_P(PeerConnectionMediaAnswerDirectionTest, VerifyRejected) {
  if (IsUnifiedPlan() &&
      offer_to_receive_ != RTCOfferAnswerOptions::kUndefined) {
    // offer_to_receive_ is not implemented when creating answers with Unified
    // Plan semantics specified.
    return;
  }

  auto caller = CreatePeerConnection();
  caller->AddAudioTrack("a");

  // Create the offer with an audio section and set its direction.
  auto offer = caller->CreateOffer();
  cricket::GetFirstAudioContentDescription(offer->description())
      ->set_direction(offer_direction_);

  auto callee = CreatePeerConnection();
  if (send_media_) {
    callee->AddAudioTrack("a");
  }
  ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer)));

  // Create the answer according to the test parameters.
  RTCOfferAnswerOptions options;
  options.offer_to_receive_audio = offer_to_receive_;
  auto answer = callee->CreateAnswer(options);

  // The media section is rejected if and only if offer_to_receive is explicitly
  // set to 0 and there is no media to send. The section itself is still
  // present in the answer (as required by RFC 3264), just marked rejected.
  auto* audio_content = cricket::GetFirstAudioContent(answer->description());
  ASSERT_TRUE(audio_content);
  EXPECT_EQ((offer_to_receive_ == 0 && !send_media_), audio_content->rejected);
}
+
// Run the answer-direction tests over both SDP semantics and every
// combination of offered direction, local track presence, and
// offer_to_receive_audio value.
INSTANTIATE_TEST_SUITE_P(PeerConnectionMediaTest,
                         PeerConnectionMediaAnswerDirectionTest,
                         Combine(Values(SdpSemantics::kPlanB_DEPRECATED,
                                        SdpSemantics::kUnifiedPlan),
                                 Values(RtpTransceiverDirection::kInactive,
                                        RtpTransceiverDirection::kSendOnly,
                                        RtpTransceiverDirection::kRecvOnly,
                                        RtpTransceiverDirection::kSendRecv),
                                 Bool(),
                                 Values(-1, 0, 1)));
+
// Audio and video can carry independent directions in the same offer: here
// audio is receive-only (no local track, offer_to_receive=1) while video is
// send-only (local track, offer_to_receive=0).
TEST_P(PeerConnectionMediaTest, OfferHasDifferentDirectionForAudioVideo) {
  auto caller = CreatePeerConnection();
  caller->AddVideoTrack("v");

  RTCOfferAnswerOptions options;
  options.offer_to_receive_audio = 1;
  options.offer_to_receive_video = 0;
  auto offer = caller->CreateOffer(options);

  EXPECT_EQ(RtpTransceiverDirection::kRecvOnly,
            GetMediaContentDirection(offer.get(), cricket::MEDIA_TYPE_AUDIO));
  EXPECT_EQ(RtpTransceiverDirection::kSendOnly,
            GetMediaContentDirection(offer.get(), cricket::MEDIA_TYPE_VIDEO));
}
+
// Same as above but for the answer side: the callee's audio is receive-only
// and its video send-only, independently of each other.
TEST_P(PeerConnectionMediaTest, AnswerHasDifferentDirectionsForAudioVideo) {
  if (IsUnifiedPlan()) {
    // offer_to_receive_ is not implemented when creating answers with Unified
    // Plan semantics specified.
    return;
  }

  auto caller = CreatePeerConnectionWithAudioVideo();
  auto callee = CreatePeerConnection();
  callee->AddVideoTrack("v");

  ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));

  RTCOfferAnswerOptions options;
  options.offer_to_receive_audio = 1;
  options.offer_to_receive_video = 0;
  auto answer = callee->CreateAnswer(options);

  EXPECT_EQ(RtpTransceiverDirection::kRecvOnly,
            GetMediaContentDirection(answer.get(), cricket::MEDIA_TYPE_AUDIO));
  EXPECT_EQ(RtpTransceiverDirection::kSendOnly,
            GetMediaContentDirection(answer.get(), cricket::MEDIA_TYPE_VIDEO));
}
+
+void AddComfortNoiseCodecsToSend(cricket::FakeMediaEngine* media_engine) {
+ const cricket::AudioCodec kComfortNoiseCodec8k =
+ cricket::CreateAudioCodec(102, cricket::kCnCodecName, 8000, 1);
+ const cricket::AudioCodec kComfortNoiseCodec16k =
+ cricket::CreateAudioCodec(103, cricket::kCnCodecName, 16000, 1);
+
+ auto codecs = media_engine->voice().send_codecs();
+ codecs.push_back(kComfortNoiseCodec8k);
+ codecs.push_back(kComfortNoiseCodec16k);
+ media_engine->SetAudioCodecs(codecs);
+}
+
+bool HasAnyComfortNoiseCodecs(const cricket::SessionDescription* desc) {
+ const auto* audio_desc = cricket::GetFirstAudioContentDescription(desc);
+ for (const auto& codec : audio_desc->codecs()) {
+ if (codec.name == cricket::kCnCodecName) {
+ return true;
+ }
+ }
+ return false;
+}
+
+bool HasPayloadTypeConflict(const cricket::SessionDescription* desc) {
+ std::set<int> payload_types;
+ const auto* audio_desc = cricket::GetFirstAudioContentDescription(desc);
+ if (audio_desc) {
+ for (const auto& codec : audio_desc->codecs()) {
+ if (payload_types.count(codec.id) > 0) {
+ return true;
+ }
+ payload_types.insert(codec.id);
+ }
+ }
+ const auto* video_desc = cricket::GetFirstVideoContentDescription(desc);
+ if (video_desc) {
+ for (const auto& codec : video_desc->codecs()) {
+ if (payload_types.count(codec.id) > 0) {
+ return true;
+ }
+ payload_types.insert(codec.id);
+ }
+ }
+ return false;
+}
+
// With voice_activity_detection disabled, CN codecs must be stripped from
// the offer even though the media engine advertises them.
TEST_P(PeerConnectionMediaTest,
       CreateOfferWithNoVoiceActivityDetectionIncludesNoComfortNoiseCodecs) {
  auto fake_engine = std::make_unique<FakeMediaEngine>();
  AddComfortNoiseCodecsToSend(fake_engine.get());
  auto caller = CreatePeerConnectionWithAudioVideo(std::move(fake_engine));

  RTCOfferAnswerOptions options;
  options.voice_activity_detection = false;
  auto offer = caller->CreateOffer(options);

  EXPECT_FALSE(HasAnyComfortNoiseCodecs(offer->description()));
}
+
// With voice_activity_detection enabled (the default), CN codecs advertised
// by the engine survive into the offer.
TEST_P(PeerConnectionMediaTest,
       CreateOfferWithVoiceActivityDetectionIncludesComfortNoiseCodecs) {
  auto fake_engine = std::make_unique<FakeMediaEngine>();
  AddComfortNoiseCodecsToSend(fake_engine.get());
  auto caller = CreatePeerConnectionWithAudioVideo(std::move(fake_engine));

  RTCOfferAnswerOptions options;
  options.voice_activity_detection = true;
  auto offer = caller->CreateOffer(options);

  EXPECT_TRUE(HasAnyComfortNoiseCodecs(offer->description()));
}
+
// The caller's engine has no CN codecs, so the offer contains none; the
// answer cannot add CN even though the callee supports it and VAD is on.
TEST_P(PeerConnectionMediaTest,
       CreateAnswerWithVoiceActivityDetectionIncludesNoComfortNoiseCodecs) {
  auto caller = CreatePeerConnectionWithAudioVideo();

  auto callee_fake_engine = std::make_unique<FakeMediaEngine>();
  AddComfortNoiseCodecsToSend(callee_fake_engine.get());
  auto callee =
      CreatePeerConnectionWithAudioVideo(std::move(callee_fake_engine));

  ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));

  RTCOfferAnswerOptions options;
  options.voice_activity_detection = true;
  auto answer = callee->CreateAnswer(options);

  EXPECT_FALSE(HasAnyComfortNoiseCodecs(answer->description()));
}
+
// Both sides support CN, but the callee answers with VAD disabled, so CN
// must be stripped from the answer.
TEST_P(PeerConnectionMediaTest,
       CreateAnswerWithNoVoiceActivityDetectionIncludesNoComfortNoiseCodecs) {
  auto caller_fake_engine = std::make_unique<FakeMediaEngine>();
  AddComfortNoiseCodecsToSend(caller_fake_engine.get());
  auto caller =
      CreatePeerConnectionWithAudioVideo(std::move(caller_fake_engine));

  auto callee_fake_engine = std::make_unique<FakeMediaEngine>();
  AddComfortNoiseCodecsToSend(callee_fake_engine.get());
  auto callee =
      CreatePeerConnectionWithAudioVideo(std::move(callee_fake_engine));

  ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));

  RTCOfferAnswerOptions options;
  options.voice_activity_detection = false;
  auto answer = callee->CreateAnswer(options);

  EXPECT_FALSE(HasAnyComfortNoiseCodecs(answer->description()));
}
+
+// The following test group verifies that we reject answers with invalid media
+// sections as per RFC 3264.
+
// Fixture parameterized on (SdpSemantics, (label, mutator, expected error)).
// The string label only names the parameterization; the test bodies consume
// the mutator and the expected error message.
class PeerConnectionMediaInvalidMediaTest
    : public PeerConnectionMediaBaseTest,
      public ::testing::WithParamInterface<std::tuple<
          SdpSemantics,
          std::tuple<std::string,
                     std::function<void(cricket::SessionDescription*)>,
                     std::string>>> {
 protected:
  PeerConnectionMediaInvalidMediaTest()
      : PeerConnectionMediaBaseTest(std::get<0>(GetParam())) {
    auto param = std::get<1>(GetParam());
    mutator_ = std::get<1>(param);
    expected_error_ = std::get<2>(param);
  }

  // Corrupts a freshly created answer before it is applied.
  std::function<void(cricket::SessionDescription*)> mutator_;
  // Error text SetLocal/RemoteDescription is expected to report.
  std::string expected_error_;
};
+
// Applying a mutated (invalid) answer as the caller's remote description
// must fail with the parameterized error message.
TEST_P(PeerConnectionMediaInvalidMediaTest, FailToSetRemoteAnswer) {
  auto caller = CreatePeerConnectionWithAudioVideo();
  auto callee = CreatePeerConnectionWithAudioVideo();

  ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));

  auto answer = callee->CreateAnswer();
  mutator_(answer->description());

  std::string error;
  ASSERT_FALSE(caller->SetRemoteDescription(std::move(answer), &error));
  EXPECT_EQ("Failed to set remote answer sdp: " + expected_error_, error);
}
+
// Applying the same mutated answer as the callee's own local description
// must fail with the corresponding "local answer" error message.
TEST_P(PeerConnectionMediaInvalidMediaTest, FailToSetLocalAnswer) {
  auto caller = CreatePeerConnectionWithAudioVideo();
  auto callee = CreatePeerConnectionWithAudioVideo();

  ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));

  auto answer = callee->CreateAnswer();
  mutator_(answer->description());

  std::string error;
  ASSERT_FALSE(callee->SetLocalDescription(std::move(answer), &error));
  EXPECT_EQ("Failed to set local answer sdp: " + expected_error_, error);
}
+
+void RemoveVideoContentAndUnbundle(cricket::SessionDescription* desc) {
+ // Removing BUNDLE is easier than removing the content in there.
+ desc->RemoveGroupByName("BUNDLE");
+ auto content_name = cricket::GetFirstVideoContent(desc)->name;
+ desc->RemoveContentByName(content_name);
+ desc->RemoveTransportInfoByName(content_name);
+}
+
+void RenameVideoContentAndUnbundle(cricket::SessionDescription* desc) {
+ // Removing BUNDLE is easier than renaming the content in there.
+ desc->RemoveGroupByName("BUNDLE");
+ auto* video_content = cricket::GetFirstVideoContent(desc);
+ auto* transport_info = desc->GetTransportInfoByName(video_content->name);
+ video_content->name = "video_renamed";
+ transport_info->content_name = video_content->name;
+}
+
+void ReverseMediaContent(cricket::SessionDescription* desc) {
+ absl::c_reverse(desc->contents());
+ absl::c_reverse(desc->transport_infos());
+}
+
+void ChangeMediaTypeAudioToVideo(cricket::SessionDescription* desc) {
+ std::string audio_mid = cricket::GetFirstAudioContent(desc)->name;
+ desc->RemoveContentByName(audio_mid);
+ auto* video_content = cricket::GetFirstVideoContent(desc);
+ desc->AddContent(audio_mid, video_content->type,
+ video_content->media_description()->Clone());
+}
+
// All four mutations below are detected as an m-line ordering mismatch.
constexpr char kMLinesOutOfOrder[] =
    "The order of m-lines in answer doesn't match order in offer. Rejecting "
    "answer.";

INSTANTIATE_TEST_SUITE_P(
    PeerConnectionMediaTest,
    PeerConnectionMediaInvalidMediaTest,
    Combine(Values(SdpSemantics::kPlanB_DEPRECATED, SdpSemantics::kUnifiedPlan),
            // (label, answer mutator, expected error).
            Values(std::make_tuple("remove video",
                                   RemoveVideoContentAndUnbundle,
                                   kMLinesOutOfOrder),
                   std::make_tuple("rename video",
                                   RenameVideoContentAndUnbundle,
                                   kMLinesOutOfOrder),
                   std::make_tuple("reverse media sections",
                                   ReverseMediaContent,
                                   kMLinesOutOfOrder),
                   std::make_tuple("change audio type to video type",
                                   ChangeMediaTypeAudioToVideo,
                                   kMLinesOutOfOrder))));
+
+// Tests that if the underlying video encoder fails to be initialized (signaled
+// by failing to set send codecs), the PeerConnection signals the error to the
+// client.
// Tests that if the underlying video encoder fails to be initialized (signaled
// by failing to set send codecs), the PeerConnection signals the error to the
// client.
TEST_F(PeerConnectionMediaTestUnifiedPlan,
       MediaEngineErrorPropagatedToClients) {
  auto caller = CreatePeerConnectionWithAudioVideo();
  auto callee = CreatePeerConnectionWithAudioVideo();

  ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));

  // Transceiver [1] is the video transceiver (audio was added first).
  auto video_channel =
      VideoMediaSendChannel(caller->pc()->GetTransceivers()[1]);
  video_channel->set_fail_set_send_codecs(true);

  std::string error;
  ASSERT_FALSE(caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal(),
                                            &error));
  // NOTE(review): this fixture is Unified Plan only, so IsUnifiedPlan() is
  // always true here and the expected mid is always "1"; the ternary looks
  // like a leftover from a parameterized version of this test.
  EXPECT_EQ(std::string("Failed to set remote answer sdp: Failed to set remote "
                        "video description "
                        "send parameters for m-section with mid='") +
                (IsUnifiedPlan() ? "1" : "video") + "'.",
            error);
}
+
+// Tests that if the underlying video encoder fails once then subsequent
+// attempts at setting the local/remote description will also fail, even if
+// SetSendCodecs no longer fails.
// Tests that if the underlying video encoder fails once then subsequent
// attempts at setting the local/remote description will also fail, even if
// SetSendCodecs no longer fails.
TEST_F(PeerConnectionMediaTestUnifiedPlan,
       FailToApplyDescriptionIfVideoEncoderHasEverFailed) {
  auto caller = CreatePeerConnectionWithAudioVideo();
  auto callee = CreatePeerConnectionWithAudioVideo();

  ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));

  auto video_channel =
      VideoMediaSendChannel(caller->pc()->GetTransceivers()[1]);
  video_channel->set_fail_set_send_codecs(true);

  EXPECT_FALSE(
      caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal()));

  // Clearing the failure flag must not recover the connection: the error is
  // sticky once a description has failed to apply.
  video_channel->set_fail_set_send_codecs(false);

  EXPECT_FALSE(caller->SetRemoteDescription(callee->CreateAnswer()));
  EXPECT_FALSE(caller->SetLocalDescription(caller->CreateOffer()));
}
+
+void RenameContent(cricket::SessionDescription* desc,
+ cricket::MediaType media_type,
+ const std::string& new_name) {
+ auto* content = cricket::GetFirstMediaContent(desc, media_type);
+ RTC_DCHECK(content);
+ std::string old_name = content->name;
+ content->name = new_name;
+ auto* transport = desc->GetTransportInfoByName(old_name);
+ RTC_DCHECK(transport);
+ transport->content_name = new_name;
+
+ // Rename the content name in the BUNDLE group.
+ cricket::ContentGroup new_bundle_group =
+ *desc->GetGroupByName(cricket::GROUP_TYPE_BUNDLE);
+ new_bundle_group.RemoveContentName(old_name);
+ new_bundle_group.AddContentName(new_name);
+ desc->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE);
+ desc->AddGroup(new_bundle_group);
+}
+
+// Tests that an answer responds with the same MIDs as the offer.
// Tests that an answer responds with the same MIDs as the offer, even when
// the offer uses non-default mid values.
TEST_P(PeerConnectionMediaTest, AnswerHasSameMidsAsOffer) {
  const std::string kAudioMid = "notdefault1";
  const std::string kVideoMid = "notdefault2";

  auto caller = CreatePeerConnectionWithAudioVideo();
  auto callee = CreatePeerConnectionWithAudioVideo();

  auto offer = caller->CreateOffer();
  RenameContent(offer->description(), cricket::MEDIA_TYPE_AUDIO, kAudioMid);
  RenameContent(offer->description(), cricket::MEDIA_TYPE_VIDEO, kVideoMid);
  ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer)));

  auto answer = callee->CreateAnswer();
  EXPECT_EQ(kAudioMid,
            cricket::GetFirstAudioContent(answer->description())->name);
  EXPECT_EQ(kVideoMid,
            cricket::GetFirstVideoContent(answer->description())->name);
}
+
+// Test that if the callee creates a re-offer, the MIDs are the same as the
+// original offer.
// Test that if the callee creates a re-offer, the MIDs are the same as the
// original offer.
TEST_P(PeerConnectionMediaTest, ReOfferHasSameMidsAsFirstOffer) {
  const std::string kAudioMid = "notdefault1";
  const std::string kVideoMid = "notdefault2";

  auto caller = CreatePeerConnectionWithAudioVideo();
  auto callee = CreatePeerConnectionWithAudioVideo();

  auto offer = caller->CreateOffer();
  RenameContent(offer->description(), cricket::MEDIA_TYPE_AUDIO, kAudioMid);
  RenameContent(offer->description(), cricket::MEDIA_TYPE_VIDEO, kVideoMid);
  ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer)));
  // Complete the callee's side of the negotiation before re-offering.
  ASSERT_TRUE(callee->SetLocalDescription(callee->CreateAnswer()));

  auto reoffer = callee->CreateOffer();
  EXPECT_EQ(kAudioMid,
            cricket::GetFirstAudioContent(reoffer->description())->name);
  EXPECT_EQ(kVideoMid,
            cricket::GetFirstVideoContent(reoffer->description())->name);
}
+
+// Test that SetRemoteDescription returns an error if there are two m= sections
+// with the same MID value.
// Test that SetRemoteDescription returns an error if there are two m= sections
// with the same MID value.
TEST_P(PeerConnectionMediaTest, SetRemoteDescriptionFailsWithDuplicateMids) {
  auto caller = CreatePeerConnectionWithAudioVideo();
  auto callee = CreatePeerConnectionWithAudioVideo();

  // Force both the audio and video sections onto the same mid.
  auto offer = caller->CreateOffer();
  RenameContent(offer->description(), cricket::MEDIA_TYPE_AUDIO, "same");
  RenameContent(offer->description(), cricket::MEDIA_TYPE_VIDEO, "same");

  std::string error;
  EXPECT_FALSE(callee->SetRemoteDescription(std::move(offer), &error));
  EXPECT_EQ(error,
            "Failed to set remote offer sdp: Duplicate a=mid value 'same'.");
}
+
+// Test that if a RED codec refers to another codec in its fmtp line, but that
+// codec's payload type was reassigned for some reason (either the remote
+// endpoint selected a different payload type or there was a conflict), the RED
+// fmtp line is modified to refer to the correct payload type.
+TEST_P(PeerConnectionMediaTest, RedFmtpPayloadTypeReassigned) {
+ std::vector<cricket::AudioCodec> caller_fake_codecs;
+ caller_fake_codecs.push_back(cricket::CreateAudioCodec(100, "foo", 0, 1));
+ auto caller_fake_engine = std::make_unique<FakeMediaEngine>();
+ caller_fake_engine->SetAudioCodecs(caller_fake_codecs);
+ auto caller = CreatePeerConnectionWithAudio(std::move(caller_fake_engine));
+
+ std::vector<cricket::AudioCodec> callee_fake_codecs;
+ callee_fake_codecs.push_back(cricket::CreateAudioCodec(120, "foo", 0, 1));
+ callee_fake_codecs.push_back(
+ cricket::CreateAudioCodec(121, cricket::kRedCodecName, 0, 1));
+ callee_fake_codecs.back().SetParam(cricket::kCodecParamNotInNameValueFormat,
+ "120/120");
+ auto callee_fake_engine = std::make_unique<FakeMediaEngine>();
+ callee_fake_engine->SetAudioCodecs(callee_fake_codecs);
+ auto callee = CreatePeerConnectionWithAudio(std::move(callee_fake_engine));
+
+ // Offer from the caller establishes 100 as the "foo" payload type.
+ auto offer = caller->CreateOfferAndSetAsLocal();
+ callee->SetRemoteDescription(std::move(offer));
+ auto answer = callee->CreateAnswerAndSetAsLocal();
+ auto answer_description =
+ cricket::GetFirstAudioContentDescription(answer->description());
+ ASSERT_EQ(1u, answer_description->codecs().size());
+
+ // Offer from the callee should respect the established payload type, and
+ // attempt to add RED, which should refer to the correct payload type.
+ offer = callee->CreateOfferAndSetAsLocal();
+ auto* offer_description =
+ cricket::GetFirstAudioContentDescription(offer->description());
+ ASSERT_EQ(2u, offer_description->codecs().size());
+ for (const auto& codec : offer_description->codecs()) {
+ if (codec.name == "foo") {
+ ASSERT_EQ(100, codec.id);
+ } else if (codec.name == cricket::kRedCodecName) {
+ std::string fmtp;
+ ASSERT_TRUE(codec.GetParam("", &fmtp));
+ EXPECT_EQ("100/100", fmtp);
+ }
+ }
+}
+
+// Test that RED without fmtp does match RED without fmtp.
// Test that RED without fmtp does match RED without fmtp.
TEST_P(PeerConnectionMediaTest, RedFmtpPayloadTypeNoFmtpMatchNoFmtp) {
  std::vector<cricket::AudioCodec> caller_fake_codecs;
  caller_fake_codecs.push_back(cricket::CreateAudioCodec(100, "foo", 0, 1));
  caller_fake_codecs.push_back(
      cricket::CreateAudioCodec(101, cricket::kRedCodecName, 0, 1));
  auto caller_fake_engine = std::make_unique<FakeMediaEngine>();
  caller_fake_engine->SetAudioCodecs(caller_fake_codecs);
  auto caller = CreatePeerConnectionWithAudio(std::move(caller_fake_engine));

  // Neither side attaches an fmtp to RED; only the payload types differ.
  std::vector<cricket::AudioCodec> callee_fake_codecs;
  callee_fake_codecs.push_back(cricket::CreateAudioCodec(120, "foo", 0, 1));
  callee_fake_codecs.push_back(
      cricket::CreateAudioCodec(121, cricket::kRedCodecName, 0, 1));
  auto callee_fake_engine = std::make_unique<FakeMediaEngine>();
  callee_fake_engine->SetAudioCodecs(callee_fake_codecs);
  auto callee = CreatePeerConnectionWithAudio(std::move(callee_fake_engine));

  // Offer from the caller establishes 100 as the "foo" payload type.
  // Red (without fmtp) is negotiated.
  auto offer = caller->CreateOfferAndSetAsLocal();
  callee->SetRemoteDescription(std::move(offer));
  auto answer = callee->CreateAnswerAndSetAsLocal();
  auto answer_description =
      cricket::GetFirstAudioContentDescription(answer->description());
  ASSERT_EQ(2u, answer_description->codecs().size());

  // Offer from the callee should respect the established payload type, and
  // attempt to add RED.
  offer = callee->CreateOfferAndSetAsLocal();
  auto* offer_description =
      cricket::GetFirstAudioContentDescription(offer->description());
  ASSERT_EQ(2u, offer_description->codecs().size());
  for (const auto& codec : offer_description->codecs()) {
    if (codec.name == "foo") {
      ASSERT_EQ(100, codec.id);
    } else if (codec.name == cricket::kRedCodecName) {
      ASSERT_EQ(101, codec.id);
    }
  }
}
+
+// Test that RED without fmtp does not match RED with fmtp.
// Test that RED without fmtp does not match RED with fmtp.
TEST_P(PeerConnectionMediaTest, RedFmtpPayloadTypeNoFmtpNoMatchFmtp) {
  // Caller advertises RED with no fmtp line.
  std::vector<cricket::AudioCodec> caller_fake_codecs;
  caller_fake_codecs.push_back(cricket::CreateAudioCodec(100, "foo", 0, 1));
  caller_fake_codecs.push_back(
      cricket::CreateAudioCodec(101, cricket::kRedCodecName, 0, 1));
  auto caller_fake_engine = std::make_unique<FakeMediaEngine>();
  caller_fake_engine->SetAudioCodecs(caller_fake_codecs);
  auto caller = CreatePeerConnectionWithAudio(std::move(caller_fake_engine));

  // Callee's RED carries an fmtp referring to its "foo" payload type.
  std::vector<cricket::AudioCodec> callee_fake_codecs;
  callee_fake_codecs.push_back(cricket::CreateAudioCodec(120, "foo", 0, 1));
  callee_fake_codecs.push_back(
      cricket::CreateAudioCodec(121, cricket::kRedCodecName, 0, 1));
  callee_fake_codecs.back().SetParam(cricket::kCodecParamNotInNameValueFormat,
                                     "120/120");
  auto callee_fake_engine = std::make_unique<FakeMediaEngine>();
  callee_fake_engine->SetAudioCodecs(callee_fake_codecs);
  auto callee = CreatePeerConnectionWithAudio(std::move(callee_fake_engine));

  // Offer from the caller establishes 100 as the "foo" payload type.
  // It should not negotiate RED.
  auto offer = caller->CreateOfferAndSetAsLocal();
  callee->SetRemoteDescription(std::move(offer));
  auto answer = callee->CreateAnswerAndSetAsLocal();
  auto answer_description =
      cricket::GetFirstAudioContentDescription(answer->description());
  ASSERT_EQ(1u, answer_description->codecs().size());

  // Offer from the callee should respect the established payload type, and
  // attempt to add RED, which should refer to the correct payload type.
  offer = callee->CreateOfferAndSetAsLocal();
  auto* offer_description =
      cricket::GetFirstAudioContentDescription(offer->description());
  ASSERT_EQ(2u, offer_description->codecs().size());
  for (const auto& codec : offer_description->codecs()) {
    if (codec.name == "foo") {
      ASSERT_EQ(100, codec.id);
    } else if (codec.name == cricket::kRedCodecName) {
      std::string fmtp;
      ASSERT_TRUE(
          codec.GetParam(cricket::kCodecParamNotInNameValueFormat, &fmtp));
      EXPECT_EQ("100/100", fmtp);
    }
  }
}
+
+// Test that RED with fmtp must match base codecs.
// Test that RED with fmtp must match base codecs.
TEST_P(PeerConnectionMediaTest, RedFmtpPayloadTypeMustMatchBaseCodecs) {
  // Caller's RED redundancy is built on "foo".
  std::vector<cricket::AudioCodec> caller_fake_codecs;
  caller_fake_codecs.push_back(cricket::CreateAudioCodec(100, "foo", 0, 1));
  caller_fake_codecs.push_back(
      cricket::CreateAudioCodec(101, cricket::kRedCodecName, 0, 1));
  caller_fake_codecs.back().SetParam(cricket::kCodecParamNotInNameValueFormat,
                                     "100/100");
  auto caller_fake_engine = std::make_unique<FakeMediaEngine>();
  caller_fake_engine->SetAudioCodecs(caller_fake_codecs);
  auto caller = CreatePeerConnectionWithAudio(std::move(caller_fake_engine));

  // Callee's RED redundancy is built on "bar" instead.
  std::vector<cricket::AudioCodec> callee_fake_codecs;
  callee_fake_codecs.push_back(cricket::CreateAudioCodec(120, "foo", 0, 1));
  callee_fake_codecs.push_back(
      cricket::CreateAudioCodec(121, cricket::kRedCodecName, 0, 1));
  callee_fake_codecs.push_back(cricket::CreateAudioCodec(122, "bar", 0, 1));
  callee_fake_codecs.back().SetParam(cricket::kCodecParamNotInNameValueFormat,
                                     "122/122");
  auto callee_fake_engine = std::make_unique<FakeMediaEngine>();
  callee_fake_engine->SetAudioCodecs(callee_fake_codecs);
  auto callee = CreatePeerConnectionWithAudio(std::move(callee_fake_engine));

  // Offer from the caller establishes 100 as the "foo" payload type.
  // It should not negotiate RED since RED is associated with foo, not bar.
  auto offer = caller->CreateOfferAndSetAsLocal();
  callee->SetRemoteDescription(std::move(offer));
  auto answer = callee->CreateAnswerAndSetAsLocal();
  auto answer_description =
      cricket::GetFirstAudioContentDescription(answer->description());
  ASSERT_EQ(1u, answer_description->codecs().size());
}
+
+// Test behaviour when the RED fmtp attempts to specify different codecs
+// which is not supported.
// Test behaviour when the RED fmtp attempts to specify different codecs
// which is not supported.
TEST_P(PeerConnectionMediaTest, RedFmtpPayloadMixed) {
  // Caller's RED fmtp mixes two different base codecs ("foo" and "bar").
  std::vector<cricket::AudioCodec> caller_fake_codecs;
  caller_fake_codecs.push_back(cricket::CreateAudioCodec(100, "foo", 0, 1));
  caller_fake_codecs.push_back(cricket::CreateAudioCodec(102, "bar", 0, 1));
  caller_fake_codecs.push_back(
      cricket::CreateAudioCodec(101, cricket::kRedCodecName, 0, 1));
  caller_fake_codecs.back().SetParam(cricket::kCodecParamNotInNameValueFormat,
                                     "100/102");
  auto caller_fake_engine = std::make_unique<FakeMediaEngine>();
  caller_fake_engine->SetAudioCodecs(caller_fake_codecs);
  auto caller = CreatePeerConnectionWithAudio(std::move(caller_fake_engine));

  std::vector<cricket::AudioCodec> callee_fake_codecs;
  callee_fake_codecs.push_back(cricket::CreateAudioCodec(120, "foo", 0, 1));
  callee_fake_codecs.push_back(
      cricket::CreateAudioCodec(121, cricket::kRedCodecName, 0, 1));
  callee_fake_codecs.back().SetParam(cricket::kCodecParamNotInNameValueFormat,
                                     "120/120");
  auto callee_fake_engine = std::make_unique<FakeMediaEngine>();
  callee_fake_engine->SetAudioCodecs(callee_fake_codecs);
  auto callee = CreatePeerConnectionWithAudio(std::move(callee_fake_engine));

  // Offer from the caller establishes 100 as the "foo" payload type.
  auto offer = caller->CreateOfferAndSetAsLocal();
  callee->SetRemoteDescription(std::move(offer));
  auto answer = callee->CreateAnswerAndSetAsLocal();
  auto answer_description =
      cricket::GetFirstAudioContentDescription(answer->description());
  // RED is not negotiated.
  ASSERT_EQ(1u, answer_description->codecs().size());
}
+
+// Test behaviour when the RED fmtp attempts to negotiate different levels of
+// redundancy.
+TEST_P(PeerConnectionMediaTest, RedFmtpPayloadDifferentRedundancy) {
+ std::vector<cricket::AudioCodec> caller_fake_codecs;
+ caller_fake_codecs.push_back(cricket::CreateAudioCodec(100, "foo", 0, 1));
+ caller_fake_codecs.push_back(
+ cricket::CreateAudioCodec(101, cricket::kRedCodecName, 0, 1));
+ caller_fake_codecs.back().SetParam(cricket::kCodecParamNotInNameValueFormat,
+ "100/100");
+ auto caller_fake_engine = std::make_unique<FakeMediaEngine>();
+ caller_fake_engine->SetAudioCodecs(caller_fake_codecs);
+ auto caller = CreatePeerConnectionWithAudio(std::move(caller_fake_engine));
+
+ std::vector<cricket::AudioCodec> callee_fake_codecs;
+ callee_fake_codecs.push_back(cricket::CreateAudioCodec(120, "foo", 0, 1));
+ callee_fake_codecs.push_back(
+ cricket::CreateAudioCodec(121, cricket::kRedCodecName, 0, 1));
+ callee_fake_codecs.back().SetParam(cricket::kCodecParamNotInNameValueFormat,
+ "120/120/120");
+ auto callee_fake_engine = std::make_unique<FakeMediaEngine>();
+ callee_fake_engine->SetAudioCodecs(callee_fake_codecs);
+ auto callee = CreatePeerConnectionWithAudio(std::move(callee_fake_engine));
+
+ // Offer from the caller establishes 100 as the "foo" payload type.
+ auto offer = caller->CreateOfferAndSetAsLocal();
+ callee->SetRemoteDescription(std::move(offer));
+ auto answer = callee->CreateAnswerAndSetAsLocal();
+ auto answer_description =
+ cricket::GetFirstAudioContentDescription(answer->description());
+ // RED is negotiated.
+ ASSERT_EQ(2u, answer_description->codecs().size());
+
+ // Offer from the callee should respect the established payload type, and
+ // attempt to add RED, which should refer to the correct payload type.
+ offer = callee->CreateOfferAndSetAsLocal();
+ auto* offer_description =
+ cricket::GetFirstAudioContentDescription(offer->description());
+ ASSERT_EQ(2u, offer_description->codecs().size());
+ for (const auto& codec : offer_description->codecs()) {
+ if (codec.name == "foo") {
+ ASSERT_EQ(100, codec.id);
+ } else if (codec.name == cricket::kRedCodecName) {
+ std::string fmtp;
+ ASSERT_TRUE(
+ codec.GetParam(cricket::kCodecParamNotInNameValueFormat, &fmtp));
+ EXPECT_EQ("100/100", fmtp);
+ }
+ }
+}
+
+template <typename C>
+bool CompareCodecs(const std::vector<webrtc::RtpCodecCapability>& capabilities,
+ const std::vector<C>& codecs) {
+ bool capability_has_rtx =
+ absl::c_any_of(capabilities, [](const webrtc::RtpCodecCapability& codec) {
+ return codec.name == cricket::kRtxCodecName;
+ });
+ bool codecs_has_rtx = absl::c_any_of(codecs, [](const C& codec) {
+ return codec.name == cricket::kRtxCodecName;
+ });
+
+ std::vector<C> codecs_no_rtx;
+ absl::c_copy_if(
+ codecs, std::back_inserter(codecs_no_rtx),
+ [](const C& codec) { return codec.name != cricket::kRtxCodecName; });
+
+ std::vector<webrtc::RtpCodecCapability> capabilities_no_rtx;
+ absl::c_copy_if(capabilities, std::back_inserter(capabilities_no_rtx),
+ [](const webrtc::RtpCodecCapability& codec) {
+ return codec.name != cricket::kRtxCodecName;
+ });
+
+ return capability_has_rtx == codecs_has_rtx &&
+ absl::c_equal(
+ capabilities_no_rtx, codecs_no_rtx,
+ [](const webrtc::RtpCodecCapability& capability, const C& codec) {
+ return codec.MatchesRtpCodec(capability);
+ });
+}
+
+TEST_F(PeerConnectionMediaTestUnifiedPlan,
+ SetCodecPreferencesAudioMissingRecvCodec) {
+ auto fake_engine = std::make_unique<FakeMediaEngine>();
+ auto send_codecs = fake_engine->voice().send_codecs();
+ send_codecs.push_back(cricket::CreateAudioCodec(send_codecs.back().id + 1,
+ "send_only_codec", 0, 1));
+ fake_engine->SetAudioSendCodecs(send_codecs);
+
+ auto caller = CreatePeerConnectionWithAudio(std::move(fake_engine));
+
+ auto transceiver = caller->pc()->GetTransceivers().front();
+ auto capabilities = caller->pc_factory()->GetRtpSenderCapabilities(
+ cricket::MediaType::MEDIA_TYPE_AUDIO);
+
+ std::vector<webrtc::RtpCodecCapability> codecs;
+ absl::c_copy_if(capabilities.codecs, std::back_inserter(codecs),
+ [](const webrtc::RtpCodecCapability& codec) {
+ return codec.name.find("_only_") != std::string::npos;
+ });
+
+ auto result = transceiver->SetCodecPreferences(codecs);
+ EXPECT_EQ(RTCErrorType::INVALID_MODIFICATION, result.type());
+}
+
+TEST_F(PeerConnectionMediaTestUnifiedPlan,
+ SetCodecPreferencesAudioMissingSendCodec) {
+ auto fake_engine = std::make_unique<FakeMediaEngine>();
+ auto recv_codecs = fake_engine->voice().recv_codecs();
+ recv_codecs.push_back(cricket::CreateAudioCodec(recv_codecs.back().id + 1,
+ "recv_only_codec", 0, 1));
+ fake_engine->SetAudioRecvCodecs(recv_codecs);
+ auto caller = CreatePeerConnectionWithAudio(std::move(fake_engine));
+
+ auto transceiver = caller->pc()->GetTransceivers().front();
+ auto capabilities = caller->pc_factory()->GetRtpReceiverCapabilities(
+ cricket::MediaType::MEDIA_TYPE_AUDIO);
+
+ std::vector<webrtc::RtpCodecCapability> codecs;
+ absl::c_copy_if(capabilities.codecs, std::back_inserter(codecs),
+ [](const webrtc::RtpCodecCapability& codec) {
+ return codec.name.find("_only_") != std::string::npos;
+ });
+
+ auto result = transceiver->SetCodecPreferences(codecs);
+ EXPECT_EQ(RTCErrorType::INVALID_MODIFICATION, result.type());
+}
+
+TEST_F(PeerConnectionMediaTestUnifiedPlan,
+ SetCodecPreferencesAudioRejectsVideoCodec) {
+ auto caller = CreatePeerConnectionWithAudio();
+
+ auto transceiver = caller->pc()->GetTransceivers().front();
+ auto video_codecs =
+ caller->pc_factory()
+ ->GetRtpSenderCapabilities(cricket::MediaType::MEDIA_TYPE_VIDEO)
+ .codecs;
+ auto codecs =
+ caller->pc_factory()
+ ->GetRtpSenderCapabilities(cricket::MediaType::MEDIA_TYPE_AUDIO)
+ .codecs;
+ codecs.insert(codecs.end(), video_codecs.begin(), video_codecs.end());
+ auto result = transceiver->SetCodecPreferences(codecs);
+ EXPECT_EQ(RTCErrorType::INVALID_MODIFICATION, result.type());
+}
+
+TEST_F(PeerConnectionMediaTestUnifiedPlan,
+ SetCodecPreferencesAudioRejectsOnlyRtxRedFec) {
+ auto fake_engine = std::make_unique<FakeMediaEngine>();
+ auto audio_codecs = fake_engine->voice().send_codecs();
+ audio_codecs.push_back(cricket::CreateAudioRtxCodec(
+ audio_codecs.back().id + 1, audio_codecs.back().id));
+ audio_codecs.push_back(cricket::CreateAudioCodec(
+ audio_codecs.back().id + 1, cricket::kRedCodecName, 0, 1));
+ audio_codecs.push_back(cricket::CreateAudioCodec(
+ audio_codecs.back().id + 1, cricket::kUlpfecCodecName, 0, 1));
+ fake_engine->SetAudioCodecs(audio_codecs);
+
+ auto caller = CreatePeerConnectionWithAudio(std::move(fake_engine));
+
+ auto transceiver = caller->pc()->GetTransceivers().front();
+ auto codecs =
+ caller->pc_factory()
+ ->GetRtpSenderCapabilities(cricket::MediaType::MEDIA_TYPE_AUDIO)
+ .codecs;
+ auto codecs_only_rtx_red_fec = codecs;
+ auto it = std::remove_if(codecs_only_rtx_red_fec.begin(),
+ codecs_only_rtx_red_fec.end(),
+ [](const webrtc::RtpCodecCapability& codec) {
+ return !(codec.name == cricket::kRtxCodecName ||
+ codec.name == cricket::kRedCodecName ||
+ codec.name == cricket::kUlpfecCodecName);
+ });
+ codecs_only_rtx_red_fec.erase(it, codecs_only_rtx_red_fec.end());
+
+ auto result = transceiver->SetCodecPreferences(codecs_only_rtx_red_fec);
+ EXPECT_EQ(RTCErrorType::INVALID_MODIFICATION, result.type());
+}
+
+TEST_F(PeerConnectionMediaTestUnifiedPlan, SetCodecPreferencesAllAudioCodecs) {
+ auto caller = CreatePeerConnectionWithAudio();
+
+ auto sender_audio_codecs =
+ caller->pc_factory()
+ ->GetRtpSenderCapabilities(cricket::MEDIA_TYPE_AUDIO)
+ .codecs;
+
+ auto audio_transceiver = caller->pc()->GetTransceivers().front();
+
+ // Normal case, set all capabilities as preferences
+ EXPECT_TRUE(audio_transceiver->SetCodecPreferences(sender_audio_codecs).ok());
+ auto offer = caller->CreateOffer();
+ auto codecs = offer->description()
+ ->contents()[0]
+ .media_description()
+ ->as_audio()
+ ->codecs();
+ EXPECT_TRUE(CompareCodecs(sender_audio_codecs, codecs));
+}
+
+TEST_F(PeerConnectionMediaTestUnifiedPlan,
+ SetCodecPreferencesResetAudioCodecs) {
+ auto caller = CreatePeerConnectionWithAudio();
+
+ auto sender_audio_codecs =
+ caller->pc_factory()
+ ->GetRtpSenderCapabilities(cricket::MEDIA_TYPE_AUDIO)
+ .codecs;
+ std::vector<webrtc::RtpCodecCapability> empty_codecs = {};
+
+ auto audio_transceiver = caller->pc()->GetTransceivers().front();
+
+ // Normal case, reset codec preferences
+ EXPECT_TRUE(audio_transceiver->SetCodecPreferences(empty_codecs).ok());
+ auto offer = caller->CreateOffer();
+ auto codecs = offer->description()
+ ->contents()[0]
+ .media_description()
+ ->as_audio()
+ ->codecs();
+ EXPECT_TRUE(CompareCodecs(sender_audio_codecs, codecs));
+}
+
+TEST_F(PeerConnectionMediaTestUnifiedPlan,
+ SetCodecPreferencesVideoRejectsAudioCodec) {
+ auto caller = CreatePeerConnectionWithVideo();
+
+ auto transceiver = caller->pc()->GetTransceivers().front();
+ auto audio_codecs =
+ caller->pc_factory()
+ ->GetRtpSenderCapabilities(cricket::MediaType::MEDIA_TYPE_AUDIO)
+ .codecs;
+ auto codecs =
+ caller->pc_factory()
+ ->GetRtpSenderCapabilities(cricket::MediaType::MEDIA_TYPE_VIDEO)
+ .codecs;
+ codecs.insert(codecs.end(), audio_codecs.begin(), audio_codecs.end());
+ auto result = transceiver->SetCodecPreferences(codecs);
+ EXPECT_EQ(RTCErrorType::INVALID_MODIFICATION, result.type());
+}
+
+TEST_F(PeerConnectionMediaTestUnifiedPlan,
+ SetCodecPreferencesVideoRejectsOnlyRtxRedFec) {
+ auto fake_engine = std::make_unique<FakeMediaEngine>();
+ auto video_codecs = fake_engine->video().send_codecs();
+ video_codecs.push_back(cricket::CreateVideoRtxCodec(
+ video_codecs.back().id + 1, video_codecs.back().id));
+ video_codecs.push_back(cricket::CreateVideoCodec(video_codecs.back().id + 1,
+ cricket::kRedCodecName));
+ video_codecs.push_back(cricket::CreateVideoCodec(video_codecs.back().id + 1,
+ cricket::kUlpfecCodecName));
+ fake_engine->SetVideoCodecs(video_codecs);
+
+ auto caller = CreatePeerConnectionWithVideo(std::move(fake_engine));
+
+ auto transceiver = caller->pc()->GetTransceivers().front();
+ auto codecs =
+ caller->pc_factory()
+ ->GetRtpSenderCapabilities(cricket::MediaType::MEDIA_TYPE_VIDEO)
+ .codecs;
+ auto codecs_only_rtx_red_fec = codecs;
+ auto it = std::remove_if(codecs_only_rtx_red_fec.begin(),
+ codecs_only_rtx_red_fec.end(),
+ [](const webrtc::RtpCodecCapability& codec) {
+ return !(codec.name == cricket::kRtxCodecName ||
+ codec.name == cricket::kRedCodecName ||
+ codec.name == cricket::kUlpfecCodecName);
+ });
+ codecs_only_rtx_red_fec.erase(it, codecs_only_rtx_red_fec.end());
+
+ auto result = transceiver->SetCodecPreferences(codecs_only_rtx_red_fec);
+ EXPECT_EQ(RTCErrorType::INVALID_MODIFICATION, result.type());
+}
+
+TEST_F(PeerConnectionMediaTestUnifiedPlan, SetCodecPreferencesAllVideoCodecs) {
+ auto caller = CreatePeerConnectionWithVideo();
+
+ auto sender_video_codecs =
+ caller->pc_factory()
+ ->GetRtpSenderCapabilities(cricket::MEDIA_TYPE_VIDEO)
+ .codecs;
+
+ auto video_transceiver = caller->pc()->GetTransceivers().front();
+
+ // Normal case, setting preferences to normal capabilities
+ EXPECT_TRUE(video_transceiver->SetCodecPreferences(sender_video_codecs).ok());
+ auto offer = caller->CreateOffer();
+ auto codecs = offer->description()
+ ->contents()[0]
+ .media_description()
+ ->as_video()
+ ->codecs();
+ EXPECT_TRUE(CompareCodecs(sender_video_codecs, codecs));
+}
+
+TEST_F(PeerConnectionMediaTestUnifiedPlan,
+ SetCodecPreferencesResetVideoCodecs) {
+ auto caller = CreatePeerConnectionWithVideo();
+
+ auto sender_video_codecs =
+ caller->pc_factory()
+ ->GetRtpSenderCapabilities(cricket::MEDIA_TYPE_VIDEO)
+ .codecs;
+
+ std::vector<webrtc::RtpCodecCapability> empty_codecs = {};
+
+ auto video_transceiver = caller->pc()->GetTransceivers().front();
+
+ // Normal case, resetting preferences with empty list of codecs
+ EXPECT_TRUE(video_transceiver->SetCodecPreferences(empty_codecs).ok());
+ auto offer = caller->CreateOffer();
+ auto codecs = offer->description()
+ ->contents()[0]
+ .media_description()
+ ->as_video()
+ ->codecs();
+ EXPECT_TRUE(CompareCodecs(sender_video_codecs, codecs));
+}
+
+TEST_F(PeerConnectionMediaTestUnifiedPlan,
+ SetCodecPreferencesVideoCodecDuplicatesRemoved) {
+ auto caller = CreatePeerConnectionWithVideo();
+
+ auto sender_video_codecs =
+ caller->pc_factory()
+ ->GetRtpSenderCapabilities(cricket::MEDIA_TYPE_VIDEO)
+ .codecs;
+
+ auto video_transceiver = caller->pc()->GetTransceivers().front();
+
+ // Check duplicates are removed
+ auto single_codec = sender_video_codecs;
+ single_codec.resize(1);
+ auto duplicate_codec = single_codec;
+ duplicate_codec.push_back(duplicate_codec.front());
+ duplicate_codec.push_back(duplicate_codec.front());
+ duplicate_codec.push_back(duplicate_codec.front());
+
+ EXPECT_TRUE(video_transceiver->SetCodecPreferences(duplicate_codec).ok());
+ auto offer = caller->CreateOffer();
+ auto codecs = offer->description()
+ ->contents()[0]
+ .media_description()
+ ->as_video()
+ ->codecs();
+ EXPECT_TRUE(CompareCodecs(single_codec, codecs));
+}
+
+TEST_F(PeerConnectionMediaTestUnifiedPlan, SetCodecPreferencesVideoWithRtx) {
+ auto caller_fake_engine = std::make_unique<FakeMediaEngine>();
+ auto caller_video_codecs = caller_fake_engine->video().send_codecs();
+ caller_video_codecs.push_back(cricket::CreateVideoCodec(
+ caller_video_codecs.back().id + 1, cricket::kVp8CodecName));
+ caller_video_codecs.push_back(cricket::CreateVideoRtxCodec(
+ caller_video_codecs.back().id + 1, caller_video_codecs.back().id));
+ caller_video_codecs.push_back(cricket::CreateVideoCodec(
+ caller_video_codecs.back().id + 1, cricket::kVp9CodecName));
+ caller_video_codecs.push_back(cricket::CreateVideoRtxCodec(
+ caller_video_codecs.back().id + 1, caller_video_codecs.back().id));
+ caller_fake_engine->SetVideoCodecs(caller_video_codecs);
+
+ auto caller = CreatePeerConnectionWithVideo(std::move(caller_fake_engine));
+
+ auto sender_video_codecs =
+ caller->pc_factory()
+ ->GetRtpSenderCapabilities(cricket::MEDIA_TYPE_VIDEO)
+ .codecs;
+
+ auto video_transceiver = caller->pc()->GetTransceivers().front();
+
+ // Check that RTX codec is properly added
+ auto video_codecs_vpx_rtx = sender_video_codecs;
+ auto it =
+ std::remove_if(video_codecs_vpx_rtx.begin(), video_codecs_vpx_rtx.end(),
+ [](const webrtc::RtpCodecCapability& codec) {
+ return codec.name != cricket::kRtxCodecName &&
+ codec.name != cricket::kVp8CodecName &&
+ codec.name != cricket::kVp9CodecName;
+ });
+ video_codecs_vpx_rtx.erase(it, video_codecs_vpx_rtx.end());
+ absl::c_reverse(video_codecs_vpx_rtx);
+ EXPECT_EQ(video_codecs_vpx_rtx.size(), 3u); // VP8, VP9, RTX
+ EXPECT_TRUE(
+ video_transceiver->SetCodecPreferences(video_codecs_vpx_rtx).ok());
+ auto offer = caller->CreateOffer();
+ auto codecs = offer->description()
+ ->contents()[0]
+ .media_description()
+ ->as_video()
+ ->codecs();
+
+ EXPECT_TRUE(CompareCodecs(video_codecs_vpx_rtx, codecs));
+ EXPECT_EQ(codecs.size(), 4u);
+}
+
+TEST_F(PeerConnectionMediaTestUnifiedPlan,
+ SetCodecPreferencesVideoCodecsNegotiation) {
+ auto caller_fake_engine = std::make_unique<FakeMediaEngine>();
+ auto caller_video_codecs = caller_fake_engine->video().send_codecs();
+ caller_video_codecs.push_back(cricket::CreateVideoCodec(
+ caller_video_codecs.back().id + 1, cricket::kVp8CodecName));
+ caller_video_codecs.push_back(cricket::CreateVideoRtxCodec(
+ caller_video_codecs.back().id + 1, caller_video_codecs.back().id));
+ caller_video_codecs.push_back(cricket::CreateVideoCodec(
+ caller_video_codecs.back().id + 1, cricket::kVp9CodecName));
+ caller_video_codecs.push_back(cricket::CreateVideoRtxCodec(
+ caller_video_codecs.back().id + 1, caller_video_codecs.back().id));
+ caller_fake_engine->SetVideoCodecs(caller_video_codecs);
+
+ auto callee_fake_engine = std::make_unique<FakeMediaEngine>();
+ callee_fake_engine->SetVideoCodecs(caller_video_codecs);
+
+ auto caller = CreatePeerConnectionWithVideo(std::move(caller_fake_engine));
+ auto callee = CreatePeerConnection(std::move(callee_fake_engine));
+
+ auto video_codecs = caller->pc_factory()
+ ->GetRtpSenderCapabilities(cricket::MEDIA_TYPE_VIDEO)
+ .codecs;
+
+ auto send_transceiver = caller->pc()->GetTransceivers().front();
+
+ auto video_codecs_vpx = video_codecs;
+ auto it = std::remove_if(video_codecs_vpx.begin(), video_codecs_vpx.end(),
+ [](const webrtc::RtpCodecCapability& codec) {
+ return codec.name != cricket::kVp8CodecName &&
+ codec.name != cricket::kVp9CodecName;
+ });
+ video_codecs_vpx.erase(it, video_codecs_vpx.end());
+ EXPECT_EQ(video_codecs_vpx.size(), 2u); // VP8, VP9
+ EXPECT_TRUE(send_transceiver->SetCodecPreferences(video_codecs_vpx).ok());
+
+ auto offer = caller->CreateOfferAndSetAsLocal();
+ auto codecs = offer->description()
+ ->contents()[0]
+ .media_description()
+ ->as_video()
+ ->codecs();
+
+ EXPECT_EQ(codecs.size(), 2u); // VP8, VP9
+ EXPECT_TRUE(CompareCodecs(video_codecs_vpx, codecs));
+
+ callee->SetRemoteDescription(std::move(offer));
+
+ auto recv_transceiver = callee->pc()->GetTransceivers().front();
+ auto video_codecs_vp8_rtx = video_codecs;
+ it = std::remove_if(video_codecs_vp8_rtx.begin(), video_codecs_vp8_rtx.end(),
+ [](const webrtc::RtpCodecCapability& codec) {
+ bool r = codec.name != cricket::kVp8CodecName &&
+ codec.name != cricket::kRtxCodecName;
+ return r;
+ });
+ video_codecs_vp8_rtx.erase(it, video_codecs_vp8_rtx.end());
+ EXPECT_EQ(video_codecs_vp8_rtx.size(), 2u); // VP8, RTX
+ recv_transceiver->SetCodecPreferences(video_codecs_vp8_rtx);
+
+ auto answer = callee->CreateAnswerAndSetAsLocal();
+
+ auto recv_codecs = answer->description()
+ ->contents()[0]
+ .media_description()
+ ->as_video()
+ ->codecs();
+ EXPECT_EQ(recv_codecs.size(), 1u); // VP8
+}
+
+TEST_F(PeerConnectionMediaTestUnifiedPlan,
+ SetCodecPreferencesVideoCodecsNegotiationReverseOrder) {
+ auto caller_fake_engine = std::make_unique<FakeMediaEngine>();
+ auto caller_video_codecs = caller_fake_engine->video().send_codecs();
+ caller_video_codecs.push_back(cricket::CreateVideoCodec(
+ caller_video_codecs.back().id + 1, cricket::kVp8CodecName));
+ caller_video_codecs.push_back(cricket::CreateVideoRtxCodec(
+ caller_video_codecs.back().id + 1, caller_video_codecs.back().id));
+ caller_video_codecs.push_back(cricket::CreateVideoCodec(
+ caller_video_codecs.back().id + 1, cricket::kVp9CodecName));
+ caller_video_codecs.push_back(cricket::CreateVideoRtxCodec(
+ caller_video_codecs.back().id + 1, caller_video_codecs.back().id));
+ caller_fake_engine->SetVideoCodecs(caller_video_codecs);
+
+ auto callee_fake_engine = std::make_unique<FakeMediaEngine>();
+ callee_fake_engine->SetVideoCodecs(caller_video_codecs);
+
+ auto caller = CreatePeerConnectionWithVideo(std::move(caller_fake_engine));
+ auto callee = CreatePeerConnection(std::move(callee_fake_engine));
+
+ auto video_codecs = caller->pc_factory()
+ ->GetRtpSenderCapabilities(cricket::MEDIA_TYPE_VIDEO)
+ .codecs;
+
+ auto send_transceiver = caller->pc()->GetTransceivers().front();
+
+ auto video_codecs_vpx = video_codecs;
+ auto it = std::remove_if(video_codecs_vpx.begin(), video_codecs_vpx.end(),
+ [](const webrtc::RtpCodecCapability& codec) {
+ return codec.name != cricket::kVp8CodecName &&
+ codec.name != cricket::kVp9CodecName;
+ });
+ video_codecs_vpx.erase(it, video_codecs_vpx.end());
+ EXPECT_EQ(video_codecs_vpx.size(), 2u); // VP8, VP9
+ EXPECT_TRUE(send_transceiver->SetCodecPreferences(video_codecs_vpx).ok());
+
+ auto video_codecs_vpx_reverse = video_codecs_vpx;
+ absl::c_reverse(video_codecs_vpx_reverse);
+
+ auto offer = caller->CreateOfferAndSetAsLocal();
+ auto codecs = offer->description()
+ ->contents()[0]
+ .media_description()
+ ->as_video()
+ ->codecs();
+  EXPECT_EQ(codecs.size(), 2u);  // VP8, VP9 — caller's (non-reversed) preference order
+ EXPECT_TRUE(CompareCodecs(video_codecs_vpx, codecs));
+
+ callee->SetRemoteDescription(std::move(offer));
+
+ auto recv_transceiver = callee->pc()->GetTransceivers().front();
+ recv_transceiver->SetCodecPreferences(video_codecs_vpx_reverse);
+
+ auto answer = callee->CreateAnswerAndSetAsLocal();
+
+ auto recv_codecs = answer->description()
+ ->contents()[0]
+ .media_description()
+ ->as_video()
+ ->codecs();
+
+ EXPECT_TRUE(CompareCodecs(video_codecs_vpx_reverse, recv_codecs));
+}
+
+TEST_F(PeerConnectionMediaTestUnifiedPlan,
+ SetCodecPreferencesVoiceActivityDetection) {
+ auto fake_engine = std::make_unique<FakeMediaEngine>();
+ AddComfortNoiseCodecsToSend(fake_engine.get());
+ auto caller = CreatePeerConnectionWithAudio(std::move(fake_engine));
+
+ RTCOfferAnswerOptions options;
+ auto offer = caller->CreateOffer(options);
+ EXPECT_TRUE(HasAnyComfortNoiseCodecs(offer->description()));
+
+ auto transceiver = caller->pc()->GetTransceivers().front();
+ auto capabilities = caller->pc_factory()->GetRtpSenderCapabilities(
+ cricket::MediaType::MEDIA_TYPE_AUDIO);
+ EXPECT_TRUE(transceiver->SetCodecPreferences(capabilities.codecs).ok());
+
+ options.voice_activity_detection = false;
+ offer = caller->CreateOffer(options);
+ EXPECT_FALSE(HasAnyComfortNoiseCodecs(offer->description()));
+}
+
+// If the "default" payload types of audio/video codecs are the same, and
+// audio/video are bundled (as is the default), payload types should be
+// remapped to avoid conflict, as normally happens without using
+// SetCodecPreferences.
+TEST_F(PeerConnectionMediaTestUnifiedPlan,
+ SetCodecPreferencesAvoidsPayloadTypeConflictInOffer) {
+ auto fake_engine = std::make_unique<cricket::FakeMediaEngine>();
+
+ std::vector<cricket::AudioCodec> audio_codecs;
+ audio_codecs.emplace_back(cricket::CreateAudioCodec(100, "foo", 0, 1));
+ audio_codecs.emplace_back(cricket::CreateAudioRtxCodec(101, 100));
+ fake_engine->SetAudioCodecs(audio_codecs);
+
+ std::vector<cricket::VideoCodec> video_codecs;
+ video_codecs.emplace_back(cricket::CreateVideoCodec(100, "bar"));
+ video_codecs.emplace_back(cricket::CreateVideoRtxCodec(101, 100));
+ fake_engine->SetVideoCodecs(video_codecs);
+
+ auto caller = CreatePeerConnectionWithAudioVideo(std::move(fake_engine));
+ auto transceivers = caller->pc()->GetTransceivers();
+ ASSERT_EQ(2u, transceivers.size());
+
+ auto audio_transceiver = caller->pc()->GetTransceivers()[0];
+ auto capabilities = caller->pc_factory()->GetRtpSenderCapabilities(
+ cricket::MediaType::MEDIA_TYPE_AUDIO);
+ EXPECT_TRUE(audio_transceiver->SetCodecPreferences(capabilities.codecs).ok());
+
+ auto video_transceiver = caller->pc()->GetTransceivers()[1];
+ capabilities = caller->pc_factory()->GetRtpSenderCapabilities(
+ cricket::MediaType::MEDIA_TYPE_VIDEO);
+ EXPECT_TRUE(video_transceiver->SetCodecPreferences(capabilities.codecs).ok());
+
+ RTCOfferAnswerOptions options;
+ auto offer = caller->CreateOffer(options);
+ EXPECT_FALSE(HasPayloadTypeConflict(offer->description()));
+ // Sanity check that we got the primary codec and RTX.
+ EXPECT_EQ(2u, cricket::GetFirstAudioContentDescription(offer->description())
+ ->codecs()
+ .size());
+ EXPECT_EQ(2u, cricket::GetFirstVideoContentDescription(offer->description())
+ ->codecs()
+ .size());
+}
+
+// Same as above, but preferences set for the answer.
+TEST_F(PeerConnectionMediaTestUnifiedPlan,
+ SetCodecPreferencesAvoidsPayloadTypeConflictInAnswer) {
+ auto fake_engine = std::make_unique<cricket::FakeMediaEngine>();
+
+ std::vector<cricket::AudioCodec> audio_codecs;
+ audio_codecs.emplace_back(cricket::CreateAudioCodec(100, "foo", 0, 1));
+ audio_codecs.emplace_back(cricket::CreateAudioRtxCodec(101, 100));
+ fake_engine->SetAudioCodecs(audio_codecs);
+
+ std::vector<cricket::VideoCodec> video_codecs;
+ video_codecs.emplace_back(cricket::CreateVideoCodec(100, "bar"));
+ video_codecs.emplace_back(cricket::CreateVideoRtxCodec(101, 100));
+ fake_engine->SetVideoCodecs(video_codecs);
+
+ auto caller = CreatePeerConnectionWithAudioVideo(std::move(fake_engine));
+
+ RTCOfferAnswerOptions options;
+ caller->SetRemoteDescription(caller->CreateOffer(options));
+
+ auto transceivers = caller->pc()->GetTransceivers();
+ ASSERT_EQ(2u, transceivers.size());
+
+ auto audio_transceiver = caller->pc()->GetTransceivers()[0];
+ auto capabilities = caller->pc_factory()->GetRtpSenderCapabilities(
+ cricket::MediaType::MEDIA_TYPE_AUDIO);
+ EXPECT_TRUE(audio_transceiver->SetCodecPreferences(capabilities.codecs).ok());
+
+ auto video_transceiver = caller->pc()->GetTransceivers()[1];
+ capabilities = caller->pc_factory()->GetRtpSenderCapabilities(
+ cricket::MediaType::MEDIA_TYPE_VIDEO);
+ EXPECT_TRUE(video_transceiver->SetCodecPreferences(capabilities.codecs).ok());
+
+ auto answer = caller->CreateAnswer(options);
+
+ EXPECT_FALSE(HasPayloadTypeConflict(answer->description()));
+ // Sanity check that we got the primary codec and RTX.
+ EXPECT_EQ(2u, cricket::GetFirstAudioContentDescription(answer->description())
+ ->codecs()
+ .size());
+ EXPECT_EQ(2u, cricket::GetFirstVideoContentDescription(answer->description())
+ ->codecs()
+ .size());
+}
+
+// Same as above, but preferences set for a subsequent offer.
+TEST_F(PeerConnectionMediaTestUnifiedPlan,
+ SetCodecPreferencesAvoidsPayloadTypeConflictInSubsequentOffer) {
+ auto fake_engine = std::make_unique<cricket::FakeMediaEngine>();
+
+ std::vector<cricket::AudioCodec> audio_codecs;
+ audio_codecs.emplace_back(cricket::CreateAudioCodec(100, "foo", 0, 1));
+ audio_codecs.emplace_back(cricket::CreateAudioRtxCodec(101, 100));
+ fake_engine->SetAudioCodecs(audio_codecs);
+
+ std::vector<cricket::VideoCodec> video_codecs;
+ video_codecs.emplace_back(cricket::CreateVideoCodec(100, "bar"));
+ video_codecs.emplace_back(cricket::CreateVideoRtxCodec(101, 100));
+ fake_engine->SetVideoCodecs(video_codecs);
+
+ auto caller = CreatePeerConnectionWithAudioVideo(std::move(fake_engine));
+
+ RTCOfferAnswerOptions options;
+ caller->SetRemoteDescription(caller->CreateOffer(options));
+ caller->SetLocalDescription(caller->CreateAnswer(options));
+
+ auto transceivers = caller->pc()->GetTransceivers();
+ ASSERT_EQ(2u, transceivers.size());
+
+ auto audio_transceiver = caller->pc()->GetTransceivers()[0];
+ auto capabilities = caller->pc_factory()->GetRtpSenderCapabilities(
+ cricket::MediaType::MEDIA_TYPE_AUDIO);
+ EXPECT_TRUE(audio_transceiver->SetCodecPreferences(capabilities.codecs).ok());
+
+ auto video_transceiver = caller->pc()->GetTransceivers()[1];
+ capabilities = caller->pc_factory()->GetRtpSenderCapabilities(
+ cricket::MediaType::MEDIA_TYPE_VIDEO);
+ EXPECT_TRUE(video_transceiver->SetCodecPreferences(capabilities.codecs).ok());
+
+ auto reoffer = caller->CreateOffer(options);
+ ASSERT_THAT(reoffer, NotNull());
+
+ EXPECT_FALSE(HasPayloadTypeConflict(reoffer->description()));
+ // Sanity check that we got the primary codec and RTX.
+ EXPECT_EQ(2u, cricket::GetFirstAudioContentDescription(reoffer->description())
+ ->codecs()
+ .size());
+ EXPECT_EQ(2u, cricket::GetFirstVideoContentDescription(reoffer->description())
+ ->codecs()
+ .size());
+}
+
+INSTANTIATE_TEST_SUITE_P(PeerConnectionMediaTest,
+ PeerConnectionMediaTest,
+ Values(SdpSemantics::kPlanB_DEPRECATED,
+ SdpSemantics::kUnifiedPlan));
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/peer_connection_message_handler.cc b/third_party/libwebrtc/pc/peer_connection_message_handler.cc
new file mode 100644
index 0000000000..8ddeddea58
--- /dev/null
+++ b/third_party/libwebrtc/pc/peer_connection_message_handler.cc
@@ -0,0 +1,85 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/peer_connection_message_handler.h"
+
+#include <utility>
+
+#include "api/jsep.h"
+#include "api/legacy_stats_types.h"
+#include "api/media_stream_interface.h"
+#include "api/peer_connection_interface.h"
+#include "api/scoped_refptr.h"
+#include "api/sequence_checker.h"
+#include "api/task_queue/pending_task_safety_flag.h"
+#include "pc/legacy_stats_collector_interface.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+namespace {
+
+template <typename T>
+rtc::scoped_refptr<T> WrapScoped(T* ptr) {
+ return rtc::scoped_refptr<T>(ptr);
+}
+
+} // namespace
+
+void PeerConnectionMessageHandler::PostSetSessionDescriptionSuccess(
+ SetSessionDescriptionObserver* observer) {
+ signaling_thread_->PostTask(
+ SafeTask(safety_.flag(),
+ [observer = WrapScoped(observer)] { observer->OnSuccess(); }));
+}
+
+void PeerConnectionMessageHandler::PostSetSessionDescriptionFailure(
+ SetSessionDescriptionObserver* observer,
+ RTCError&& error) {
+ RTC_DCHECK(!error.ok());
+ signaling_thread_->PostTask(SafeTask(
+ safety_.flag(),
+ [observer = WrapScoped(observer), error = std::move(error)]() mutable {
+ observer->OnFailure(std::move(error));
+ }));
+}
+
+void PeerConnectionMessageHandler::PostCreateSessionDescriptionFailure(
+ CreateSessionDescriptionObserver* observer,
+ RTCError error) {
+ RTC_DCHECK(!error.ok());
+  // Deliberately not guarded by safety_.flag(): the failure callback must
+  // still be invoked even if the PeerConnection is destroyed early.
+ signaling_thread_->PostTask(
+ [observer = WrapScoped(observer), error = std::move(error)]() mutable {
+ observer->OnFailure(std::move(error));
+ });
+}
+
+void PeerConnectionMessageHandler::PostGetStats(
+ StatsObserver* observer,
+ LegacyStatsCollectorInterface* legacy_stats,
+ MediaStreamTrackInterface* track) {
+ signaling_thread_->PostTask(
+ SafeTask(safety_.flag(), [observer = WrapScoped(observer), legacy_stats,
+ track = WrapScoped(track)] {
+ StatsReports reports;
+ legacy_stats->GetStats(track.get(), &reports);
+ observer->OnComplete(reports);
+ }));
+}
+
+void PeerConnectionMessageHandler::RequestUsagePatternReport(
+ std::function<void()> func,
+ int delay_ms) {
+ signaling_thread_->PostDelayedTask(SafeTask(safety_.flag(), std::move(func)),
+ TimeDelta::Millis(delay_ms));
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/peer_connection_message_handler.h b/third_party/libwebrtc/pc/peer_connection_message_handler.h
new file mode 100644
index 0000000000..8bd0e5ebb1
--- /dev/null
+++ b/third_party/libwebrtc/pc/peer_connection_message_handler.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_PEER_CONNECTION_MESSAGE_HANDLER_H_
+#define PC_PEER_CONNECTION_MESSAGE_HANDLER_H_
+
+#include <functional>
+
+#include "api/jsep.h"
+#include "api/legacy_stats_types.h"
+#include "api/media_stream_interface.h"
+#include "api/peer_connection_interface.h"
+#include "api/rtc_error.h"
+#include "api/task_queue/pending_task_safety_flag.h"
+#include "api/task_queue/task_queue_base.h"
+#include "pc/legacy_stats_collector_interface.h"
+
+namespace webrtc {
+
+// Posts PeerConnection observer callbacks onto the signaling thread. Tasks
+// guarded by `safety_` are dropped if this handler is destroyed before they
+// run; PostCreateSessionDescriptionFailure is the deliberate exception.
+class PeerConnectionMessageHandler {
+ public:
+  explicit PeerConnectionMessageHandler(rtc::Thread* signaling_thread)
+      : signaling_thread_(signaling_thread) {}
+  ~PeerConnectionMessageHandler() = default;
+
+  // Queues observer->OnSuccess() on the signaling thread.
+  void PostSetSessionDescriptionSuccess(
+      SetSessionDescriptionObserver* observer);
+  // Queues observer->OnFailure(error); `error` must describe a failure.
+  void PostSetSessionDescriptionFailure(SetSessionDescriptionObserver* observer,
+                                        RTCError&& error);
+  // Queues observer->OnFailure(error). Not guarded by `safety_`, so the
+  // observer is invoked even if this handler is destroyed early.
+  void PostCreateSessionDescriptionFailure(
+      CreateSessionDescriptionObserver* observer,
+      RTCError error);
+  // Queues a legacy stats collection; results go to observer->OnComplete().
+  void PostGetStats(StatsObserver* observer,
+                    LegacyStatsCollectorInterface* legacy_stats,
+                    MediaStreamTrackInterface* track);
+  // Runs the callback on the signaling thread after `delay_ms` milliseconds.
+  void RequestUsagePatternReport(std::function<void()>, int delay_ms);
+
+ private:
+  // Invalidated on destruction; pending guarded tasks are then dropped.
+  ScopedTaskSafety safety_;
+  TaskQueueBase* const signaling_thread_;
+};
+
+} // namespace webrtc
+
+#endif // PC_PEER_CONNECTION_MESSAGE_HANDLER_H_
diff --git a/third_party/libwebrtc/pc/peer_connection_proxy.h b/third_party/libwebrtc/pc/peer_connection_proxy.h
new file mode 100644
index 0000000000..6db27f2dd5
--- /dev/null
+++ b/third_party/libwebrtc/pc/peer_connection_proxy.h
@@ -0,0 +1,170 @@
+/*
+ * Copyright 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_PEER_CONNECTION_PROXY_H_
+#define PC_PEER_CONNECTION_PROXY_H_
+
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "api/peer_connection_interface.h"
+#include "pc/proxy.h"
+
+namespace webrtc {
+
+// PeerConnection proxy objects will be constructed with two thread pointers,
+// signaling and network. The proxy macros don't have 'network' specific macros
+// and support for a secondary thread is provided via 'SECONDARY' macros.
+// TODO(deadbeef): Move this to .cc file. What threads methods are called on is
+// an implementation detail.
+BEGIN_PROXY_MAP(PeerConnection)
+PROXY_PRIMARY_THREAD_DESTRUCTOR()
+// Stream-level APIs.
+PROXY_METHOD0(rtc::scoped_refptr<StreamCollectionInterface>, local_streams)
+PROXY_METHOD0(rtc::scoped_refptr<StreamCollectionInterface>, remote_streams)
+PROXY_METHOD1(bool, AddStream, MediaStreamInterface*)
+PROXY_METHOD1(void, RemoveStream, MediaStreamInterface*)
+// Track, sender and transceiver management.
+PROXY_METHOD2(RTCErrorOr<rtc::scoped_refptr<RtpSenderInterface>>,
+              AddTrack,
+              rtc::scoped_refptr<MediaStreamTrackInterface>,
+              const std::vector<std::string>&)
+PROXY_METHOD3(RTCErrorOr<rtc::scoped_refptr<RtpSenderInterface>>,
+              AddTrack,
+              rtc::scoped_refptr<MediaStreamTrackInterface>,
+              const std::vector<std::string>&,
+              const std::vector<RtpEncodingParameters>&)
+PROXY_METHOD1(RTCError,
+              RemoveTrackOrError,
+              rtc::scoped_refptr<RtpSenderInterface>)
+PROXY_METHOD1(RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>>,
+              AddTransceiver,
+              rtc::scoped_refptr<MediaStreamTrackInterface>)
+PROXY_METHOD2(RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>>,
+              AddTransceiver,
+              rtc::scoped_refptr<MediaStreamTrackInterface>,
+              const RtpTransceiverInit&)
+PROXY_METHOD1(RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>>,
+              AddTransceiver,
+              cricket::MediaType)
+PROXY_METHOD2(RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>>,
+              AddTransceiver,
+              cricket::MediaType,
+              const RtpTransceiverInit&)
+PROXY_METHOD2(rtc::scoped_refptr<RtpSenderInterface>,
+              CreateSender,
+              const std::string&,
+              const std::string&)
+PROXY_CONSTMETHOD0(std::vector<rtc::scoped_refptr<RtpSenderInterface>>,
+                   GetSenders)
+PROXY_CONSTMETHOD0(std::vector<rtc::scoped_refptr<RtpReceiverInterface>>,
+                   GetReceivers)
+PROXY_CONSTMETHOD0(std::vector<rtc::scoped_refptr<RtpTransceiverInterface>>,
+                   GetTransceivers)
+// Stats collection (legacy StatsObserver and standardized RTCStats flavors).
+PROXY_METHOD3(bool,
+              GetStats,
+              StatsObserver*,
+              MediaStreamTrackInterface*,
+              StatsOutputLevel)
+PROXY_METHOD1(void, GetStats, RTCStatsCollectorCallback*)
+PROXY_METHOD2(void,
+              GetStats,
+              rtc::scoped_refptr<RtpSenderInterface>,
+              rtc::scoped_refptr<RTCStatsCollectorCallback>)
+PROXY_METHOD2(void,
+              GetStats,
+              rtc::scoped_refptr<RtpReceiverInterface>,
+              rtc::scoped_refptr<RTCStatsCollectorCallback>)
+PROXY_METHOD0(void, ClearStatsCache)
+PROXY_METHOD2(RTCErrorOr<rtc::scoped_refptr<DataChannelInterface>>,
+              CreateDataChannelOrError,
+              const std::string&,
+              const DataChannelInit*)
+// Session description accessors.
+PROXY_CONSTMETHOD0(const SessionDescriptionInterface*, local_description)
+PROXY_CONSTMETHOD0(const SessionDescriptionInterface*, remote_description)
+PROXY_CONSTMETHOD0(const SessionDescriptionInterface*,
+                   current_local_description)
+PROXY_CONSTMETHOD0(const SessionDescriptionInterface*,
+                   current_remote_description)
+PROXY_CONSTMETHOD0(const SessionDescriptionInterface*,
+                   pending_local_description)
+PROXY_CONSTMETHOD0(const SessionDescriptionInterface*,
+                   pending_remote_description)
+// SDP offer/answer negotiation.
+PROXY_METHOD0(void, RestartIce)
+PROXY_METHOD2(void,
+              CreateOffer,
+              CreateSessionDescriptionObserver*,
+              const RTCOfferAnswerOptions&)
+PROXY_METHOD2(void,
+              CreateAnswer,
+              CreateSessionDescriptionObserver*,
+              const RTCOfferAnswerOptions&)
+PROXY_METHOD2(void,
+              SetLocalDescription,
+              std::unique_ptr<SessionDescriptionInterface>,
+              rtc::scoped_refptr<SetLocalDescriptionObserverInterface>)
+PROXY_METHOD1(void,
+              SetLocalDescription,
+              rtc::scoped_refptr<SetLocalDescriptionObserverInterface>)
+PROXY_METHOD2(void,
+              SetLocalDescription,
+              SetSessionDescriptionObserver*,
+              SessionDescriptionInterface*)
+PROXY_METHOD1(void, SetLocalDescription, SetSessionDescriptionObserver*)
+PROXY_METHOD2(void,
+              SetRemoteDescription,
+              std::unique_ptr<SessionDescriptionInterface>,
+              rtc::scoped_refptr<SetRemoteDescriptionObserverInterface>)
+PROXY_METHOD2(void,
+              SetRemoteDescription,
+              SetSessionDescriptionObserver*,
+              SessionDescriptionInterface*)
+PROXY_METHOD1(bool, ShouldFireNegotiationNeededEvent, uint32_t)
+// Configuration and ICE candidate handling.
+PROXY_METHOD0(PeerConnectionInterface::RTCConfiguration, GetConfiguration)
+PROXY_METHOD1(RTCError,
+              SetConfiguration,
+              const PeerConnectionInterface::RTCConfiguration&)
+PROXY_METHOD1(bool, AddIceCandidate, const IceCandidateInterface*)
+PROXY_METHOD2(void,
+              AddIceCandidate,
+              std::unique_ptr<IceCandidateInterface>,
+              std::function<void(RTCError)>)
+PROXY_METHOD1(bool, RemoveIceCandidates, const std::vector<cricket::Candidate>&)
+PROXY_METHOD1(RTCError, SetBitrate, const BitrateSettings&)
+PROXY_METHOD1(void, SetAudioPlayout, bool)
+PROXY_METHOD1(void, SetAudioRecording, bool)
+// This method will be invoked on the network thread. See
+// PeerConnectionFactory::CreatePeerConnectionOrError for more details.
+PROXY_SECONDARY_METHOD1(rtc::scoped_refptr<DtlsTransportInterface>,
+                        LookupDtlsTransportByMid,
+                        const std::string&)
+// This method will be invoked on the network thread. See
+// PeerConnectionFactory::CreatePeerConnectionOrError for more details.
+PROXY_SECONDARY_CONSTMETHOD0(rtc::scoped_refptr<SctpTransportInterface>,
+                             GetSctpTransport)
+// State accessors.
+PROXY_METHOD0(SignalingState, signaling_state)
+PROXY_METHOD0(IceConnectionState, ice_connection_state)
+PROXY_METHOD0(IceConnectionState, standardized_ice_connection_state)
+PROXY_METHOD0(PeerConnectionState, peer_connection_state)
+PROXY_METHOD0(IceGatheringState, ice_gathering_state)
+PROXY_METHOD0(absl::optional<bool>, can_trickle_ice_candidates)
+PROXY_METHOD1(void, AddAdaptationResource, rtc::scoped_refptr<Resource>)
+// Event logging and teardown.
+PROXY_METHOD2(bool,
+              StartRtcEventLog,
+              std::unique_ptr<RtcEventLogOutput>,
+              int64_t)
+PROXY_METHOD1(bool, StartRtcEventLog, std::unique_ptr<RtcEventLogOutput>)
+PROXY_METHOD0(void, StopRtcEventLog)
+PROXY_METHOD0(void, Close)
+BYPASS_PROXY_CONSTMETHOD0(rtc::Thread*, signaling_thread)
+END_PROXY_MAP(PeerConnection)
+
+} // namespace webrtc
+
+#endif // PC_PEER_CONNECTION_PROXY_H_
diff --git a/third_party/libwebrtc/pc/peer_connection_rampup_tests.cc b/third_party/libwebrtc/pc/peer_connection_rampup_tests.cc
new file mode 100644
index 0000000000..545a1d53d0
--- /dev/null
+++ b/third_party/libwebrtc/pc/peer_connection_rampup_tests.cc
@@ -0,0 +1,455 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/audio/audio_mixer.h"
+#include "api/audio_codecs/builtin_audio_decoder_factory.h"
+#include "api/audio_codecs/builtin_audio_encoder_factory.h"
+#include "api/audio_options.h"
+#include "api/create_peerconnection_factory.h"
+#include "api/jsep.h"
+#include "api/media_stream_interface.h"
+#include "api/peer_connection_interface.h"
+#include "api/rtc_error.h"
+#include "api/scoped_refptr.h"
+#include "api/stats/rtc_stats.h"
+#include "api/stats/rtc_stats_report.h"
+#include "api/stats/rtcstats_objects.h"
+#include "api/test/metrics/global_metrics_logger_and_exporter.h"
+#include "api/test/metrics/metric.h"
+#include "api/video_codecs/video_decoder_factory_template.h"
+#include "api/video_codecs/video_decoder_factory_template_dav1d_adapter.h"
+#include "api/video_codecs/video_decoder_factory_template_libvpx_vp8_adapter.h"
+#include "api/video_codecs/video_decoder_factory_template_libvpx_vp9_adapter.h"
+#include "api/video_codecs/video_decoder_factory_template_open_h264_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template.h"
+#include "api/video_codecs/video_encoder_factory_template_libaom_av1_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template_libvpx_vp8_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template_libvpx_vp9_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template_open_h264_adapter.h"
+#include "modules/audio_device/include/audio_device.h"
+#include "modules/audio_processing/include/audio_processing.h"
+#include "p2p/base/port_allocator.h"
+#include "p2p/base/port_interface.h"
+#include "p2p/base/test_turn_server.h"
+#include "p2p/client/basic_port_allocator.h"
+#include "pc/peer_connection.h"
+#include "pc/peer_connection_wrapper.h"
+#include "pc/test/fake_audio_capture_module.h"
+#include "pc/test/frame_generator_capturer_video_track_source.h"
+#include "pc/test/mock_peer_connection_observers.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/fake_network.h"
+#include "rtc_base/firewall_socket_server.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/helpers.h"
+#include "rtc_base/socket_address.h"
+#include "rtc_base/socket_factory.h"
+#include "rtc_base/ssl_certificate.h"
+#include "rtc_base/task_queue_for_test.h"
+#include "rtc_base/test_certificate_verifier.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/virtual_socket_server.h"
+#include "system_wrappers/include/clock.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace {
+
+using ::webrtc::test::GetGlobalMetricsLogger;
+using ::webrtc::test::ImprovementDirection;
+using ::webrtc::test::Unit;
+
+// Total wall-clock duration of a test run, including the ramp-up phase.
+static const int kDefaultTestTimeMs = 15000;
+// Initial period that lets the bandwidth estimate ramp up before sampling.
+static const int kRampUpTimeMs = 5000;
+// Interval between samples of the bandwidth-estimate stat.
+static const int kPollIntervalTimeMs = 50;
+// Timeout for the ASSERT_TRUE_WAIT conditions in call setup.
+static const int kDefaultTimeoutMs = 10000;
+static const rtc::SocketAddress kDefaultLocalAddress("1.1.1.1", 0);
+static const char kTurnInternalAddress[] = "88.88.88.0";
+static const char kTurnExternalAddress[] = "88.88.88.1";
+static const int kTurnInternalPort = 3478;
+static const int kTurnExternalPort = 0;
+// The video's configured max bitrate in webrtcvideoengine.cc is 1.7 Mbps.
+// Setting the network bandwidth to 1 Mbps allows the video's bitrate to push
+// the network's limitations.
+static const int kNetworkBandwidth = 1000000;
+
+}  // namespace
+
+using RTCConfiguration = PeerConnectionInterface::RTCConfiguration;
+
+// This is an end to end test to verify that BWE is functioning when setting
+// up a one to one call at the PeerConnection level. The intention of the test
+// is to catch potential regressions for different ICE path configurations. The
+// test uses a VirtualSocketServer for its underlying simulated network and
+// fake audio and video sources. The test is based upon rampup_tests.cc, but
+// instead is at the PeerConnection level and uses a different fake network
+// (rampup_tests.cc uses SimulatedNetwork). In the future, this test could
+// potentially test different network conditions and test video quality as well
+// (video_quality_test.cc does this, but at the call level).
+//
+// The perf test results are printed using the perf test support. If the
+// isolated_script_test_perf_output flag is specified in test_main.cc, then
+// the results are written to a JSON formatted file for the Chrome perf
+// dashboard. Since this test is a webrtc_perf_test, it will be run in the perf
+// console every webrtc commit.
+// PeerConnectionWrapper subclass used by the ramp-up tests. It keeps the
+// frame-generator video sources alive for the wrapper's lifetime and offers
+// helpers to create local tracks and inject remote ICE candidates.
+class PeerConnectionWrapperForRampUpTest : public PeerConnectionWrapper {
+ public:
+  using PeerConnectionWrapper::PeerConnectionWrapper;
+
+  PeerConnectionWrapperForRampUpTest(
+      rtc::scoped_refptr<PeerConnectionFactoryInterface> pc_factory,
+      rtc::scoped_refptr<PeerConnectionInterface> pc,
+      std::unique_ptr<MockPeerConnectionObserver> observer)
+      : PeerConnectionWrapper::PeerConnectionWrapper(pc_factory,
+                                                     pc,
+                                                     std::move(observer)) {}
+
+  // Adds every candidate to the underlying PeerConnection. Returns false if
+  // any single AddIceCandidate() call failed, but still attempts the rest.
+  bool AddIceCandidates(std::vector<const IceCandidateInterface*> candidates) {
+    bool all_added = true;
+    for (const IceCandidateInterface* candidate : candidates) {
+      all_added = pc()->AddIceCandidate(candidate) && all_added;
+    }
+    return all_added;
+  }
+
+  // Creates (and starts) a frame-generator backed video track. The source is
+  // retained in `video_track_sources_` so it outlives the returned track.
+  rtc::scoped_refptr<VideoTrackInterface> CreateLocalVideoTrack(
+      FrameGeneratorCapturerVideoTrackSource::Config config,
+      Clock* clock) {
+    auto source = rtc::make_ref_counted<FrameGeneratorCapturerVideoTrackSource>(
+        config, clock, /*is_screencast=*/false);
+    source->Start();
+    video_track_sources_.push_back(source);
+    return rtc::scoped_refptr<VideoTrackInterface>(
+        pc_factory()->CreateVideoTrack(source, rtc::CreateRandomUuid()));
+  }
+
+  // Creates an audio track backed by a source configured with `options`.
+  rtc::scoped_refptr<AudioTrackInterface> CreateLocalAudioTrack(
+      const cricket::AudioOptions options) {
+    auto audio_source = pc_factory()->CreateAudioSource(options);
+    return pc_factory()->CreateAudioTrack(rtc::CreateRandomUuid(),
+                                          audio_source.get());
+  }
+
+ private:
+  std::vector<rtc::scoped_refptr<FrameGeneratorCapturerVideoTrackSource>>
+      video_track_sources_;
+};
+
+// TODO(shampson): Parameterize the test to run for both Plan B & Unified Plan.
+class PeerConnectionRampUpTest : public ::testing::Test {
+ public:
+  // Builds the simulated network (a VirtualSocketServer wrapped by a
+  // FirewallSocketServer), starts dedicated network/worker threads, caps the
+  // simulated bandwidth, and creates the PeerConnection factory.
+  PeerConnectionRampUpTest()
+      : clock_(Clock::GetRealTimeClock()),
+        virtual_socket_server_(new rtc::VirtualSocketServer()),
+        firewall_socket_server_(
+            new rtc::FirewallSocketServer(virtual_socket_server_.get())),
+        firewall_socket_factory_(
+            new rtc::BasicPacketSocketFactory(firewall_socket_server_.get())),
+        network_thread_(new rtc::Thread(firewall_socket_server_.get())),
+        worker_thread_(rtc::Thread::Create()) {
+    network_thread_->SetName("PCNetworkThread", this);
+    worker_thread_->SetName("PCWorkerThread", this);
+    RTC_CHECK(network_thread_->Start());
+    RTC_CHECK(worker_thread_->Start());
+
+    // kNetworkBandwidth is in bits/s; set_bandwidth() presumably takes
+    // bytes/s, hence the /8 — confirm against VirtualSocketServer.
+    virtual_socket_server_->set_bandwidth(kNetworkBandwidth / 8);
+    pc_factory_ = CreatePeerConnectionFactory(
+        network_thread_.get(), worker_thread_.get(), rtc::Thread::Current(),
+        rtc::scoped_refptr<AudioDeviceModule>(FakeAudioCaptureModule::Create()),
+        CreateBuiltinAudioEncoderFactory(), CreateBuiltinAudioDecoderFactory(),
+        std::make_unique<VideoEncoderFactoryTemplate<
+            LibvpxVp8EncoderTemplateAdapter, LibvpxVp9EncoderTemplateAdapter,
+            OpenH264EncoderTemplateAdapter, LibaomAv1EncoderTemplateAdapter>>(),
+        std::make_unique<VideoDecoderFactoryTemplate<
+            LibvpxVp8DecoderTemplateAdapter, LibvpxVp9DecoderTemplateAdapter,
+            OpenH264DecoderTemplateAdapter, Dav1dDecoderTemplateAdapter>>(),
+        nullptr /* audio_mixer */, nullptr /* audio_processing */);
+  }
+
+  // TURN servers must be destroyed on the network thread; see `turn_servers_`.
+  virtual ~PeerConnectionRampUpTest() {
+    SendTask(network_thread(), [this] { turn_servers_.clear(); });
+  }
+
+  // Creates caller and callee wrappers; returns true only if both succeeded.
+  bool CreatePeerConnectionWrappers(const RTCConfiguration& caller_config,
+                                    const RTCConfiguration& callee_config) {
+    caller_ = CreatePeerConnectionWrapper(caller_config);
+    callee_ = CreatePeerConnectionWrapper(callee_config);
+    return caller_ && callee_;
+  }
+
+  // Creates one wrapped PeerConnection, backed by a fake network manager
+  // exposing a single interface (kDefaultLocalAddress) and a port allocator
+  // that goes through the firewall socket factory. Returns nullptr on failure.
+  std::unique_ptr<PeerConnectionWrapperForRampUpTest>
+  CreatePeerConnectionWrapper(const RTCConfiguration& config) {
+    auto* fake_network_manager = new rtc::FakeNetworkManager();
+    fake_network_manager->AddInterface(kDefaultLocalAddress);
+    // Retained in the fixture so the manager outlives the allocator using it.
+    fake_network_managers_.emplace_back(fake_network_manager);
+
+    auto observer = std::make_unique<MockPeerConnectionObserver>();
+    webrtc::PeerConnectionDependencies dependencies(observer.get());
+    cricket::BasicPortAllocator* port_allocator =
+        new cricket::BasicPortAllocator(fake_network_manager,
+                                        firewall_socket_factory_.get());
+
+    port_allocator->set_step_delay(cricket::kDefaultStepDelay);
+    dependencies.allocator =
+        std::unique_ptr<cricket::BasicPortAllocator>(port_allocator);
+    dependencies.tls_cert_verifier =
+        std::make_unique<rtc::TestCertificateVerifier>();
+
+    auto result = pc_factory_->CreatePeerConnectionOrError(
+        config, std::move(dependencies));
+    if (!result.ok()) {
+      return nullptr;
+    }
+
+    return std::make_unique<PeerConnectionWrapperForRampUpTest>(
+        pc_factory_, result.MoveValue(), std::move(observer));
+  }
+
+  // Adds caller video and audio tracks, performs offer/answer, exchanges ICE
+  // candidates, and waits until ICE (and hence DTLS) is connected both ways.
+  void SetupOneWayCall() {
+    ASSERT_TRUE(caller_);
+    ASSERT_TRUE(callee_);
+    FrameGeneratorCapturerVideoTrackSource::Config config;
+    caller_->AddTrack(caller_->CreateLocalVideoTrack(config, clock_));
+    // Disable highpass filter so that we can get all the test audio frames.
+    cricket::AudioOptions options;
+    options.highpass_filter = false;
+    caller_->AddTrack(caller_->CreateLocalAudioTrack(options));
+
+    // Do the SDP negotiation, and also exchange ice candidates.
+    ASSERT_TRUE(caller_->ExchangeOfferAnswerWith(callee_.get()));
+    ASSERT_TRUE_WAIT(
+        caller_->signaling_state() == PeerConnectionInterface::kStable,
+        kDefaultTimeoutMs);
+    ASSERT_TRUE_WAIT(caller_->IsIceGatheringDone(), kDefaultTimeoutMs);
+    ASSERT_TRUE_WAIT(callee_->IsIceGatheringDone(), kDefaultTimeoutMs);
+
+    // Connect the ICE candidate pairs.
+    ASSERT_TRUE(
+        callee_->AddIceCandidates(caller_->observer()->GetAllCandidates()));
+    ASSERT_TRUE(
+        caller_->AddIceCandidates(callee_->observer()->GetAllCandidates()));
+    // This means that ICE and DTLS are connected.
+    ASSERT_TRUE_WAIT(callee_->IsIceConnected(), kDefaultTimeoutMs);
+    ASSERT_TRUE_WAIT(caller_->IsIceConnected(), kDefaultTimeoutMs);
+  }
+
+  // Starts a TestTurnServer of the given protocol type. `common_name` is the
+  // name placed in the server's certificate (used by the TLS test).
+  void CreateTurnServer(cricket::ProtocolType type,
+                        const std::string& common_name = "test turn server") {
+    rtc::Thread* thread = network_thread();
+    rtc::SocketFactory* factory = firewall_socket_server_.get();
+    std::unique_ptr<cricket::TestTurnServer> turn_server;
+    // The server is created on the network thread (and destroyed there, see
+    // the destructor) to avoid races with socket I/O.
+    SendTask(network_thread_.get(), [&] {
+      static const rtc::SocketAddress turn_server_internal_address{
+          kTurnInternalAddress, kTurnInternalPort};
+      static const rtc::SocketAddress turn_server_external_address{
+          kTurnExternalAddress, kTurnExternalPort};
+      turn_server = std::make_unique<cricket::TestTurnServer>(
+          thread, factory, turn_server_internal_address,
+          turn_server_external_address, type, true /*ignore_bad_certs=*/,
+          common_name);
+    });
+    turn_servers_.push_back(std::move(turn_server));
+  }
+
+  // First runs the call for kRampUpTimeMs to ramp up the bandwidth estimate.
+  // Then runs the test for the remaining test time, grabbing the bandwidth
+  // estimation stat, every kPollIntervalTimeMs. When finished, averages the
+  // bandwidth estimations and prints the bandwidth estimation result as a perf
+  // metric.
+  void RunTest(const std::string& test_string) {
+    rtc::Thread::Current()->ProcessMessages(kRampUpTimeMs);
+    int number_of_polls =
+        (kDefaultTestTimeMs - kRampUpTimeMs) / kPollIntervalTimeMs;
+    int total_bwe = 0;
+    for (int i = 0; i < number_of_polls; ++i) {
+      rtc::Thread::Current()->ProcessMessages(kPollIntervalTimeMs);
+      total_bwe += static_cast<int>(GetCallerAvailableBitrateEstimate());
+    }
+    // NOTE(review): integer division truncates before the assignment; cast
+    // `total_bwe` to double if fractional precision is wanted in the metric.
+    double average_bandwidth_estimate = total_bwe / number_of_polls;
+    std::string value_description =
+        "bwe_after_" + std::to_string(kDefaultTestTimeMs / 1000) + "_seconds";
+    GetGlobalMetricsLogger()->LogSingleValueMetric(
+        "peerconnection_ramp_up_" + test_string, value_description,
+        average_bandwidth_estimate, Unit::kUnitless,
+        ImprovementDirection::kNeitherIsBetter);
+  }
+
+  rtc::Thread* network_thread() { return network_thread_.get(); }
+
+  rtc::FirewallSocketServer* firewall_socket_server() {
+    return firewall_socket_server_.get();
+  }
+
+  PeerConnectionWrapperForRampUpTest* caller() { return caller_.get(); }
+
+  PeerConnectionWrapperForRampUpTest* callee() { return callee_.get(); }
+
+ private:
+  // Gets the caller's outgoing available bitrate from the stats. Returns 0 if
+  // something went wrong. It takes the outgoing bitrate from the current
+  // selected ICE candidate pair's stats.
+  double GetCallerAvailableBitrateEstimate() {
+    auto stats = caller_->GetStats();
+    auto transport_stats = stats->GetStatsOfType<RTCTransportStats>();
+    if (transport_stats.size() == 0u ||
+        !transport_stats[0]->selected_candidate_pair_id.is_defined()) {
+      return 0;
+    }
+    std::string selected_ice_id =
+        transport_stats[0]->selected_candidate_pair_id.ValueToString();
+    // Use the selected ICE candidate pair ID to get the appropriate ICE stats.
+    const RTCIceCandidatePairStats ice_candidate_pair_stats =
+        stats->Get(selected_ice_id)->cast_to<const RTCIceCandidatePairStats>();
+    if (ice_candidate_pair_stats.available_outgoing_bitrate.is_defined()) {
+      return *ice_candidate_pair_stats.available_outgoing_bitrate;
+    }
+    // We couldn't get the `available_outgoing_bitrate` for the active candidate
+    // pair.
+    return 0;
+  }
+
+  Clock* const clock_;
+  // The turn servers should be accessed & deleted on the network thread to
+  // avoid a race with the socket read/write which occurs on the network thread.
+  std::vector<std::unique_ptr<cricket::TestTurnServer>> turn_servers_;
+  // `virtual_socket_server_` is used by `network_thread_` so it must be
+  // destroyed later.
+  // TODO(bugs.webrtc.org/7668): We would like to update the virtual network we
+  // use for this test. VirtualSocketServer isn't ideal because:
+  // 1) It uses the same queue & network capacity for both directions.
+  // 2) VirtualSocketServer implements how the network bandwidth affects the
+  //    send delay differently than the SimulatedNetwork, used by the
+  //    FakeNetworkPipe. It would be ideal if all of levels of virtual
+  //    networks used in testing were consistent.
+  // We would also like to update this test to record the time to ramp up,
+  // down, and back up (similar to in rampup_tests.cc). This is problematic with
+  // the VirtualSocketServer. The first ramp down time is very noisy and the
+  // second ramp up time can take up to 300 seconds, most likely due to a built
+  // up queue.
+  std::unique_ptr<rtc::VirtualSocketServer> virtual_socket_server_;
+  std::unique_ptr<rtc::FirewallSocketServer> firewall_socket_server_;
+  std::unique_ptr<rtc::BasicPacketSocketFactory> firewall_socket_factory_;
+
+  std::unique_ptr<rtc::Thread> network_thread_;
+  std::unique_ptr<rtc::Thread> worker_thread_;
+  // The `pc_factory` uses `network_thread_` & `worker_thread_`, so it must be
+  // destroyed first.
+  std::vector<std::unique_ptr<rtc::FakeNetworkManager>> fake_network_managers_;
+  rtc::scoped_refptr<PeerConnectionFactoryInterface> pc_factory_;
+  std::unique_ptr<PeerConnectionWrapperForRampUpTest> caller_;
+  std::unique_ptr<PeerConnectionWrapperForRampUpTest> callee_;
+};
+
+// Measures BWE when the media is relayed over a TURN/TCP path.
+TEST_F(PeerConnectionRampUpTest, Bwe_After_TurnOverTCP) {
+  CreateTurnServer(cricket::ProtocolType::PROTO_TCP);
+
+  PeerConnectionInterface::IceServer ice_server;
+  ice_server.urls.push_back("turn:" + std::string(kTurnInternalAddress) + ":" +
+                            std::to_string(kTurnInternalPort) +
+                            "?transport=tcp");
+  ice_server.username = "test";
+  ice_server.password = "test";
+
+  // Both endpoints are forced onto the relay so the TURN path is exercised.
+  auto make_relay_config = [&ice_server] {
+    PeerConnectionInterface::RTCConfiguration config;
+    config.sdp_semantics = SdpSemantics::kUnifiedPlan;
+    config.servers.push_back(ice_server);
+    config.type = PeerConnectionInterface::kRelay;
+    return config;
+  };
+  ASSERT_TRUE(
+      CreatePeerConnectionWrappers(make_relay_config(), make_relay_config()));
+
+  SetupOneWayCall();
+  RunTest("turn_over_tcp");
+}
+
+// Measures BWE when the media is relayed over a TURN/UDP path.
+TEST_F(PeerConnectionRampUpTest, Bwe_After_TurnOverUDP) {
+  CreateTurnServer(cricket::ProtocolType::PROTO_UDP);
+
+  PeerConnectionInterface::IceServer ice_server;
+  ice_server.urls.push_back("turn:" + std::string(kTurnInternalAddress) + ":" +
+                            std::to_string(kTurnInternalPort));
+  ice_server.username = "test";
+  ice_server.password = "test";
+
+  // Both endpoints are forced onto the relay so the TURN path is exercised.
+  auto make_relay_config = [&ice_server] {
+    PeerConnectionInterface::RTCConfiguration config;
+    config.sdp_semantics = SdpSemantics::kUnifiedPlan;
+    config.servers.push_back(ice_server);
+    config.type = PeerConnectionInterface::kRelay;
+    return config;
+  };
+  ASSERT_TRUE(
+      CreatePeerConnectionWrappers(make_relay_config(), make_relay_config()));
+
+  SetupOneWayCall();
+  RunTest("turn_over_udp");
+}
+
+// Measures BWE when the media is relayed over a TURN/TLS path. The TURN
+// server's certificate common name is set to kTurnInternalAddress —
+// presumably so the certificate matches the address the clients dial; the
+// fixture also installs a TestCertificateVerifier (confirm if relying on it).
+TEST_F(PeerConnectionRampUpTest, Bwe_After_TurnOverTLS) {
+  CreateTurnServer(cricket::ProtocolType::PROTO_TLS, kTurnInternalAddress);
+  PeerConnectionInterface::IceServer ice_server;
+  // The "turns:" scheme plus "?transport=tcp" selects TURN over TLS.
+  std::string ice_server_url = "turns:" + std::string(kTurnInternalAddress) +
+                               ":" + std::to_string(kTurnInternalPort) +
+                               "?transport=tcp";
+  ice_server.urls.push_back(ice_server_url);
+  ice_server.username = "test";
+  ice_server.password = "test";
+  PeerConnectionInterface::RTCConfiguration client_1_config;
+  client_1_config.sdp_semantics = SdpSemantics::kUnifiedPlan;
+  client_1_config.servers.push_back(ice_server);
+  client_1_config.type = PeerConnectionInterface::kRelay;
+  PeerConnectionInterface::RTCConfiguration client_2_config;
+  client_2_config.sdp_semantics = SdpSemantics::kUnifiedPlan;
+  client_2_config.servers.push_back(ice_server);
+  client_2_config.type = PeerConnectionInterface::kRelay;
+
+  ASSERT_TRUE(CreatePeerConnectionWrappers(client_1_config, client_2_config));
+
+  SetupOneWayCall();
+  RunTest("turn_over_tls");
+}
+
+// Measures BWE over a direct peer-to-peer UDP path (TCP candidates disabled).
+TEST_F(PeerConnectionRampUpTest, Bwe_After_UDPPeerToPeer) {
+  auto make_udp_only_config = [] {
+    PeerConnectionInterface::RTCConfiguration config;
+    config.sdp_semantics = SdpSemantics::kUnifiedPlan;
+    config.tcp_candidate_policy = PeerConnection::kTcpCandidatePolicyDisabled;
+    return config;
+  };
+  ASSERT_TRUE(CreatePeerConnectionWrappers(make_udp_only_config(),
+                                           make_udp_only_config()));
+
+  SetupOneWayCall();
+  RunTest("udp_peer_to_peer");
+}
+
+// Measures BWE over a direct peer-to-peer TCP path: UDP sockets are disabled
+// in the firewall socket server, so ICE can only connect via TCP candidates.
+TEST_F(PeerConnectionRampUpTest, Bwe_After_TCPPeerToPeer) {
+  firewall_socket_server()->set_udp_sockets_enabled(false);
+  PeerConnectionInterface::RTCConfiguration config;
+  config.sdp_semantics = SdpSemantics::kUnifiedPlan;
+  ASSERT_TRUE(CreatePeerConnectionWrappers(config, config));
+
+  SetupOneWayCall();
+  RunTest("tcp_peer_to_peer");
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/peer_connection_rtp_unittest.cc b/third_party/libwebrtc/pc/peer_connection_rtp_unittest.cc
new file mode 100644
index 0000000000..b93e5923bb
--- /dev/null
+++ b/third_party/libwebrtc/pc/peer_connection_rtp_unittest.cc
@@ -0,0 +1,1983 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stddef.h>
+
+#include <cstdint>
+#include <memory>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/audio/audio_mixer.h"
+#include "api/audio_codecs/builtin_audio_decoder_factory.h"
+#include "api/audio_codecs/builtin_audio_encoder_factory.h"
+#include "api/create_peerconnection_factory.h"
+#include "api/jsep.h"
+#include "api/media_stream_interface.h"
+#include "api/media_types.h"
+#include "api/peer_connection_interface.h"
+#include "api/rtc_error.h"
+#include "api/rtp_parameters.h"
+#include "api/rtp_receiver_interface.h"
+#include "api/rtp_sender_interface.h"
+#include "api/rtp_transceiver_direction.h"
+#include "api/rtp_transceiver_interface.h"
+#include "api/scoped_refptr.h"
+#include "api/set_remote_description_observer_interface.h"
+#include "api/uma_metrics.h"
+#include "api/video_codecs/video_decoder_factory_template.h"
+#include "api/video_codecs/video_decoder_factory_template_dav1d_adapter.h"
+#include "api/video_codecs/video_decoder_factory_template_libvpx_vp8_adapter.h"
+#include "api/video_codecs/video_decoder_factory_template_libvpx_vp9_adapter.h"
+#include "api/video_codecs/video_decoder_factory_template_open_h264_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template.h"
+#include "api/video_codecs/video_encoder_factory_template_libaom_av1_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template_libvpx_vp8_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template_libvpx_vp9_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template_open_h264_adapter.h"
+#include "media/base/stream_params.h"
+#include "modules/audio_device/include/audio_device.h"
+#include "modules/audio_processing/include/audio_processing.h"
+#include "p2p/base/port_allocator.h"
+#include "pc/media_session.h"
+#include "pc/peer_connection_wrapper.h"
+#include "pc/sdp_utils.h"
+#include "pc/session_description.h"
+#include "pc/test/fake_audio_capture_module.h"
+#include "pc/test/integration_test_helpers.h"
+#include "pc/test/mock_peer_connection_observers.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/rtc_certificate_generator.h"
+#include "rtc_base/thread.h"
+#include "system_wrappers/include/metrics.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+// This file contains tests for RTP Media API-related behavior of
+// `webrtc::PeerConnection`, see https://w3c.github.io/webrtc-pc/#rtp-media-api.
+
+namespace webrtc {
+
+using RTCConfiguration = PeerConnectionInterface::RTCConfiguration;
+using ::testing::ElementsAre;
+using ::testing::Pair;
+using ::testing::UnorderedElementsAre;
+using ::testing::Values;
+
+template <typename MethodFunctor>
+class OnSuccessObserver : public webrtc::SetRemoteDescriptionObserverInterface {
+ public:
+ explicit OnSuccessObserver(MethodFunctor on_success)
+ : on_success_(std::move(on_success)) {}
+
+ // webrtc::SetRemoteDescriptionObserverInterface implementation.
+ void OnSetRemoteDescriptionComplete(webrtc::RTCError error) override {
+ RTC_CHECK(error.ok());
+ on_success_();
+ }
+
+ private:
+ MethodFunctor on_success_;
+};
+
+class PeerConnectionRtpBaseTest : public ::testing::Test {
+ public:
+ explicit PeerConnectionRtpBaseTest(SdpSemantics sdp_semantics)
+ : sdp_semantics_(sdp_semantics),
+ pc_factory_(CreatePeerConnectionFactory(
+ rtc::Thread::Current(),
+ rtc::Thread::Current(),
+ rtc::Thread::Current(),
+ FakeAudioCaptureModule::Create(),
+ CreateBuiltinAudioEncoderFactory(),
+ CreateBuiltinAudioDecoderFactory(),
+ std::make_unique<
+ VideoEncoderFactoryTemplate<LibvpxVp8EncoderTemplateAdapter,
+ LibvpxVp9EncoderTemplateAdapter,
+ OpenH264EncoderTemplateAdapter,
+ LibaomAv1EncoderTemplateAdapter>>(),
+ std::make_unique<
+ VideoDecoderFactoryTemplate<LibvpxVp8DecoderTemplateAdapter,
+ LibvpxVp9DecoderTemplateAdapter,
+ OpenH264DecoderTemplateAdapter,
+ Dav1dDecoderTemplateAdapter>>(),
+ nullptr /* audio_mixer */,
+ nullptr /* audio_processing */)) {
+ webrtc::metrics::Reset();
+ }
+
+ std::unique_ptr<PeerConnectionWrapper> CreatePeerConnection() {
+ return CreatePeerConnection(RTCConfiguration());
+ }
+
+ std::unique_ptr<PeerConnectionWrapper> CreatePeerConnectionWithPlanB() {
+ RTCConfiguration config;
+ config.sdp_semantics = SdpSemantics::kPlanB_DEPRECATED;
+ return CreatePeerConnectionInternal(config);
+ }
+
+ std::unique_ptr<PeerConnectionWrapper> CreatePeerConnectionWithUnifiedPlan() {
+ RTCConfiguration config;
+ config.sdp_semantics = SdpSemantics::kUnifiedPlan;
+ return CreatePeerConnectionInternal(config);
+ }
+
+ std::unique_ptr<PeerConnectionWrapper> CreatePeerConnection(
+ const RTCConfiguration& config) {
+ RTCConfiguration modified_config = config;
+ modified_config.sdp_semantics = sdp_semantics_;
+ return CreatePeerConnectionInternal(modified_config);
+ }
+
+ protected:
+ const SdpSemantics sdp_semantics_;
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> pc_factory_;
+
+ private:
+ // Private so that tests don't accidentally bypass the SdpSemantics
+ // adjustment.
+ std::unique_ptr<PeerConnectionWrapper> CreatePeerConnectionInternal(
+ const RTCConfiguration& config) {
+ auto observer = std::make_unique<MockPeerConnectionObserver>();
+ auto result = pc_factory_->CreatePeerConnectionOrError(
+ config, PeerConnectionDependencies(observer.get()));
+ EXPECT_TRUE(result.ok());
+ observer->SetPeerConnectionInterface(result.value().get());
+ return std::make_unique<PeerConnectionWrapper>(
+ pc_factory_, result.MoveValue(), std::move(observer));
+ }
+
+ rtc::AutoThread main_thread_;
+};
+
+class PeerConnectionRtpTest
+ : public PeerConnectionRtpBaseTest,
+ public ::testing::WithParamInterface<SdpSemantics> {
+ protected:
+ PeerConnectionRtpTest() : PeerConnectionRtpBaseTest(GetParam()) {}
+};
+
+class PeerConnectionRtpTestPlanB : public PeerConnectionRtpBaseTest {
+ protected:
+ PeerConnectionRtpTestPlanB()
+ : PeerConnectionRtpBaseTest(SdpSemantics::kPlanB_DEPRECATED) {}
+};
+
+class PeerConnectionRtpTestUnifiedPlan : public PeerConnectionRtpBaseTest {
+ protected:
+ PeerConnectionRtpTestUnifiedPlan()
+ : PeerConnectionRtpBaseTest(SdpSemantics::kUnifiedPlan) {}
+
+ // Helper to emulate an SFU that rejects an offered media section
+ // in answer.
+ bool ExchangeOfferAnswerWhereRemoteStopsTransceiver(
+ PeerConnectionWrapper* caller,
+ PeerConnectionWrapper* callee,
+ size_t mid_to_stop) {
+ auto offer = caller->CreateOffer();
+ caller->SetLocalDescription(CloneSessionDescription(offer.get()));
+ callee->SetRemoteDescription(std::move(offer));
+ EXPECT_LT(mid_to_stop, callee->pc()->GetTransceivers().size());
+ // Must use StopInternal in order to do instant reject.
+ callee->pc()->GetTransceivers()[mid_to_stop]->StopInternal();
+ auto answer = callee->CreateAnswer();
+ EXPECT_TRUE(answer);
+ bool set_local_answer =
+ callee->SetLocalDescription(CloneSessionDescription(answer.get()));
+ EXPECT_TRUE(set_local_answer);
+ bool set_remote_answer = caller->SetRemoteDescription(std::move(answer));
+ EXPECT_TRUE(set_remote_answer);
+ return set_remote_answer;
+ }
+};
+
+// These tests cover `webrtc::PeerConnectionObserver` callbacks firing upon
+// setting the remote description.
+
+TEST_P(PeerConnectionRtpTest, AddTrackWithoutStreamFiresOnAddTrack) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+
+ ASSERT_TRUE(caller->AddAudioTrack("audio_track"));
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+
+ ASSERT_EQ(callee->observer()->add_track_events_.size(), 1u);
+ const auto& add_track_event = callee->observer()->add_track_events_[0];
+ EXPECT_EQ(add_track_event.streams, add_track_event.receiver->streams());
+
+ if (sdp_semantics_ == SdpSemantics::kPlanB_DEPRECATED) {
+ // Since we are not supporting the no stream case with Plan B, there should
+ // be a generated stream, even though we didn't set one with AddTrack.
+ ASSERT_EQ(1u, add_track_event.streams.size());
+ EXPECT_TRUE(add_track_event.streams[0]->FindAudioTrack("audio_track"));
+ } else {
+ EXPECT_EQ(0u, add_track_event.streams.size());
+ }
+}
+
+TEST_P(PeerConnectionRtpTest, AddTrackWithStreamFiresOnAddTrack) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+
+ ASSERT_TRUE(caller->AddAudioTrack("audio_track", {"audio_stream"}));
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+
+ ASSERT_EQ(callee->observer()->add_track_events_.size(), 1u);
+ auto& add_track_event = callee->observer()->add_track_events_[0];
+ ASSERT_EQ(add_track_event.streams.size(), 1u);
+ EXPECT_EQ("audio_stream", add_track_event.streams[0]->id());
+ EXPECT_TRUE(add_track_event.streams[0]->FindAudioTrack("audio_track"));
+ EXPECT_EQ(add_track_event.streams, add_track_event.receiver->streams());
+}
+
+TEST_P(PeerConnectionRtpTest, RemoveTrackWithoutStreamFiresOnRemoveTrack) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+
+ auto sender = caller->AddAudioTrack("audio_track", {});
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ ASSERT_EQ(callee->observer()->add_track_events_.size(), 1u);
+ ASSERT_TRUE(
+ caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal()));
+
+ EXPECT_TRUE(caller->pc()->RemoveTrackOrError(sender).ok());
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+
+ ASSERT_EQ(callee->observer()->add_track_events_.size(), 1u);
+ EXPECT_EQ(callee->observer()->GetAddTrackReceivers(),
+ callee->observer()->remove_track_events_);
+}
+
+TEST_P(PeerConnectionRtpTest, RemoveTrackWithStreamFiresOnRemoveTrack) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+
+ auto sender = caller->AddAudioTrack("audio_track", {"audio_stream"});
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ ASSERT_EQ(callee->observer()->add_track_events_.size(), 1u);
+ ASSERT_TRUE(
+ caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal()));
+
+ EXPECT_TRUE(caller->pc()->RemoveTrackOrError(sender).ok());
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+
+ ASSERT_EQ(callee->observer()->add_track_events_.size(), 1u);
+ EXPECT_EQ(callee->observer()->GetAddTrackReceivers(),
+ callee->observer()->remove_track_events_);
+ EXPECT_EQ(0u, callee->observer()->remote_streams()->count());
+}
+
+TEST_P(PeerConnectionRtpTest, RemoveTrackWithSharedStreamFiresOnRemoveTrack) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+
+ const char kSharedStreamId[] = "shared_audio_stream";
+ auto sender1 = caller->AddAudioTrack("audio_track1", {kSharedStreamId});
+ auto sender2 = caller->AddAudioTrack("audio_track2", {kSharedStreamId});
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ ASSERT_EQ(callee->observer()->add_track_events_.size(), 2u);
+ ASSERT_TRUE(
+ caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal()));
+
+ // Remove "audio_track1".
+ EXPECT_TRUE(caller->pc()->RemoveTrackOrError(sender1).ok());
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ ASSERT_EQ(callee->observer()->add_track_events_.size(), 2u);
+ EXPECT_EQ(
+ std::vector<rtc::scoped_refptr<RtpReceiverInterface>>{
+ callee->observer()->add_track_events_[0].receiver},
+ callee->observer()->remove_track_events_);
+ ASSERT_EQ(1u, callee->observer()->remote_streams()->count());
+ ASSERT_TRUE(
+ caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal()));
+
+ // Remove "audio_track2".
+ EXPECT_TRUE(caller->pc()->RemoveTrackOrError(sender2).ok());
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ ASSERT_EQ(callee->observer()->add_track_events_.size(), 2u);
+ EXPECT_EQ(callee->observer()->GetAddTrackReceivers(),
+ callee->observer()->remove_track_events_);
+ EXPECT_EQ(0u, callee->observer()->remote_streams()->count());
+}
+
+// Tests the edge case that if a stream ID changes for a given track that both
+// OnRemoveTrack and OnAddTrack is fired.
+TEST_F(PeerConnectionRtpTestPlanB,
+ RemoteStreamIdChangesFiresOnRemoveAndOnAddTrack) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+
+ const char kStreamId1[] = "stream1";
+ const char kStreamId2[] = "stream2";
+ caller->AddAudioTrack("audio_track1", {kStreamId1});
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ EXPECT_EQ(callee->observer()->add_track_events_.size(), 1u);
+
+ // Change the stream ID of the sender in the session description.
+ auto offer = caller->CreateOfferAndSetAsLocal();
+ auto* audio_desc =
+ cricket::GetFirstAudioContentDescription(offer->description());
+ ASSERT_EQ(audio_desc->mutable_streams().size(), 1u);
+ audio_desc->mutable_streams()[0].set_stream_ids({kStreamId2});
+ ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer)));
+
+ ASSERT_EQ(callee->observer()->add_track_events_.size(), 2u);
+ EXPECT_EQ(callee->observer()->add_track_events_[1].streams[0]->id(),
+ kStreamId2);
+ ASSERT_EQ(callee->observer()->remove_track_events_.size(), 1u);
+ EXPECT_EQ(callee->observer()->remove_track_events_[0]->streams()[0]->id(),
+ kStreamId1);
+}
+
+// Tests that setting a remote description with sending transceivers will fire
+// the OnTrack callback for each transceiver and setting a remote description
+// with receive only transceivers will not call OnTrack. One transceiver is
+// created without any stream_ids, while the other is created with multiple
+// stream_ids.
+TEST_F(PeerConnectionRtpTestUnifiedPlan, AddTransceiverCallsOnTrack) {
+ const std::string kStreamId1 = "video_stream1";
+ const std::string kStreamId2 = "video_stream2";
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+
+ auto audio_transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+ RtpTransceiverInit video_transceiver_init;
+ video_transceiver_init.stream_ids = {kStreamId1, kStreamId2};
+ auto video_transceiver =
+ caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO, video_transceiver_init);
+
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+
+ ASSERT_EQ(0u, caller->observer()->on_track_transceivers_.size());
+ ASSERT_EQ(2u, callee->observer()->on_track_transceivers_.size());
+ EXPECT_EQ(audio_transceiver->mid(),
+ callee->pc()->GetTransceivers()[0]->mid());
+ EXPECT_EQ(video_transceiver->mid(),
+ callee->pc()->GetTransceivers()[1]->mid());
+ std::vector<rtc::scoped_refptr<MediaStreamInterface>> audio_streams =
+ callee->pc()->GetTransceivers()[0]->receiver()->streams();
+ std::vector<rtc::scoped_refptr<MediaStreamInterface>> video_streams =
+ callee->pc()->GetTransceivers()[1]->receiver()->streams();
+ ASSERT_EQ(0u, audio_streams.size());
+ ASSERT_EQ(2u, video_streams.size());
+ EXPECT_EQ(kStreamId1, video_streams[0]->id());
+ EXPECT_EQ(kStreamId2, video_streams[1]->id());
+}
+
+// Test that doing additional offer/answer exchanges with no changes to tracks
+// will cause no additional OnTrack calls after the tracks have been negotiated.
+TEST_F(PeerConnectionRtpTestUnifiedPlan, ReofferDoesNotCallOnTrack) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+
+ caller->AddAudioTrack("audio");
+ callee->AddAudioTrack("audio");
+
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+ EXPECT_EQ(1u, caller->observer()->on_track_transceivers_.size());
+ EXPECT_EQ(1u, callee->observer()->on_track_transceivers_.size());
+
+ // If caller reoffers with no changes expect no additional OnTrack calls.
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+ EXPECT_EQ(1u, caller->observer()->on_track_transceivers_.size());
+ EXPECT_EQ(1u, callee->observer()->on_track_transceivers_.size());
+
+ // Also if callee reoffers with no changes expect no additional OnTrack calls.
+ ASSERT_TRUE(callee->ExchangeOfferAnswerWith(caller.get()));
+ EXPECT_EQ(1u, caller->observer()->on_track_transceivers_.size());
+ EXPECT_EQ(1u, callee->observer()->on_track_transceivers_.size());
+}
+
+// Test that OnTrack is called when the transceiver direction changes to send
+// the track.
+TEST_F(PeerConnectionRtpTestUnifiedPlan, SetDirectionCallsOnTrack) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+
+ auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+ EXPECT_TRUE(
+ transceiver->SetDirectionWithError(RtpTransceiverDirection::kInactive)
+ .ok());
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+ EXPECT_EQ(0u, caller->observer()->on_track_transceivers_.size());
+ EXPECT_EQ(0u, callee->observer()->on_track_transceivers_.size());
+
+ EXPECT_TRUE(
+ transceiver->SetDirectionWithError(RtpTransceiverDirection::kSendOnly)
+ .ok());
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+ EXPECT_EQ(0u, caller->observer()->on_track_transceivers_.size());
+ EXPECT_EQ(1u, callee->observer()->on_track_transceivers_.size());
+
+ // If the direction changes but it is still receiving on the remote side, then
+ // OnTrack should not be fired again.
+ EXPECT_TRUE(
+ transceiver->SetDirectionWithError(RtpTransceiverDirection::kSendRecv)
+ .ok());
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+ EXPECT_EQ(0u, caller->observer()->on_track_transceivers_.size());
+ EXPECT_EQ(1u, callee->observer()->on_track_transceivers_.size());
+}
+
+// Test that OnTrack is called twice when a sendrecv call is started, the callee
+// changes the direction to inactive, then changes it back to sendrecv.
+TEST_F(PeerConnectionRtpTestUnifiedPlan, SetDirectionHoldCallsOnTrackTwice) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+
+ auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+ EXPECT_EQ(0u, caller->observer()->on_track_transceivers_.size());
+ EXPECT_EQ(1u, callee->observer()->on_track_transceivers_.size());
+
+ // Put the call on hold by no longer receiving the track.
+ EXPECT_TRUE(callee->pc()
+ ->GetTransceivers()[0]
+ ->SetDirectionWithError(RtpTransceiverDirection::kInactive)
+ .ok());
+
+ ASSERT_TRUE(callee->ExchangeOfferAnswerWith(caller.get()));
+ EXPECT_EQ(0u, caller->observer()->on_track_transceivers_.size());
+ EXPECT_EQ(1u, callee->observer()->on_track_transceivers_.size());
+
+ // Resume the call by changing the direction to recvonly. This should call
+ // OnTrack again on the callee side.
+ EXPECT_TRUE(callee->pc()
+ ->GetTransceivers()[0]
+ ->SetDirectionWithError(RtpTransceiverDirection::kRecvOnly)
+ .ok());
+
+ ASSERT_TRUE(callee->ExchangeOfferAnswerWith(caller.get()));
+ EXPECT_EQ(0u, caller->observer()->on_track_transceivers_.size());
+ EXPECT_EQ(2u, callee->observer()->on_track_transceivers_.size());
+}
+
+// Test that setting a remote offer twice with no answer in the middle results
+// in OnAddTrack being fired only once.
+TEST_F(PeerConnectionRtpTestUnifiedPlan,
+ ApplyTwoRemoteOffersWithNoAnswerResultsInOneAddTrackEvent) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+
+ caller->AddAudioTrack("audio_track", {});
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ ASSERT_EQ(1u, callee->observer()->add_track_events_.size());
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ EXPECT_EQ(1u, callee->observer()->add_track_events_.size());
+}
+
+// Test that setting a remote offer twice with no answer in the middle and the
+// track being removed between the two offers results in OnAddTrack being called
+// once the first time and OnRemoveTrack being called once the second time.
+TEST_F(PeerConnectionRtpTestUnifiedPlan,
+ ApplyRemoteOfferAddThenRemoteOfferRemoveResultsInOneRemoveTrackEvent) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+
+ auto sender = caller->AddAudioTrack("audio_track", {});
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ ASSERT_EQ(1u, callee->observer()->add_track_events_.size());
+ EXPECT_EQ(0u, callee->observer()->remove_track_events_.size());
+
+ caller->pc()->RemoveTrackOrError(sender);
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ EXPECT_EQ(1u, callee->observer()->add_track_events_.size());
+ EXPECT_EQ(1u, callee->observer()->remove_track_events_.size());
+}
+
+// Test that changing the direction from receiving to not receiving between
+// setting the remote offer and creating / setting the local answer results in
+// a remove track event when SetLocalDescription is called.
+TEST_F(PeerConnectionRtpTestUnifiedPlan,
+ ChangeDirectionInAnswerResultsInRemoveTrackEvent) {
+ auto caller = CreatePeerConnection();
+ caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+ auto callee = CreatePeerConnection();
+ callee->AddAudioTrack("audio_track", {});
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOffer()));
+ EXPECT_EQ(1u, callee->observer()->add_track_events_.size());
+ EXPECT_EQ(0u, callee->observer()->remove_track_events_.size());
+
+ auto callee_transceiver = callee->pc()->GetTransceivers()[0];
+ EXPECT_TRUE(callee_transceiver
+ ->SetDirectionWithError(RtpTransceiverDirection::kSendOnly)
+ .ok());
+
+ ASSERT_TRUE(callee->SetLocalDescription(callee->CreateAnswer()));
+ EXPECT_EQ(1u, callee->observer()->add_track_events_.size());
+ EXPECT_EQ(1u, callee->observer()->remove_track_events_.size());
+}
+
+TEST_F(PeerConnectionRtpTestUnifiedPlan, ChangeMsidWhileReceiving) {
+ auto caller = CreatePeerConnection();
+ caller->AddAudioTrack("audio_track", {"stream1"});
+ auto callee = CreatePeerConnection();
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+
+ ASSERT_EQ(1u, callee->observer()->on_track_transceivers_.size());
+ auto transceiver = callee->observer()->on_track_transceivers_[0];
+ ASSERT_EQ(1u, transceiver->receiver()->streams().size());
+ EXPECT_EQ("stream1", transceiver->receiver()->streams()[0]->id());
+
+ ASSERT_TRUE(callee->CreateAnswerAndSetAsLocal());
+
+ // Change the stream ID in the offer.
+ caller->pc()->GetSenders()[0]->SetStreams({"stream2"});
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ ASSERT_EQ(1u, transceiver->receiver()->streams().size());
+ EXPECT_EQ("stream2", transceiver->receiver()->streams()[0]->id());
+}
+
+// These tests examine the state of the peer connection as a result of
+// performing SetRemoteDescription().
+
+TEST_P(PeerConnectionRtpTest, AddTrackWithoutStreamAddsReceiver) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+
+ ASSERT_TRUE(caller->AddAudioTrack("audio_track", {}));
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+
+ EXPECT_EQ(callee->pc()->GetReceivers().size(), 1u);
+ auto receiver_added = callee->pc()->GetReceivers()[0];
+ EXPECT_EQ("audio_track", receiver_added->track()->id());
+
+ if (sdp_semantics_ == SdpSemantics::kPlanB_DEPRECATED) {
+ // Since we are not supporting the no stream case with Plan B, there should
+ // be a generated stream, even though we didn't set one with AddTrack.
+ ASSERT_EQ(1u, receiver_added->streams().size());
+ EXPECT_TRUE(receiver_added->streams()[0]->FindAudioTrack("audio_track"));
+ } else {
+ EXPECT_EQ(0u, receiver_added->streams().size());
+ }
+}
+
+TEST_P(PeerConnectionRtpTest, AddTrackWithStreamAddsReceiver) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+
+ ASSERT_TRUE(caller->AddAudioTrack("audio_track", {"audio_stream"}));
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+
+ EXPECT_EQ(callee->pc()->GetReceivers().size(), 1u);
+ auto receiver_added = callee->pc()->GetReceivers()[0];
+ EXPECT_EQ("audio_track", receiver_added->track()->id());
+ EXPECT_EQ(receiver_added->streams().size(), 1u);
+ EXPECT_EQ("audio_stream", receiver_added->streams()[0]->id());
+ EXPECT_TRUE(receiver_added->streams()[0]->FindAudioTrack("audio_track"));
+}
+
+TEST_P(PeerConnectionRtpTest, RemoveTrackWithoutStreamRemovesReceiver) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+
+ auto sender = caller->AddAudioTrack("audio_track", {});
+ ASSERT_TRUE(sender);
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+
+ ASSERT_EQ(callee->pc()->GetReceivers().size(), 1u);
+ auto receiver = callee->pc()->GetReceivers()[0];
+ ASSERT_TRUE(caller->pc()->RemoveTrackOrError(sender).ok());
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+
+ if (sdp_semantics_ == SdpSemantics::kUnifiedPlan) {
+ // With Unified Plan the receiver stays but the transceiver transitions to
+ // inactive.
+ ASSERT_EQ(1u, callee->pc()->GetReceivers().size());
+ EXPECT_EQ(RtpTransceiverDirection::kInactive,
+ callee->pc()->GetTransceivers()[0]->current_direction());
+ } else {
+ // With Plan B the receiver is removed.
+ ASSERT_EQ(0u, callee->pc()->GetReceivers().size());
+ }
+}
+
+TEST_P(PeerConnectionRtpTest, RemoveTrackWithStreamRemovesReceiver) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+
+ auto sender = caller->AddAudioTrack("audio_track", {"audio_stream"});
+ ASSERT_TRUE(sender);
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+ ASSERT_EQ(callee->pc()->GetReceivers().size(), 1u);
+ auto receiver = callee->pc()->GetReceivers()[0];
+ ASSERT_TRUE(caller->pc()->RemoveTrackOrError(sender).ok());
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+
+ if (sdp_semantics_ == SdpSemantics::kUnifiedPlan) {
+ // With Unified Plan the receiver stays but the transceiver transitions to
+ // inactive.
+ EXPECT_EQ(1u, callee->pc()->GetReceivers().size());
+ EXPECT_EQ(RtpTransceiverDirection::kInactive,
+ callee->pc()->GetTransceivers()[0]->current_direction());
+ } else {
+ // With Plan B the receiver is removed.
+ EXPECT_EQ(0u, callee->pc()->GetReceivers().size());
+ }
+}
+
+TEST_P(PeerConnectionRtpTest, RemoveTrackWithSharedStreamRemovesReceiver) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+
+ const char kSharedStreamId[] = "shared_audio_stream";
+ auto sender1 = caller->AddAudioTrack("audio_track1", {kSharedStreamId});
+ auto sender2 = caller->AddAudioTrack("audio_track2", {kSharedStreamId});
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+ ASSERT_EQ(2u, callee->pc()->GetReceivers().size());
+
+ // Remove "audio_track1".
+ EXPECT_TRUE(caller->pc()->RemoveTrackOrError(sender1).ok());
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+
+ if (sdp_semantics_ == SdpSemantics::kUnifiedPlan) {
+ // With Unified Plan the receiver stays but the transceiver transitions to
+ // inactive.
+ ASSERT_EQ(2u, callee->pc()->GetReceivers().size());
+ auto transceiver = callee->pc()->GetTransceivers()[0];
+ EXPECT_EQ("audio_track1", transceiver->receiver()->track()->id());
+ EXPECT_EQ(RtpTransceiverDirection::kInactive,
+ transceiver->current_direction());
+ } else {
+ // With Plan B the receiver is removed.
+ ASSERT_EQ(1u, callee->pc()->GetReceivers().size());
+ EXPECT_EQ("audio_track2", callee->pc()->GetReceivers()[0]->track()->id());
+ }
+
+ // Remove "audio_track2".
+ EXPECT_TRUE(caller->pc()->RemoveTrackOrError(sender2).ok());
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+
+ if (sdp_semantics_ == SdpSemantics::kUnifiedPlan) {
+ // With Unified Plan the receiver stays but the transceiver transitions to
+ // inactive.
+ ASSERT_EQ(2u, callee->pc()->GetReceivers().size());
+ auto transceiver = callee->pc()->GetTransceivers()[1];
+ EXPECT_EQ("audio_track2", transceiver->receiver()->track()->id());
+ EXPECT_EQ(RtpTransceiverDirection::kInactive,
+ transceiver->current_direction());
+ } else {
+ // With Plan B the receiver is removed.
+ ASSERT_EQ(0u, callee->pc()->GetReceivers().size());
+ }
+}
+
+TEST_P(PeerConnectionRtpTest, AudioGetParametersHasHeaderExtensions) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+ auto sender = caller->AddAudioTrack("audio_track");
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+
+ ASSERT_GT(caller->pc()->GetSenders().size(), 0u);
+ EXPECT_GT(sender->GetParameters().header_extensions.size(), 0u);
+
+ ASSERT_GT(callee->pc()->GetReceivers().size(), 0u);
+ auto receiver = callee->pc()->GetReceivers()[0];
+ EXPECT_GT(receiver->GetParameters().header_extensions.size(), 0u);
+}
+
+TEST_P(PeerConnectionRtpTest, VideoGetParametersHasHeaderExtensions) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+ auto sender = caller->AddVideoTrack("video_track");
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+
+ ASSERT_GT(caller->pc()->GetSenders().size(), 0u);
+ EXPECT_GT(sender->GetParameters().header_extensions.size(), 0u);
+
+ ASSERT_GT(callee->pc()->GetReceivers().size(), 0u);
+ auto receiver = callee->pc()->GetReceivers()[0];
+ EXPECT_GT(receiver->GetParameters().header_extensions.size(), 0u);
+}
+
+// Invokes SetRemoteDescription() twice in a row without synchronizing the two
+// calls and examine the state of the peer connection inside the callbacks to
+// ensure that the second call does not occur prematurely, contaminating the
+// state of the peer connection of the first callback.
+TEST_F(PeerConnectionRtpTestPlanB,
+ StatesCorrelateWithSetRemoteDescriptionCall) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+
+ // Create SDP for adding a track and for removing it. This will be used in the
+ // first and second SetRemoteDescription() calls.
+ auto sender = caller->AddAudioTrack("audio_track", {});
+ auto srd1_sdp = caller->CreateOfferAndSetAsLocal();
+ EXPECT_TRUE(caller->pc()->RemoveTrackOrError(sender).ok());
+ auto srd2_sdp = caller->CreateOfferAndSetAsLocal();
+
+ // In the first SetRemoteDescription() callback, check that we have a
+ // receiver for the track.
+ auto pc = callee->pc();
+ bool srd1_callback_called = false;
+ auto srd1_callback = [&srd1_callback_called, &pc]() {
+ EXPECT_EQ(pc->GetReceivers().size(), 1u);
+ srd1_callback_called = true;
+ };
+
+ // In the second SetRemoteDescription() callback, check that the receiver has
+ // been removed.
+ // TODO(hbos): When we implement Unified Plan, receivers will not be removed.
+ // Instead, the transceiver owning the receiver will become inactive.
+ // https://crbug.com/webrtc/7600
+ bool srd2_callback_called = false;
+ auto srd2_callback = [&srd2_callback_called, &pc]() {
+ EXPECT_TRUE(pc->GetReceivers().empty());
+ srd2_callback_called = true;
+ };
+
+ // Invoke SetRemoteDescription() twice in a row without synchronizing the two
+ // calls. The callbacks verify that the two calls are synchronized, as in, the
+ // effects of the second SetRemoteDescription() call must not have happened by
+ // the time the first callback is invoked. If it has then the receiver that is
+ // added as a result of the first SetRemoteDescription() call will already
+ // have been removed as a result of the second SetRemoteDescription() call
+ // when the first callback is invoked.
+ callee->pc()->SetRemoteDescription(
+ std::move(srd1_sdp),
+ rtc::make_ref_counted<OnSuccessObserver<decltype(srd1_callback)>>(
+ srd1_callback));
+ callee->pc()->SetRemoteDescription(
+ std::move(srd2_sdp),
+ rtc::make_ref_counted<OnSuccessObserver<decltype(srd2_callback)>>(
+ srd2_callback));
+ EXPECT_TRUE_WAIT(srd1_callback_called, kDefaultTimeout);
+ EXPECT_TRUE_WAIT(srd2_callback_called, kDefaultTimeout);
+}
+
+// Tests that a remote track is created with the signaled MSIDs when they are
+// communicated with a=msid and no SSRCs are signaled at all (i.e., no a=ssrc
+// lines).
+TEST_F(PeerConnectionRtpTestUnifiedPlan, UnsignaledSsrcCreatesReceiverStreams) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+ const char kStreamId1[] = "stream1";
+ const char kStreamId2[] = "stream2";
+ caller->AddTrack(caller->CreateAudioTrack("audio_track1"),
+ {kStreamId1, kStreamId2});
+
+ auto offer = caller->CreateOfferAndSetAsLocal();
+ // Munge the offer to take out everything but the stream_ids.
+ auto contents = offer->description()->contents();
+ ASSERT_TRUE(!contents.empty());
+ ASSERT_TRUE(!contents[0].media_description()->streams().empty());
+ std::vector<std::string> stream_ids =
+ contents[0].media_description()->streams()[0].stream_ids();
+ contents[0].media_description()->mutable_streams().clear();
+ // Replace the original StreamParams with one carrying only the stream ids
+ // (no track id, no SSRCs), simulating an a=msid-only remote offer.
+ cricket::StreamParams new_stream;
+ new_stream.set_stream_ids(stream_ids);
+ contents[0].media_description()->AddStream(new_stream);
+
+ // Set the remote description and verify that the streams were added to the
+ // receiver correctly.
+ ASSERT_TRUE(
+ callee->SetRemoteDescription(CloneSessionDescription(offer.get())));
+ auto receivers = callee->pc()->GetReceivers();
+ ASSERT_EQ(receivers.size(), 1u);
+ ASSERT_EQ(receivers[0]->streams().size(), 2u);
+ EXPECT_EQ(receivers[0]->streams()[0]->id(), kStreamId1);
+ EXPECT_EQ(receivers[0]->streams()[1]->id(), kStreamId2);
+}
+// Tests that remapping the SSRCs in a renegotiation does not end the remote
+// tracks; only a change of direction should do that.
+TEST_F(PeerConnectionRtpTestUnifiedPlan, TracksDoNotEndWhenSsrcChanges) {
+ constexpr uint32_t kFirstMungedSsrc = 1337u;
+
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+
+ // Caller offers to receive audio and video.
+ RtpTransceiverInit init;
+ init.direction = RtpTransceiverDirection::kRecvOnly;
+ caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO, init);
+ caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO, init);
+
+ // Callee wants to send audio and video tracks.
+ callee->AddTrack(callee->CreateAudioTrack("audio_track"), {});
+ callee->AddTrack(callee->CreateVideoTrack("video_track"), {});
+
+ // Do initial offer/answer exchange.
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ ASSERT_TRUE(
+ caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal()));
+ ASSERT_EQ(caller->observer()->add_track_events_.size(), 2u);
+ ASSERT_EQ(caller->pc()->GetReceivers().size(), 2u);
+
+ // Do a follow-up offer/answer exchange where the SSRCs are modified.
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ auto answer = callee->CreateAnswer();
+ auto& contents = answer->description()->contents();
+ ASSERT_TRUE(!contents.empty());
+ for (size_t i = 0; i < contents.size(); ++i) {
+ auto& mutable_streams = contents[i].media_description()->mutable_streams();
+ ASSERT_EQ(mutable_streams.size(), 1u);
+ ReplaceFirstSsrc(mutable_streams[0],
+ kFirstMungedSsrc + static_cast<uint32_t>(i));
+ }
+ ASSERT_TRUE(
+ callee->SetLocalDescription(CloneSessionDescription(answer.get())));
+ ASSERT_TRUE(
+ caller->SetRemoteDescription(CloneSessionDescription(answer.get())));
+
+ // No further track events should fire because we never changed direction,
+ // only SSRCs.
+ ASSERT_EQ(caller->observer()->add_track_events_.size(), 2u);
+ // We should have the same number of receivers as before.
+ auto receivers = caller->pc()->GetReceivers();
+ ASSERT_EQ(receivers.size(), 2u);
+ // The tracks are still alive.
+ EXPECT_EQ(receivers[0]->track()->state(),
+ MediaStreamTrackInterface::TrackState::kLive);
+ EXPECT_EQ(receivers[1]->track()->state(),
+ MediaStreamTrackInterface::TrackState::kLive);
+}
+
+// Tests that with Unified Plan, if the stream id for a track changes when
+// setting a new remote description, the media stream is updated appropriately
+// for the receiver.
+// TODO(https://github.com/w3c/webrtc-pc/issues/1937): Resolve spec issue or fix
+// test.
+TEST_F(PeerConnectionRtpTestUnifiedPlan,
+ DISABLED_RemoteStreamIdChangesUpdatesReceiver) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+
+ const char kStreamId1[] = "stream1";
+ const char kStreamId2[] = "stream2";
+ caller->AddAudioTrack("audio_track1", {kStreamId1});
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ EXPECT_EQ(callee->observer()->add_track_events_.size(), 1u);
+
+ // Change the stream id of the sender in the session description.
+ // The second offer re-signals the same track, now associated with
+ // kStreamId2 instead of kStreamId1.
+ auto offer = caller->CreateOfferAndSetAsLocal();
+ auto contents = offer->description()->contents();
+ ASSERT_EQ(contents.size(), 1u);
+ ASSERT_EQ(contents[0].media_description()->mutable_streams().size(), 1u);
+ contents[0].media_description()->mutable_streams()[0].set_stream_ids(
+ {kStreamId2});
+
+ // Set the remote description and verify that the stream was updated
+ // properly.
+ ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer)));
+ auto receivers = callee->pc()->GetReceivers();
+ ASSERT_EQ(receivers.size(), 1u);
+ ASSERT_EQ(receivers[0]->streams().size(), 1u);
+ EXPECT_EQ(receivers[0]->streams()[0]->id(), kStreamId2);
+}
+
+// This tests a regression caught by a downstream client, which occurred when
+// applying a remote description with a SessionDescription object that
+// contained StreamParams that didn't have ids. Although there were multiple
+// remote audio senders, FindSenderInfo didn't find them as unique, because
+// it looked up by StreamParam.id, which none had. This meant only one
+// AudioRtpReceiver was created, as opposed to one for each remote sender.
+TEST_F(PeerConnectionRtpTestPlanB,
+ MultipleRemoteSendersWithoutStreamParamIdAddsMultipleReceivers) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+
+ const char kStreamId1[] = "stream1";
+ const char kStreamId2[] = "stream2";
+ caller->AddAudioTrack("audio_track1", {kStreamId1});
+ caller->AddAudioTrack("audio_track2", {kStreamId2});
+
+ auto offer = caller->CreateOfferAndSetAsLocal();
+ auto mutable_streams =
+ cricket::GetFirstAudioContentDescription(offer->description())
+ ->mutable_streams();
+ ASSERT_EQ(mutable_streams.size(), 2u);
+ // Clear the IDs in the StreamParams.
+ mutable_streams[0].id.clear();
+ mutable_streams[1].id.clear();
+ ASSERT_TRUE(
+ callee->SetRemoteDescription(CloneSessionDescription(offer.get())));
+
+ // Both remote senders must be detected as distinct despite the empty ids,
+ // so one receiver per remote sender is expected.
+ auto receivers = callee->pc()->GetReceivers();
+ ASSERT_EQ(receivers.size(), 2u);
+ ASSERT_EQ(receivers[0]->streams().size(), 1u);
+ EXPECT_EQ(kStreamId1, receivers[0]->streams()[0]->id());
+ ASSERT_EQ(receivers[1]->streams().size(), 1u);
+ EXPECT_EQ(kStreamId2, receivers[1]->streams()[0]->id());
+}
+
+// Tests for the legacy SetRemoteDescription() function signature.
+
+// Sanity test making sure the callback is invoked.
+TEST_P(PeerConnectionRtpTest, LegacyObserverOnSuccess) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+
+ std::string error;
+ ASSERT_TRUE(
+ callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal(), &error));
+}
+
+// Verifies legacy behavior: The observer is not called if the peer
+// connection is destroyed because the asynchronous callback is executed in the
+// peer connection's message handler.
+TEST_P(PeerConnectionRtpTest,
+ LegacyObserverNotCalledIfPeerConnectionDereferenced) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+
+ rtc::scoped_refptr<webrtc::MockSetSessionDescriptionObserver> observer =
+ rtc::make_ref_counted<webrtc::MockSetSessionDescriptionObserver>();
+
+ auto offer = caller->CreateOfferAndSetAsLocal();
+ callee->pc()->SetRemoteDescription(observer.get(), offer.release());
+ // Drop the reference to the callee PeerConnection before the asynchronous
+ // callback has had a chance to run.
+ callee = nullptr;
+ rtc::Thread::Current()->ProcessMessages(0);
+ EXPECT_FALSE(observer->called());
+}
+
+// RtpTransceiver Tests.
+
+// Test that by default there are no transceivers with Unified Plan.
+TEST_F(PeerConnectionRtpTestUnifiedPlan, PeerConnectionHasNoTransceivers) {
+ auto caller = CreatePeerConnection();
+ EXPECT_THAT(caller->pc()->GetTransceivers(), ElementsAre());
+}
+
+// Test that a transceiver created with the audio kind has the correct initial
+// properties.
+TEST_F(PeerConnectionRtpTestUnifiedPlan,
+ AddTransceiverHasCorrectInitProperties) {
+ auto caller = CreatePeerConnection();
+
+ auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+ EXPECT_EQ(absl::nullopt, transceiver->mid());
+ EXPECT_FALSE(transceiver->stopped());
+ EXPECT_EQ(RtpTransceiverDirection::kSendRecv, transceiver->direction());
+ EXPECT_EQ(absl::nullopt, transceiver->current_direction());
+}
+
+// Test that adding a transceiver with the audio kind creates an audio sender
+// and audio receiver with the receiver having a live audio track.
+TEST_F(PeerConnectionRtpTestUnifiedPlan,
+ AddAudioTransceiverCreatesAudioSenderAndReceiver) {
+ auto caller = CreatePeerConnection();
+
+ auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+ EXPECT_EQ(cricket::MEDIA_TYPE_AUDIO, transceiver->media_type());
+
+ ASSERT_TRUE(transceiver->sender());
+ EXPECT_EQ(cricket::MEDIA_TYPE_AUDIO, transceiver->sender()->media_type());
+
+ ASSERT_TRUE(transceiver->receiver());
+ EXPECT_EQ(cricket::MEDIA_TYPE_AUDIO, transceiver->receiver()->media_type());
+
+ // The receiver's track exists immediately and starts out live.
+ auto track = transceiver->receiver()->track();
+ ASSERT_TRUE(track);
+ EXPECT_EQ(MediaStreamTrackInterface::kAudioKind, track->kind());
+ EXPECT_EQ(MediaStreamTrackInterface::TrackState::kLive, track->state());
+}
+
+// Test that adding a transceiver with the video kind creates a video sender
+// and video receiver with the receiver having a live video track.
+TEST_F(PeerConnectionRtpTestUnifiedPlan,
+ AddVideoTransceiverCreatesVideoSenderAndReceiver) {
+ auto caller = CreatePeerConnection();
+
+ // The transceiver, its sender and its receiver must all report the video
+ // media type.
+ auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO);
+ EXPECT_EQ(cricket::MEDIA_TYPE_VIDEO, transceiver->media_type());
+
+ ASSERT_TRUE(transceiver->sender());
+ EXPECT_EQ(cricket::MEDIA_TYPE_VIDEO, transceiver->sender()->media_type());
+
+ ASSERT_TRUE(transceiver->receiver());
+ EXPECT_EQ(cricket::MEDIA_TYPE_VIDEO, transceiver->receiver()->media_type());
+
+ // The receiver's track exists immediately and starts out live.
+ auto track = transceiver->receiver()->track();
+ ASSERT_TRUE(track);
+ EXPECT_EQ(MediaStreamTrackInterface::kVideoKind, track->kind());
+ EXPECT_EQ(MediaStreamTrackInterface::TrackState::kLive, track->state());
+}
+
+// Test that after a call to AddTransceiver, the transceiver shows in
+// GetTransceivers(), the transceiver's sender shows in GetSenders(), and the
+// transceiver's receiver shows in GetReceivers().
+TEST_F(PeerConnectionRtpTestUnifiedPlan, AddTransceiverShowsInLists) {
+ auto caller = CreatePeerConnection();
+
+ auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+ EXPECT_EQ(
+ std::vector<rtc::scoped_refptr<RtpTransceiverInterface>>{transceiver},
+ caller->pc()->GetTransceivers());
+ EXPECT_EQ(
+ std::vector<rtc::scoped_refptr<RtpSenderInterface>>{
+ transceiver->sender()},
+ caller->pc()->GetSenders());
+ EXPECT_EQ(
+ std::vector<rtc::scoped_refptr<RtpReceiverInterface>>{
+ transceiver->receiver()},
+ caller->pc()->GetReceivers());
+}
+
+// Test that the direction passed in through the AddTransceiver init parameter
+// is set in the returned transceiver.
+TEST_F(PeerConnectionRtpTestUnifiedPlan,
+ AddTransceiverWithDirectionIsReflected) {
+ auto caller = CreatePeerConnection();
+
+ RtpTransceiverInit init;
+ init.direction = RtpTransceiverDirection::kSendOnly;
+ auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO, init);
+ EXPECT_EQ(RtpTransceiverDirection::kSendOnly, transceiver->direction());
+}
+
+// Test that calling AddTransceiver with a track creates a transceiver which has
+// its sender's track set to the passed-in track.
+TEST_F(PeerConnectionRtpTestUnifiedPlan,
+ AddTransceiverWithTrackCreatesSenderWithTrack) {
+ auto caller = CreatePeerConnection();
+
+ auto audio_track = caller->CreateAudioTrack("audio track");
+ auto transceiver = caller->AddTransceiver(audio_track);
+
+ auto sender = transceiver->sender();
+ ASSERT_TRUE(sender->track());
+ EXPECT_EQ(audio_track, sender->track());
+
+ // The receiver side still gets its own live track of the same kind.
+ auto receiver = transceiver->receiver();
+ ASSERT_TRUE(receiver->track());
+ EXPECT_EQ(MediaStreamTrackInterface::kAudioKind, receiver->track()->kind());
+ EXPECT_EQ(MediaStreamTrackInterface::TrackState::kLive,
+ receiver->track()->state());
+}
+
+// Test that calling AddTransceiver twice with the same track creates distinct
+// transceivers, senders with the same track.
+TEST_F(PeerConnectionRtpTestUnifiedPlan,
+ AddTransceiverTwiceWithSameTrackCreatesMultipleTransceivers) {
+ auto caller = CreatePeerConnection();
+
+ auto audio_track = caller->CreateAudioTrack("audio track");
+
+ auto transceiver1 = caller->AddTransceiver(audio_track);
+ auto transceiver2 = caller->AddTransceiver(audio_track);
+
+ EXPECT_NE(transceiver1, transceiver2);
+
+ auto sender1 = transceiver1->sender();
+ auto sender2 = transceiver2->sender();
+ EXPECT_NE(sender1, sender2);
+ EXPECT_EQ(audio_track, sender1->track());
+ EXPECT_EQ(audio_track, sender2->track());
+
+ EXPECT_THAT(caller->pc()->GetTransceivers(),
+ UnorderedElementsAre(transceiver1, transceiver2));
+ EXPECT_THAT(caller->pc()->GetSenders(),
+ UnorderedElementsAre(sender1, sender2));
+}
+
+// RtpTransceiver error handling tests.
+
+// AddTransceiver() only accepts the audio and video kinds; the data kind must
+// be rejected as an invalid parameter.
+TEST_F(PeerConnectionRtpTestUnifiedPlan,
+ AddTransceiverWithInvalidKindReturnsError) {
+ auto caller = CreatePeerConnection();
+
+ auto result = caller->pc()->AddTransceiver(cricket::MEDIA_TYPE_DATA);
+ EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, result.error().type());
+}
+
+// Smoke test: closing a freshly created PeerConnection must be safe.
+TEST_F(PeerConnectionRtpTestUnifiedPlan,
+ CanClosePeerConnectionWithoutCrashing) {
+ auto caller = CreatePeerConnection();
+
+ caller->pc()->Close();
+}
+
+// Unified Plan AddTrack tests.
+
+// Test that adding an audio track creates a new audio RtpSender with the given
+// track.
+TEST_F(PeerConnectionRtpTestUnifiedPlan, AddAudioTrackCreatesAudioSender) {
+ auto caller = CreatePeerConnection();
+
+ auto audio_track = caller->CreateAudioTrack("a");
+ auto sender = caller->AddTrack(audio_track);
+ ASSERT_TRUE(sender);
+
+ EXPECT_EQ(cricket::MEDIA_TYPE_AUDIO, sender->media_type());
+ EXPECT_EQ(audio_track, sender->track());
+}
+
+// Test that adding a video track creates a new video RtpSender with the given
+// track.
+TEST_F(PeerConnectionRtpTestUnifiedPlan, AddVideoTrackCreatesVideoSender) {
+ auto caller = CreatePeerConnection();
+
+ auto video_track = caller->CreateVideoTrack("a");
+ auto sender = caller->AddTrack(video_track);
+ ASSERT_TRUE(sender);
+
+ EXPECT_EQ(cricket::MEDIA_TYPE_VIDEO, sender->media_type());
+ EXPECT_EQ(video_track, sender->track());
+}
+
+// Test that adding a track to a new PeerConnection creates an RtpTransceiver
+// with the sender that AddTrack returns and in the sendrecv direction.
+TEST_F(PeerConnectionRtpTestUnifiedPlan, AddFirstTrackCreatesTransceiver) {
+ auto caller = CreatePeerConnection();
+
+ auto sender = caller->AddAudioTrack("a");
+ ASSERT_TRUE(sender);
+
+ auto transceivers = caller->pc()->GetTransceivers();
+ ASSERT_EQ(1u, transceivers.size());
+ EXPECT_EQ(sender, transceivers[0]->sender());
+ EXPECT_EQ(RtpTransceiverDirection::kSendRecv, transceivers[0]->direction());
+}
+
+// Test that if a transceiver of the same type but no track had been added to
+// the PeerConnection and later a call to AddTrack is made, the resulting sender
+// is the transceiver's sender and the sender's track is the newly-added track.
+TEST_F(PeerConnectionRtpTestUnifiedPlan, AddTrackReusesTransceiver) {
+ auto caller = CreatePeerConnection();
+
+ auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+ auto audio_track = caller->CreateAudioTrack("a");
+ auto sender = caller->AddTrack(audio_track);
+ ASSERT_TRUE(sender);
+
+ auto transceivers = caller->pc()->GetTransceivers();
+ ASSERT_EQ(1u, transceivers.size());
+ EXPECT_EQ(transceiver, transceivers[0]);
+ EXPECT_EQ(sender, transceiver->sender());
+ EXPECT_EQ(audio_track, sender->track());
+}
+
+// Test that AddTrack with explicit send encodings does not reuse an existing
+// track-less transceiver; a second transceiver is created instead.
+TEST_F(PeerConnectionRtpTestUnifiedPlan,
+ AddTrackWithSendEncodingDoesNotReuseTransceiver) {
+ auto caller = CreatePeerConnection();
+
+ auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+ auto audio_track = caller->CreateAudioTrack("a");
+ RtpEncodingParameters encoding;
+ auto sender = caller->AddTrack(audio_track, {}, {encoding});
+ ASSERT_TRUE(sender);
+
+ auto transceivers = caller->pc()->GetTransceivers();
+ ASSERT_EQ(2u, transceivers.size());
+ EXPECT_EQ(transceiver, transceivers[0]);
+ EXPECT_NE(sender, transceiver->sender());
+ EXPECT_EQ(audio_track, sender->track());
+}
+
+// Test that adding two tracks to a new PeerConnection creates two
+// RtpTransceivers in the same order.
+TEST_F(PeerConnectionRtpTestUnifiedPlan, TwoAddTrackCreatesTwoTransceivers) {
+ auto caller = CreatePeerConnection();
+
+ auto sender1 = caller->AddAudioTrack("a");
+ auto sender2 = caller->AddVideoTrack("v");
+ ASSERT_TRUE(sender2);
+
+ auto transceivers = caller->pc()->GetTransceivers();
+ ASSERT_EQ(2u, transceivers.size());
+ EXPECT_EQ(sender1, transceivers[0]->sender());
+ EXPECT_EQ(sender2, transceivers[1]->sender());
+}
+
+// Test that if there are multiple transceivers with no sending track then a
+// later call to AddTrack will use the one of the same type as the newly-added
+// track.
+TEST_F(PeerConnectionRtpTestUnifiedPlan, AddTrackReusesTransceiverOfType) {
+ auto caller = CreatePeerConnection();
+
+ auto audio_transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+ auto video_transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO);
+ auto sender = caller->AddVideoTrack("v");
+
+ ASSERT_EQ(2u, caller->pc()->GetTransceivers().size());
+ EXPECT_NE(sender, audio_transceiver->sender());
+ EXPECT_EQ(sender, video_transceiver->sender());
+}
+
+// Test that if the only transceivers that do not have a sending track have a
+// different type from the added track, then AddTrack will create a new
+// transceiver for the track.
+TEST_F(PeerConnectionRtpTestUnifiedPlan,
+ AddTrackDoesNotReuseTransceiverOfWrongType) {
+ auto caller = CreatePeerConnection();
+
+ caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+ auto sender = caller->AddVideoTrack("v");
+
+ auto transceivers = caller->pc()->GetTransceivers();
+ ASSERT_EQ(2u, transceivers.size());
+ EXPECT_NE(sender, transceivers[0]->sender());
+ EXPECT_EQ(sender, transceivers[1]->sender());
+}
+
+// Test that the first available transceiver is reused by AddTrack when multiple
+// are available.
+TEST_F(PeerConnectionRtpTestUnifiedPlan,
+ AddTrackReusesFirstMatchingTransceiver) {
+ auto caller = CreatePeerConnection();
+
+ caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+ caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+ auto sender = caller->AddAudioTrack("a");
+
+ auto transceivers = caller->pc()->GetTransceivers();
+ ASSERT_EQ(2u, transceivers.size());
+ EXPECT_EQ(sender, transceivers[0]->sender());
+ EXPECT_NE(sender, transceivers[1]->sender());
+}
+
+// Test that a call to AddTrack that reuses a transceiver will change the
+// direction from inactive to sendonly.
+TEST_F(PeerConnectionRtpTestUnifiedPlan,
+ AddTrackChangesDirectionFromInactiveToSendOnly) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+
+ RtpTransceiverInit init;
+ init.direction = RtpTransceiverDirection::kInactive;
+ auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO, init);
+ EXPECT_TRUE(caller->observer()->legacy_renegotiation_needed());
+ EXPECT_TRUE(caller->observer()->has_negotiation_needed_event());
+
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+ caller->observer()->clear_legacy_renegotiation_needed();
+ caller->observer()->clear_latest_negotiation_needed_event();
+ ASSERT_TRUE(caller->AddAudioTrack("a"))
+ // Reusing the transceiver must itself trigger renegotiation.
+ EXPECT_TRUE(caller->observer()->legacy_renegotiation_needed());
+ EXPECT_TRUE(caller->observer()->has_negotiation_needed_event());
+
+ EXPECT_EQ(RtpTransceiverDirection::kSendOnly, transceiver->direction());
+}
+
+// Test that a call to AddTrack that reuses a transceiver will change the
+// direction from recvonly to sendrecv.
+TEST_F(PeerConnectionRtpTestUnifiedPlan,
+ AddTrackChangesDirectionFromRecvOnlyToSendRecv) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+
+ RtpTransceiverInit init;
+ init.direction = RtpTransceiverDirection::kRecvOnly;
+ auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO, init);
+ EXPECT_TRUE(caller->observer()->legacy_renegotiation_needed());
+ EXPECT_TRUE(caller->observer()->has_negotiation_needed_event());
+
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+ caller->observer()->clear_legacy_renegotiation_needed();
+ caller->observer()->clear_latest_negotiation_needed_event();
+ ASSERT_TRUE(caller->AddAudioTrack("a"));
+ EXPECT_TRUE(caller->observer()->legacy_renegotiation_needed());
+ EXPECT_TRUE(caller->observer()->has_negotiation_needed_event());
+
+ EXPECT_EQ(RtpTransceiverDirection::kSendRecv, transceiver->direction());
+}
+
+// Test that the sender created by AddTrack inherits the track's id.
+TEST_F(PeerConnectionRtpTestUnifiedPlan, AddTrackCreatesSenderWithTrackId) {
+ const std::string kTrackId = "audio_track";
+
+ auto caller = CreatePeerConnection();
+
+ auto audio_track = caller->CreateAudioTrack(kTrackId);
+ auto sender = caller->AddTrack(audio_track);
+
+ EXPECT_EQ(kTrackId, sender->id());
+}
+
+// Unified Plan AddTrack error handling.
+
+// Test that AddTrack fails on a closed PeerConnection and does not fire any
+// negotiation-needed events.
+TEST_F(PeerConnectionRtpTestUnifiedPlan, AddTrackErrorIfClosed) {
+ auto caller = CreatePeerConnection();
+
+ auto audio_track = caller->CreateAudioTrack("a");
+ caller->pc()->Close();
+
+ caller->observer()->clear_legacy_renegotiation_needed();
+ caller->observer()->clear_latest_negotiation_needed_event();
+ auto result = caller->pc()->AddTrack(audio_track, std::vector<std::string>());
+ EXPECT_EQ(RTCErrorType::INVALID_STATE, result.error().type());
+ EXPECT_FALSE(caller->observer()->legacy_renegotiation_needed());
+ EXPECT_FALSE(caller->observer()->has_negotiation_needed_event());
+}
+
+// Test that adding the same track a second time is rejected and does not fire
+// any negotiation-needed events.
+TEST_F(PeerConnectionRtpTestUnifiedPlan, AddTrackErrorIfTrackAlreadyHasSender) {
+ auto caller = CreatePeerConnection();
+
+ auto audio_track = caller->CreateAudioTrack("a");
+ ASSERT_TRUE(caller->AddTrack(audio_track));
+
+ caller->observer()->clear_legacy_renegotiation_needed();
+ caller->observer()->clear_latest_negotiation_needed_event();
+ auto result = caller->pc()->AddTrack(audio_track, std::vector<std::string>());
+ EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, result.error().type());
+ EXPECT_FALSE(caller->observer()->legacy_renegotiation_needed());
+ EXPECT_FALSE(caller->observer()->has_negotiation_needed_event());
+}
+
+// Unified Plan RemoveTrack tests.
+
+// Test that calling RemoveTrack on a sender with a previously-added track
+// clears the sender's track.
+TEST_F(PeerConnectionRtpTestUnifiedPlan, RemoveTrackClearsSenderTrack) {
+ auto caller = CreatePeerConnection();
+
+ auto sender = caller->AddAudioTrack("a");
+ ASSERT_TRUE(caller->pc()->RemoveTrackOrError(sender).ok());
+
+ EXPECT_FALSE(sender->track());
+}
+
+// Test that calling RemoveTrack on a sender where the transceiver is configured
+// in the sendrecv direction changes the transceiver's direction to recvonly.
+TEST_F(PeerConnectionRtpTestUnifiedPlan,
+ RemoveTrackChangesDirectionFromSendRecvToRecvOnly) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+
+ RtpTransceiverInit init;
+ init.direction = RtpTransceiverDirection::kSendRecv;
+ auto transceiver =
+ caller->AddTransceiver(caller->CreateAudioTrack("a"), init);
+ EXPECT_TRUE(caller->observer()->legacy_renegotiation_needed());
+ EXPECT_TRUE(caller->observer()->has_negotiation_needed_event());
+
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+ caller->observer()->clear_legacy_renegotiation_needed();
+ caller->observer()->clear_latest_negotiation_needed_event();
+
+ // Removing the track must itself trigger renegotiation.
+ ASSERT_TRUE(caller->pc()->RemoveTrackOrError(transceiver->sender()).ok());
+ EXPECT_TRUE(caller->observer()->legacy_renegotiation_needed());
+ EXPECT_TRUE(caller->observer()->has_negotiation_needed_event());
+
+ EXPECT_EQ(RtpTransceiverDirection::kRecvOnly, transceiver->direction());
+}
+
+// Test that calling RemoveTrack on a sender where the transceiver is configured
+// in the sendonly direction changes the transceiver's direction to inactive.
+TEST_F(PeerConnectionRtpTestUnifiedPlan,
+ RemoveTrackChangesDirectionFromSendOnlyToInactive) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+
+ RtpTransceiverInit init;
+ init.direction = RtpTransceiverDirection::kSendOnly;
+ auto transceiver =
+ caller->AddTransceiver(caller->CreateAudioTrack("a"), init);
+ EXPECT_TRUE(caller->observer()->legacy_renegotiation_needed());
+ EXPECT_TRUE(caller->observer()->has_negotiation_needed_event());
+
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+ caller->observer()->clear_legacy_renegotiation_needed();
+ caller->observer()->clear_latest_negotiation_needed_event();
+
+ ASSERT_TRUE(caller->pc()->RemoveTrackOrError(transceiver->sender()).ok());
+ EXPECT_TRUE(caller->observer()->legacy_renegotiation_needed());
+ EXPECT_TRUE(caller->observer()->has_negotiation_needed_event());
+
+ EXPECT_EQ(RtpTransceiverDirection::kInactive, transceiver->direction());
+}
+
+// Test that calling RemoveTrack with a sender that has a null track results in
+// no change in state.
+TEST_F(PeerConnectionRtpTestUnifiedPlan, RemoveTrackWithNullSenderTrackIsNoOp) {
+ auto caller = CreatePeerConnection();
+
+ auto sender = caller->AddAudioTrack("a");
+ auto transceiver = caller->pc()->GetTransceivers()[0];
+ ASSERT_TRUE(sender->SetTrack(nullptr));
+
+ caller->observer()->clear_legacy_renegotiation_needed();
+ caller->observer()->clear_latest_negotiation_needed_event();
+ ASSERT_TRUE(caller->pc()->RemoveTrackOrError(sender).ok());
+ EXPECT_FALSE(caller->observer()->legacy_renegotiation_needed());
+ EXPECT_FALSE(caller->observer()->has_negotiation_needed_event());
+
+ EXPECT_EQ(RtpTransceiverDirection::kSendRecv, transceiver->direction());
+}
+
+// Unified Plan RemoveTrack error handling.
+
+// Test that RemoveTrack fails on a closed PeerConnection and does not fire any
+// negotiation-needed events.
+TEST_F(PeerConnectionRtpTestUnifiedPlan, RemoveTrackErrorIfClosed) {
+ auto caller = CreatePeerConnection();
+
+ auto sender = caller->AddAudioTrack("a");
+ caller->pc()->Close();
+
+ caller->observer()->clear_legacy_renegotiation_needed();
+ caller->observer()->clear_latest_negotiation_needed_event();
+ EXPECT_FALSE(caller->pc()->RemoveTrackOrError(sender).ok());
+ EXPECT_FALSE(caller->observer()->legacy_renegotiation_needed());
+ EXPECT_FALSE(caller->observer()->has_negotiation_needed_event());
+}
+
+// Test that removing a track that was already removed succeeds as a no-op and
+// does not fire any negotiation-needed events.
+TEST_F(PeerConnectionRtpTestUnifiedPlan,
+ RemoveTrackNoErrorIfTrackAlreadyRemoved) {
+ auto caller = CreatePeerConnection();
+
+ auto sender = caller->AddAudioTrack("a");
+ ASSERT_TRUE(caller->pc()->RemoveTrackOrError(sender).ok());
+
+ caller->observer()->clear_legacy_renegotiation_needed();
+ caller->observer()->clear_latest_negotiation_needed_event();
+ EXPECT_TRUE(caller->pc()->RemoveTrackOrError(sender).ok());
+ EXPECT_FALSE(caller->observer()->legacy_renegotiation_needed());
+ EXPECT_FALSE(caller->observer()->has_negotiation_needed_event());
+}
+
+// Test that setting offers that add/remove/add a track repeatedly without
+// setting the appropriate answer in between works.
+// These are regression tests for bugs.webrtc.org/9401
+TEST_F(PeerConnectionRtpTestUnifiedPlan, AddRemoveAddTrackOffersWorksAudio) {
+ auto caller = CreatePeerConnection();
+
+ auto sender1 = caller->AddAudioTrack("audio1");
+ ASSERT_TRUE(caller->SetLocalDescription(caller->CreateOffer()));
+
+ caller->pc()->RemoveTrackOrError(sender1);
+ ASSERT_TRUE(caller->SetLocalDescription(caller->CreateOffer()));
+
+ // This will re-use the transceiver created by the first AddTrack.
+ auto sender2 = caller->AddAudioTrack("audio2");
+ ASSERT_TRUE(caller->SetLocalDescription(caller->CreateOffer()));
+
+ EXPECT_EQ(1u, caller->pc()->GetTransceivers().size());
+ EXPECT_EQ(sender1, sender2);
+}
+// Same as above, but with a video track.
+TEST_F(PeerConnectionRtpTestUnifiedPlan, AddRemoveAddTrackOffersWorksVideo) {
+ auto caller = CreatePeerConnection();
+
+ auto sender1 = caller->AddVideoTrack("video1");
+ ASSERT_TRUE(caller->SetLocalDescription(caller->CreateOffer()));
+
+ caller->pc()->RemoveTrackOrError(sender1);
+ ASSERT_TRUE(caller->SetLocalDescription(caller->CreateOffer()));
+
+ // This will re-use the transceiver created by the first AddTrack.
+ auto sender2 = caller->AddVideoTrack("video2");
+ ASSERT_TRUE(caller->SetLocalDescription(caller->CreateOffer()));
+
+ EXPECT_EQ(1u, caller->pc()->GetTransceivers().size());
+ EXPECT_EQ(sender1, sender2);
+}
+
+// Test that CreateOffer succeeds if two tracks with the same label are added.
+TEST_F(PeerConnectionRtpTestUnifiedPlan, CreateOfferSameTrackLabel) {
+ auto caller = CreatePeerConnection();
+
+ auto audio_sender = caller->AddAudioTrack("track", {});
+ auto video_sender = caller->AddVideoTrack("track", {});
+
+ EXPECT_TRUE(caller->CreateOffer());
+
+ // The tracks share an id, but the senders must still get distinct ids.
+ EXPECT_EQ(audio_sender->track()->id(), video_sender->track()->id());
+ EXPECT_NE(audio_sender->id(), video_sender->id());
+}
+
+// Test that CreateAnswer succeeds if two tracks with the same label are added.
+TEST_F(PeerConnectionRtpTestUnifiedPlan, CreateAnswerSameTrackLabel) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+
+ RtpTransceiverInit recvonly;
+ recvonly.direction = RtpTransceiverDirection::kRecvOnly;
+ caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO, recvonly);
+ caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO, recvonly);
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOffer()));
+
+ auto audio_sender = callee->AddAudioTrack("track", {});
+ auto video_sender = callee->AddVideoTrack("track", {});
+
+ EXPECT_TRUE(callee->CreateAnswer());
+
+ EXPECT_EQ(audio_sender->track()->id(), video_sender->track()->id());
+ EXPECT_NE(audio_sender->id(), video_sender->id());
+}
+
+// Test that calling AddTrack, RemoveTrack and AddTrack again creates a second
+// m= section with a random sender id (different from the first, now rejected,
+// m= section).
+TEST_F(PeerConnectionRtpTestUnifiedPlan,
+ AddRemoveAddTrackGeneratesNewSenderId) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+
+ auto track = caller->CreateVideoTrack("video");
+ auto sender1 = caller->AddTrack(track);
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+
+ caller->pc()->RemoveTrackOrError(sender1);
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+
+ auto sender2 = caller->AddTrack(track);
+
+ EXPECT_NE(sender1, sender2);
+ EXPECT_NE(sender1->id(), sender2->id());
+ std::string sender2_id = sender2->id();
+
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+
+ // The sender's ID should not change after negotiation.
+ EXPECT_EQ(sender2_id, sender2->id());
+}
+
+// Test that OnRenegotiationNeeded is fired if SetDirection is called on an
+// active RtpTransceiver with a new direction.
+TEST_F(PeerConnectionRtpTestUnifiedPlan,
+ RenegotiationNeededAfterTransceiverSetDirection) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+ EXPECT_FALSE(caller->observer()->legacy_renegotiation_needed());
+ EXPECT_FALSE(caller->observer()->has_negotiation_needed_event());
+
+ auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+ EXPECT_TRUE(caller->observer()->legacy_renegotiation_needed());
+ EXPECT_TRUE(caller->observer()->has_negotiation_needed_event());
+
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+ caller->observer()->clear_legacy_renegotiation_needed();
+ caller->observer()->clear_latest_negotiation_needed_event();
+
+ // Changing the direction after a completed exchange must trigger
+ // renegotiation again.
+ transceiver->SetDirectionWithError(RtpTransceiverDirection::kInactive);
+ EXPECT_TRUE(caller->observer()->legacy_renegotiation_needed());
+ EXPECT_TRUE(caller->observer()->has_negotiation_needed_event());
+}
+
+// Test that OnRenegotiationNeeded is not fired if SetDirection is called on an
+// active RtpTransceiver with current direction.
+TEST_F(PeerConnectionRtpTestUnifiedPlan,
+ NoRenegotiationNeededAfterTransceiverSetSameDirection) {
+ auto caller = CreatePeerConnection();
+
+ auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+
+ caller->observer()->clear_legacy_renegotiation_needed();
+ caller->observer()->clear_latest_negotiation_needed_event();
+ transceiver->SetDirectionWithError(transceiver->direction());
+ EXPECT_FALSE(caller->observer()->legacy_renegotiation_needed());
+ EXPECT_FALSE(caller->observer()->has_negotiation_needed_event());
+}
+
+// Test that OnRenegotiationNeeded is not fired if SetDirection is called on a
+// stopped RtpTransceiver.
+TEST_F(PeerConnectionRtpTestUnifiedPlan,
+ NoRenegotiationNeededAfterSetDirectionOnStoppedTransceiver) {
+ auto caller = CreatePeerConnection();
+
+ auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+ transceiver->StopInternal();
+
+ caller->observer()->clear_legacy_renegotiation_needed();
+ caller->observer()->clear_latest_negotiation_needed_event();
+ transceiver->SetDirectionWithError(RtpTransceiverDirection::kInactive);
+ EXPECT_FALSE(caller->observer()->legacy_renegotiation_needed());
+ EXPECT_FALSE(caller->observer()->has_negotiation_needed_event());
+}
+
+// Test that currentDirection returns "stopped" if the transceiver was stopped.
+TEST_F(PeerConnectionRtpTestUnifiedPlan,
+ CheckStoppedCurrentDirectionOnStoppedTransceiver) {
+ auto caller = CreatePeerConnection();
+
+ auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+ transceiver->StopInternal();
+
+ EXPECT_TRUE(transceiver->stopping());
+ EXPECT_TRUE(transceiver->stopped());
+ EXPECT_EQ(RtpTransceiverDirection::kStopped,
+ transceiver->current_direction());
+}
+
+// Test that InvalidState is thrown on a stopping transceiver.
+TEST_F(PeerConnectionRtpTestUnifiedPlan,
+ CheckForInvalidStateOnStoppingTransceiver) {
+ auto caller = CreatePeerConnection();
+
+ auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+ transceiver->StopStandard();
+
+ EXPECT_TRUE(transceiver->stopping());
+ EXPECT_FALSE(transceiver->stopped());
+ EXPECT_EQ(
+ RTCErrorType::INVALID_STATE,
+ transceiver->SetDirectionWithError(RtpTransceiverDirection::kInactive)
+ .type());
+}
+
+// Test that InvalidState is thrown on a stopped transceiver.
+TEST_F(PeerConnectionRtpTestUnifiedPlan,
+ CheckForInvalidStateOnStoppedTransceiver) {
+ auto caller = CreatePeerConnection();
+
+ auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+ transceiver->StopInternal();
+
+ EXPECT_TRUE(transceiver->stopping());
+ EXPECT_TRUE(transceiver->stopped());
+ EXPECT_EQ(
+ RTCErrorType::INVALID_STATE,
+ transceiver->SetDirectionWithError(RtpTransceiverDirection::kInactive)
+ .type());
+}
+
+// Test that TypeError is thrown if the direction is set to "stopped".
+TEST_F(PeerConnectionRtpTestUnifiedPlan,
+ CheckForTypeErrorForStoppedOnTransceiver) {
+ auto caller = CreatePeerConnection();
+
+ auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+ EXPECT_EQ(
+ RTCErrorType::INVALID_PARAMETER,
+ transceiver->SetDirectionWithError(RtpTransceiverDirection::kStopped)
+ .type());
+}
+
+// Test that you can do createOffer/setLocalDescription with a stopped
+// media section.
+TEST_F(PeerConnectionRtpTestUnifiedPlan,
+ SetLocalDescriptionWithStoppedMediaSection) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+ auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+ callee->pc()->GetTransceivers()[0]->StopStandard();
+ ASSERT_TRUE(callee->ExchangeOfferAnswerWith(caller.get()));
+ EXPECT_EQ(RtpTransceiverDirection::kStopped,
+ transceiver->current_direction());
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+}
+
+TEST_F(PeerConnectionRtpTestUnifiedPlan,
+ StopAndNegotiateCausesTransceiverToDisappear) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+ auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+ callee->pc()->GetTransceivers()[0]->StopStandard();
+ ASSERT_TRUE(callee->ExchangeOfferAnswerWith(caller.get()));
+ EXPECT_EQ(RtpTransceiverDirection::kStopped,
+ transceiver->current_direction());
+ EXPECT_EQ(0U, caller->pc()->GetTransceivers().size());
+ EXPECT_EQ(0U, callee->pc()->GetTransceivers().size());
+ EXPECT_EQ(0U, caller->pc()->GetSenders().size());
+ EXPECT_EQ(0U, callee->pc()->GetSenders().size());
+ EXPECT_EQ(0U, caller->pc()->GetReceivers().size());
+ EXPECT_EQ(0U, callee->pc()->GetReceivers().size());
+}
+
+TEST_F(PeerConnectionRtpTestUnifiedPlan,
+ SetLocalDescriptionWorksAfterRepeatedAddRemove) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+ auto video_track = caller->CreateVideoTrack("v");
+ auto track = caller->CreateAudioTrack("a");
+ caller->AddTransceiver(video_track);
+ auto transceiver = caller->AddTransceiver(track);
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+ caller->pc()->RemoveTrackOrError(transceiver->sender());
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+ caller->AddTrack(track);
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+ caller->pc()->RemoveTrackOrError(transceiver->sender());
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+}
+
+// This is a repro of Chromium bug https://crbug.com/1134686
+TEST_F(PeerConnectionRtpTestUnifiedPlan,
+ SetLocalDescriptionWorksAfterRepeatedAddRemoveWithRemoteReject) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+ auto video_track = caller->CreateVideoTrack("v");
+ auto track = caller->CreateAudioTrack("a");
+ caller->AddTransceiver(video_track);
+ auto transceiver = caller->AddTransceiver(track);
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+ caller->pc()->RemoveTrackOrError(transceiver->sender());
+ ExchangeOfferAnswerWhereRemoteStopsTransceiver(caller.get(), callee.get(), 1);
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+ caller->AddTrack(track);
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+ caller->pc()->RemoveTrackOrError(transceiver->sender());
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+}
+
+// Test that AddTransceiver fails if trying to use unimplemented RTP encoding
+// parameters with the send_encodings parameters.
+TEST_F(PeerConnectionRtpTestUnifiedPlan,
+ CheckForUnsupportedEncodingParameters) {
+ auto caller = CreatePeerConnection();
+
+ RtpTransceiverInit init;
+ init.send_encodings.emplace_back();
+
+ auto default_send_encodings = init.send_encodings;
+
+ // Unimplemented RtpParameters: ssrc, codec_payload_type, fec, rtx, dtx,
+ // ptime, scale_framerate_down_by, dependency_rids.
+ init.send_encodings[0].ssrc = 1;
+ EXPECT_EQ(RTCErrorType::UNSUPPORTED_PARAMETER,
+ caller->pc()
+ ->AddTransceiver(cricket::MEDIA_TYPE_AUDIO, init)
+ .error()
+ .type());
+ init.send_encodings = default_send_encodings;
+}
+
+// Test that AddTransceiver fails if trying to use invalid RTP encoding
+// parameters with the send_encodings parameters.
+TEST_F(PeerConnectionRtpTestUnifiedPlan, CheckForInvalidEncodingParameters) {
+ auto caller = CreatePeerConnection();
+
+ RtpTransceiverInit init;
+ init.send_encodings.emplace_back();
+
+ auto default_send_encodings = init.send_encodings;
+
+ init.send_encodings[0].scale_resolution_down_by = 0.5;
+ EXPECT_EQ(RTCErrorType::INVALID_RANGE,
+ caller->pc()
+ ->AddTransceiver(cricket::MEDIA_TYPE_VIDEO, init)
+ .error()
+ .type());
+ init.send_encodings = default_send_encodings;
+
+ init.send_encodings[0].bitrate_priority = 0;
+ EXPECT_EQ(RTCErrorType::INVALID_RANGE,
+ caller->pc()
+ ->AddTransceiver(cricket::MEDIA_TYPE_VIDEO, init)
+ .error()
+ .type());
+ init.send_encodings = default_send_encodings;
+
+ init.send_encodings[0].min_bitrate_bps = 200000;
+ init.send_encodings[0].max_bitrate_bps = 100000;
+ EXPECT_EQ(RTCErrorType::INVALID_RANGE,
+ caller->pc()
+ ->AddTransceiver(cricket::MEDIA_TYPE_VIDEO, init)
+ .error()
+ .type());
+ init.send_encodings = default_send_encodings;
+
+ init.send_encodings[0].num_temporal_layers = 0;
+ EXPECT_EQ(RTCErrorType::INVALID_RANGE,
+ caller->pc()
+ ->AddTransceiver(cricket::MEDIA_TYPE_VIDEO, init)
+ .error()
+ .type());
+ init.send_encodings = default_send_encodings;
+
+ init.send_encodings[0].num_temporal_layers = 5;
+ EXPECT_EQ(RTCErrorType::INVALID_RANGE,
+ caller->pc()
+ ->AddTransceiver(cricket::MEDIA_TYPE_VIDEO, init)
+ .error()
+ .type());
+ init.send_encodings = default_send_encodings;
+}
+
+// Test that AddTransceiver transfers the send_encodings to the sender and they
+// are retained after SetLocalDescription().
+TEST_F(PeerConnectionRtpTestUnifiedPlan, SendEncodingsPassedToSender) {
+ auto caller = CreatePeerConnection();
+
+ RtpTransceiverInit init;
+ init.send_encodings.emplace_back();
+ init.send_encodings[0].active = false;
+ init.send_encodings[0].max_bitrate_bps = 180000;
+
+ auto result = caller->pc()->AddTransceiver(cricket::MEDIA_TYPE_AUDIO, init);
+ ASSERT_TRUE(result.ok());
+
+ auto init_send_encodings = result.value()->sender()->init_send_encodings();
+ EXPECT_FALSE(init_send_encodings[0].active);
+ EXPECT_EQ(init_send_encodings[0].max_bitrate_bps, 180000);
+
+ auto parameters = result.value()->sender()->GetParameters();
+ EXPECT_FALSE(parameters.encodings[0].active);
+ EXPECT_EQ(parameters.encodings[0].max_bitrate_bps, 180000);
+
+ ASSERT_TRUE(caller->SetLocalDescription(caller->CreateOffer()));
+
+ parameters = result.value()->sender()->GetParameters();
+ EXPECT_FALSE(parameters.encodings[0].active);
+ EXPECT_EQ(parameters.encodings[0].max_bitrate_bps, 180000);
+}
+
+// Test MSID signaling between Unified Plan and Plan B endpoints. There are two
+// options for this kind of signaling: media section based (a=msid) and ssrc
+// based (a=ssrc MSID). While JSEP only specifies media section MSID signaling,
+// we want to ensure compatibility with older Plan B endpoints that might expect
+// ssrc based MSID signaling. Thus we test here that Unified Plan offers both
+// types but answers with the same type as the offer.
+
+class PeerConnectionMsidSignalingTest
+ : public PeerConnectionRtpTestUnifiedPlan {};
+
+TEST_F(PeerConnectionMsidSignalingTest, UnifiedPlanTalkingToOurself) {
+ auto caller = CreatePeerConnectionWithUnifiedPlan();
+ caller->AddAudioTrack("caller_audio");
+ auto callee = CreatePeerConnectionWithUnifiedPlan();
+ callee->AddAudioTrack("callee_audio");
+
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+
+ // Offer should have had both a=msid and a=ssrc MSID lines.
+ auto* offer = callee->pc()->remote_description();
+ EXPECT_EQ((cricket::kMsidSignalingMediaSection |
+ cricket::kMsidSignalingSsrcAttribute),
+ offer->description()->msid_signaling());
+
+ // Answer should have had only a=msid lines.
+ auto* answer = caller->pc()->remote_description();
+ EXPECT_EQ(cricket::kMsidSignalingMediaSection,
+ answer->description()->msid_signaling());
+}
+
+TEST_F(PeerConnectionMsidSignalingTest, PlanBOfferToUnifiedPlanAnswer) {
+ auto caller = CreatePeerConnectionWithPlanB();
+ caller->AddAudioTrack("caller_audio");
+ auto callee = CreatePeerConnectionWithUnifiedPlan();
+ callee->AddAudioTrack("callee_audio");
+
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+
+ // Offer should have only a=ssrc MSID lines.
+ auto* offer = callee->pc()->remote_description();
+ EXPECT_EQ(cricket::kMsidSignalingSsrcAttribute,
+ offer->description()->msid_signaling());
+
+ // Answer should have only a=ssrc MSID lines to match the offer.
+ auto* answer = caller->pc()->remote_description();
+ EXPECT_EQ(cricket::kMsidSignalingSsrcAttribute,
+ answer->description()->msid_signaling());
+}
+
+// This tests that a Plan B endpoint appropriately sets the remote description
+// from a Unified Plan offer. When the Unified Plan offer contains a=msid lines
+// that signal no stream ids or multiple stream ids we expect that the Plan B
+// endpoint always has exactly one media stream per track.
+TEST_F(PeerConnectionMsidSignalingTest, UnifiedPlanToPlanBAnswer) {
+ const std::string kStreamId1 = "audio_stream_1";
+ const std::string kStreamId2 = "audio_stream_2";
+
+ auto caller = CreatePeerConnectionWithUnifiedPlan();
+ caller->AddAudioTrack("caller_audio", {kStreamId1, kStreamId2});
+ caller->AddVideoTrack("caller_video", {});
+ auto callee = CreatePeerConnectionWithPlanB();
+ callee->AddAudioTrack("callee_audio");
+ caller->AddVideoTrack("callee_video");
+
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+
+ // Offer should have had both a=msid and a=ssrc MSID lines.
+ auto* offer = callee->pc()->remote_description();
+ EXPECT_EQ((cricket::kMsidSignalingMediaSection |
+ cricket::kMsidSignalingSsrcAttribute),
+ offer->description()->msid_signaling());
+
+ // Callee should always have 1 stream for all of its receivers.
+ const auto& track_events = callee->observer()->add_track_events_;
+ ASSERT_EQ(2u, track_events.size());
+ ASSERT_EQ(1u, track_events[0].streams.size());
+ EXPECT_EQ(kStreamId1, track_events[0].streams[0]->id());
+ ASSERT_EQ(1u, track_events[1].streams.size());
+ // This autogenerated a stream id for the empty one signalled.
+ EXPECT_FALSE(track_events[1].streams[0]->id().empty());
+}
+
+TEST_F(PeerConnectionMsidSignalingTest, PureUnifiedPlanToUs) {
+ auto caller = CreatePeerConnectionWithUnifiedPlan();
+ caller->AddAudioTrack("caller_audio");
+ auto callee = CreatePeerConnectionWithUnifiedPlan();
+ callee->AddAudioTrack("callee_audio");
+
+ auto offer = caller->CreateOffer();
+ // Simulate a pure Unified Plan offerer by setting the MSID signaling to media
+ // section only.
+ offer->description()->set_msid_signaling(cricket::kMsidSignalingMediaSection);
+
+ ASSERT_TRUE(
+ caller->SetLocalDescription(CloneSessionDescription(offer.get())));
+ ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer)));
+
+ // Answer should have only a=msid to match the offer.
+ auto answer = callee->CreateAnswer();
+ EXPECT_EQ(cricket::kMsidSignalingMediaSection,
+ answer->description()->msid_signaling());
+}
+
+// Sender setups in a call.
+
+TEST_P(PeerConnectionRtpTest, CreateTwoSendersWithSameTrack) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+
+ auto track = caller->CreateAudioTrack("audio_track");
+ auto sender1 = caller->AddTrack(track);
+ ASSERT_TRUE(sender1);
+ // We need to temporarily reset the track for the subsequent AddTrack() to
+ // succeed.
+ EXPECT_TRUE(sender1->SetTrack(nullptr));
+ auto sender2 = caller->AddTrack(track);
+ EXPECT_TRUE(sender2);
+ EXPECT_TRUE(sender1->SetTrack(track.get()));
+
+ if (sdp_semantics_ == SdpSemantics::kPlanB_DEPRECATED) {
+ // TODO(hbos): When https://crbug.com/webrtc/8734 is resolved, this should
+ // return true, and doing `callee->SetRemoteDescription()` should work.
+ EXPECT_FALSE(caller->CreateOfferAndSetAsLocal());
+ } else {
+ EXPECT_TRUE(caller->CreateOfferAndSetAsLocal());
+ }
+}
+
+// This test exercises the code path that fires a NegotiationNeeded
+// notification when the stream IDs of the local description differ from
+// the ones in the transceiver.
+TEST_F(PeerConnectionRtpTestUnifiedPlan,
+ ChangeAssociatedStreamsTriggersRenegotiation) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+
+ RtpTransceiverInit init;
+ init.direction = RtpTransceiverDirection::kSendRecv;
+ auto transceiver =
+ caller->AddTransceiver(caller->CreateAudioTrack("a"), init);
+ EXPECT_TRUE(caller->observer()->legacy_renegotiation_needed());
+ EXPECT_TRUE(caller->observer()->has_negotiation_needed_event());
+
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+ caller->observer()->clear_legacy_renegotiation_needed();
+ caller->observer()->clear_latest_negotiation_needed_event();
+
+ transceiver->sender()->SetStreams({"stream3", "stream4", "stream5"});
+ EXPECT_TRUE(caller->observer()->legacy_renegotiation_needed());
+ EXPECT_TRUE(caller->observer()->has_negotiation_needed_event());
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+ auto callee_streams = callee->pc()->GetReceivers()[0]->streams();
+ ASSERT_EQ(3u, callee_streams.size());
+ EXPECT_EQ("stream3", callee_streams[0]->id());
+ EXPECT_EQ("stream4", callee_streams[1]->id());
+ EXPECT_EQ("stream5", callee_streams[2]->id());
+}
+
+INSTANTIATE_TEST_SUITE_P(PeerConnectionRtpTest,
+ PeerConnectionRtpTest,
+ Values(SdpSemantics::kPlanB_DEPRECATED,
+ SdpSemantics::kUnifiedPlan));
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/peer_connection_signaling_unittest.cc b/third_party/libwebrtc/pc/peer_connection_signaling_unittest.cc
new file mode 100644
index 0000000000..8ca59fc20c
--- /dev/null
+++ b/third_party/libwebrtc/pc/peer_connection_signaling_unittest.cc
@@ -0,0 +1,1368 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file contains tests that check the PeerConnection's signaling state
+// machine, as well as tests that check basic, media-agnostic aspects of SDP.
+
+#include <algorithm>
+#include <cstdint>
+#include <functional>
+#include <map>
+#include <memory>
+#include <set>
+#include <string>
+#include <tuple>
+#include <type_traits>
+#include <utility>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/audio/audio_mixer.h"
+#include "api/audio_codecs/builtin_audio_decoder_factory.h"
+#include "api/audio_codecs/builtin_audio_encoder_factory.h"
+#include "api/create_peerconnection_factory.h"
+#include "api/dtls_transport_interface.h"
+#include "api/jsep.h"
+#include "api/media_types.h"
+#include "api/peer_connection_interface.h"
+#include "api/rtc_error.h"
+#include "api/rtp_receiver_interface.h"
+#include "api/rtp_sender_interface.h"
+#include "api/rtp_transceiver_interface.h"
+#include "api/scoped_refptr.h"
+#include "api/set_local_description_observer_interface.h"
+#include "api/set_remote_description_observer_interface.h"
+#include "api/video_codecs/video_decoder_factory_template.h"
+#include "api/video_codecs/video_decoder_factory_template_dav1d_adapter.h"
+#include "api/video_codecs/video_decoder_factory_template_libvpx_vp8_adapter.h"
+#include "api/video_codecs/video_decoder_factory_template_libvpx_vp9_adapter.h"
+#include "api/video_codecs/video_decoder_factory_template_open_h264_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template.h"
+#include "api/video_codecs/video_encoder_factory_template_libaom_av1_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template_libvpx_vp8_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template_libvpx_vp9_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template_open_h264_adapter.h"
+#include "media/base/codec.h"
+#include "modules/audio_device/include/audio_device.h"
+#include "modules/audio_processing/include/audio_processing.h"
+#include "p2p/base/port_allocator.h"
+#include "pc/peer_connection.h"
+#include "pc/peer_connection_proxy.h"
+#include "pc/peer_connection_wrapper.h"
+#include "pc/sdp_utils.h"
+#include "pc/session_description.h"
+#include "pc/test/mock_peer_connection_observers.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/rtc_certificate.h"
+#include "rtc_base/rtc_certificate_generator.h"
+#include "rtc_base/string_encode.h"
+#include "rtc_base/thread.h"
+#include "test/gtest.h"
+#ifdef WEBRTC_ANDROID
+#include "pc/test/android_test_initializer.h"
+#endif
+#include "pc/test/fake_audio_capture_module.h"
+#include "pc/test/fake_rtc_certificate_generator.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/virtual_socket_server.h"
+
+namespace webrtc {
+
+using SignalingState = PeerConnectionInterface::SignalingState;
+using RTCConfiguration = PeerConnectionInterface::RTCConfiguration;
+using RTCOfferAnswerOptions = PeerConnectionInterface::RTCOfferAnswerOptions;
+using ::testing::Bool;
+using ::testing::Combine;
+using ::testing::Values;
+
+namespace {
+const int64_t kWaitTimeout = 10000;
+} // namespace
+
+class PeerConnectionWrapperForSignalingTest : public PeerConnectionWrapper {
+ public:
+ using PeerConnectionWrapper::PeerConnectionWrapper;
+
+ bool initial_offerer() {
+ return GetInternalPeerConnection()->initial_offerer();
+ }
+
+ PeerConnection* GetInternalPeerConnection() {
+ auto* pci =
+ static_cast<PeerConnectionProxyWithInternal<PeerConnectionInterface>*>(
+ pc());
+ return static_cast<PeerConnection*>(pci->internal());
+ }
+};
+
+class ExecuteFunctionOnCreateSessionDescriptionObserver
+ : public CreateSessionDescriptionObserver {
+ public:
+ ExecuteFunctionOnCreateSessionDescriptionObserver(
+ std::function<void(SessionDescriptionInterface*)> function)
+ : function_(std::move(function)) {}
+ ~ExecuteFunctionOnCreateSessionDescriptionObserver() override {
+ RTC_DCHECK(was_called_);
+ }
+
+ bool was_called() const { return was_called_; }
+
+ void OnSuccess(SessionDescriptionInterface* desc) override {
+ RTC_DCHECK(!was_called_);
+ was_called_ = true;
+ function_(desc);
+ }
+
+ void OnFailure(RTCError error) override { RTC_DCHECK_NOTREACHED(); }
+
+ private:
+ bool was_called_ = false;
+ std::function<void(SessionDescriptionInterface*)> function_;
+};
+
+class PeerConnectionSignalingBaseTest : public ::testing::Test {
+ protected:
+ typedef std::unique_ptr<PeerConnectionWrapperForSignalingTest> WrapperPtr;
+
+ explicit PeerConnectionSignalingBaseTest(SdpSemantics sdp_semantics)
+ : vss_(new rtc::VirtualSocketServer()),
+ main_(vss_.get()),
+ sdp_semantics_(sdp_semantics) {
+#ifdef WEBRTC_ANDROID
+ InitializeAndroidObjects();
+#endif
+ pc_factory_ = CreatePeerConnectionFactory(
+ rtc::Thread::Current(), rtc::Thread::Current(), rtc::Thread::Current(),
+ rtc::scoped_refptr<AudioDeviceModule>(FakeAudioCaptureModule::Create()),
+ CreateBuiltinAudioEncoderFactory(), CreateBuiltinAudioDecoderFactory(),
+ std::make_unique<VideoEncoderFactoryTemplate<
+ LibvpxVp8EncoderTemplateAdapter, LibvpxVp9EncoderTemplateAdapter,
+ OpenH264EncoderTemplateAdapter, LibaomAv1EncoderTemplateAdapter>>(),
+ std::make_unique<VideoDecoderFactoryTemplate<
+ LibvpxVp8DecoderTemplateAdapter, LibvpxVp9DecoderTemplateAdapter,
+ OpenH264DecoderTemplateAdapter, Dav1dDecoderTemplateAdapter>>(),
+ nullptr /* audio_mixer */, nullptr /* audio_processing */);
+ }
+
+ WrapperPtr CreatePeerConnection() {
+ return CreatePeerConnection(RTCConfiguration());
+ }
+
+ WrapperPtr CreatePeerConnection(const RTCConfiguration& config) {
+ auto observer = std::make_unique<MockPeerConnectionObserver>();
+ RTCConfiguration modified_config = config;
+ modified_config.sdp_semantics = sdp_semantics_;
+ auto result = pc_factory_->CreatePeerConnectionOrError(
+ modified_config, PeerConnectionDependencies(observer.get()));
+ if (!result.ok()) {
+ return nullptr;
+ }
+
+ observer->SetPeerConnectionInterface(result.value().get());
+ return std::make_unique<PeerConnectionWrapperForSignalingTest>(
+ pc_factory_, result.MoveValue(), std::move(observer));
+ }
+
+ // Accepts the same arguments as CreatePeerConnection and adds default audio
+ // and video tracks.
+ template <typename... Args>
+ WrapperPtr CreatePeerConnectionWithAudioVideo(Args&&... args) {
+ auto wrapper = CreatePeerConnection(std::forward<Args>(args)...);
+ if (!wrapper) {
+ return nullptr;
+ }
+ wrapper->AddAudioTrack("a");
+ wrapper->AddVideoTrack("v");
+ return wrapper;
+ }
+
+ int NumberOfDtlsTransports(const WrapperPtr& pc_wrapper) {
+ std::set<DtlsTransportInterface*> transports;
+ auto transceivers = pc_wrapper->pc()->GetTransceivers();
+
+ for (auto& transceiver : transceivers) {
+ if (transceiver->sender()->dtls_transport()) {
+ EXPECT_TRUE(transceiver->receiver()->dtls_transport());
+ EXPECT_EQ(transceiver->sender()->dtls_transport().get(),
+ transceiver->receiver()->dtls_transport().get());
+ transports.insert(transceiver->sender()->dtls_transport().get());
+ } else {
+ // If one transceiver is missing, they all should be.
+ EXPECT_EQ(0UL, transports.size());
+ }
+ }
+ return transports.size();
+ }
+
+ bool HasDtlsTransport(const WrapperPtr& pc_wrapper) {
+ return NumberOfDtlsTransports(pc_wrapper) > 0;
+ }
+
+ std::unique_ptr<rtc::VirtualSocketServer> vss_;
+ rtc::AutoSocketServerThread main_;
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> pc_factory_;
+ const SdpSemantics sdp_semantics_;
+};
+
+class PeerConnectionSignalingTest
+ : public PeerConnectionSignalingBaseTest,
+ public ::testing::WithParamInterface<SdpSemantics> {
+ protected:
+ PeerConnectionSignalingTest() : PeerConnectionSignalingBaseTest(GetParam()) {}
+};
+
+TEST_P(PeerConnectionSignalingTest, SetLocalOfferTwiceWorks) {
+ auto caller = CreatePeerConnection();
+
+ EXPECT_TRUE(caller->SetLocalDescription(caller->CreateOffer()));
+ EXPECT_TRUE(caller->SetLocalDescription(caller->CreateOffer()));
+}
+
+TEST_P(PeerConnectionSignalingTest, SetRemoteOfferTwiceWorks) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+
+ EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOffer()));
+ EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOffer()));
+}
+
+TEST_P(PeerConnectionSignalingTest, FailToSetNullLocalDescription) {
+ auto caller = CreatePeerConnection();
+ std::string error;
+ ASSERT_FALSE(caller->SetLocalDescription(nullptr, &error));
+ EXPECT_EQ("SessionDescription is NULL.", error);
+}
+
+TEST_P(PeerConnectionSignalingTest, FailToSetNullRemoteDescription) {
+ auto caller = CreatePeerConnection();
+ std::string error;
+ ASSERT_FALSE(caller->SetRemoteDescription(nullptr, &error));
+ EXPECT_EQ("SessionDescription is NULL.", error);
+}
+
+// The following parameterized test verifies that calls to various signaling
+// methods on PeerConnection will succeed/fail depending on what is the
+// PeerConnection's signaling state. Note that the test tries many different
+// forms of SignalingState::kClosed by arriving at a valid state then calling
+// `Close()`. This is intended to catch cases where the PeerConnection signaling
+// method ignores the closed flag but may work/not work because of the single
+// state the PeerConnection was created in before it was closed.
+
+class PeerConnectionSignalingStateTest
+ : public PeerConnectionSignalingBaseTest,
+ public ::testing::WithParamInterface<
+ std::tuple<SdpSemantics, SignalingState, bool>> {
+ protected:
+ PeerConnectionSignalingStateTest()
+ : PeerConnectionSignalingBaseTest(std::get<0>(GetParam())),
+ state_under_test_(std::make_tuple(std::get<1>(GetParam()),
+ std::get<2>(GetParam()))) {}
+
+ RTCConfiguration GetConfig() {
+ RTCConfiguration config;
+ config.certificates.push_back(
+ FakeRTCCertificateGenerator::GenerateCertificate());
+ return config;
+ }
+
+ WrapperPtr CreatePeerConnectionUnderTest() {
+ return CreatePeerConnectionInState(state_under_test_);
+ }
+
+ WrapperPtr CreatePeerConnectionInState(SignalingState state) {
+ return CreatePeerConnectionInState(std::make_tuple(state, false));
+ }
+
+ WrapperPtr CreatePeerConnectionInState(
+ std::tuple<SignalingState, bool> state_tuple) {
+ SignalingState state = std::get<0>(state_tuple);
+ bool closed = std::get<1>(state_tuple);
+
+ auto wrapper = CreatePeerConnectionWithAudioVideo(GetConfig());
+ switch (state) {
+ case SignalingState::kStable: {
+ break;
+ }
+ case SignalingState::kHaveLocalOffer: {
+ wrapper->SetLocalDescription(wrapper->CreateOffer());
+ break;
+ }
+ case SignalingState::kHaveLocalPrAnswer: {
+ auto caller = CreatePeerConnectionWithAudioVideo(GetConfig());
+ wrapper->SetRemoteDescription(caller->CreateOffer());
+ auto answer = wrapper->CreateAnswer();
+ wrapper->SetLocalDescription(
+ CloneSessionDescriptionAsType(answer.get(), SdpType::kPrAnswer));
+ break;
+ }
+ case SignalingState::kHaveRemoteOffer: {
+ auto caller = CreatePeerConnectionWithAudioVideo(GetConfig());
+ wrapper->SetRemoteDescription(caller->CreateOffer());
+ break;
+ }
+ case SignalingState::kHaveRemotePrAnswer: {
+ auto callee = CreatePeerConnectionWithAudioVideo(GetConfig());
+ callee->SetRemoteDescription(wrapper->CreateOfferAndSetAsLocal());
+ auto answer = callee->CreateAnswer();
+ wrapper->SetRemoteDescription(
+ CloneSessionDescriptionAsType(answer.get(), SdpType::kPrAnswer));
+ break;
+ }
+ case SignalingState::kClosed: {
+ RTC_DCHECK_NOTREACHED()
+ << "Set the second member of the tuple to true to "
+ "achieve a closed state from an existing, valid "
+ "state.";
+ }
+ }
+
+ RTC_DCHECK_EQ(state, wrapper->pc()->signaling_state());
+
+ if (closed) {
+ wrapper->pc()->Close();
+ RTC_DCHECK_EQ(SignalingState::kClosed, wrapper->signaling_state());
+ }
+
+ return wrapper;
+ }
+
+ std::tuple<SignalingState, bool> state_under_test_;
+};
+
+TEST_P(PeerConnectionSignalingStateTest, CreateOffer) {
+ auto wrapper = CreatePeerConnectionUnderTest();
+ if (wrapper->signaling_state() != SignalingState::kClosed) {
+ EXPECT_TRUE(wrapper->CreateOffer());
+ } else {
+ std::string error;
+ ASSERT_FALSE(wrapper->CreateOffer(RTCOfferAnswerOptions(), &error));
+ EXPECT_PRED_FORMAT2(AssertStartsWith, error,
+ "CreateOffer called when PeerConnection is closed.");
+ }
+}
+
+TEST_P(PeerConnectionSignalingStateTest, CreateAnswer) {
+ auto wrapper = CreatePeerConnectionUnderTest();
+ if (wrapper->signaling_state() == SignalingState::kHaveLocalPrAnswer ||
+ wrapper->signaling_state() == SignalingState::kHaveRemoteOffer) {
+ EXPECT_TRUE(wrapper->CreateAnswer());
+ } else {
+ std::string error;
+ ASSERT_FALSE(wrapper->CreateAnswer(RTCOfferAnswerOptions(), &error));
+ EXPECT_EQ(error,
+ "PeerConnection cannot create an answer in a state other than "
+ "have-remote-offer or have-local-pranswer.");
+ }
+}
+
+TEST_P(PeerConnectionSignalingStateTest, SetLocalOffer) {
+ auto wrapper = CreatePeerConnectionUnderTest();
+ if (wrapper->signaling_state() == SignalingState::kStable ||
+ wrapper->signaling_state() == SignalingState::kHaveLocalOffer) {
+ // Need to call CreateOffer on the PeerConnection under test, otherwise when
+ // setting the local offer it will want to verify the DTLS fingerprint
+ // against the locally generated certificate, but without a call to
+ // CreateOffer the certificate will never be generated.
+ EXPECT_TRUE(wrapper->SetLocalDescription(wrapper->CreateOffer()));
+ } else {
+ auto wrapper_for_offer =
+ CreatePeerConnectionInState(SignalingState::kHaveLocalOffer);
+ auto offer =
+ CloneSessionDescription(wrapper_for_offer->pc()->local_description());
+
+ std::string error;
+ ASSERT_FALSE(wrapper->SetLocalDescription(std::move(offer), &error));
+ EXPECT_PRED_FORMAT2(
+ AssertStartsWith, error,
+ "Failed to set local offer sdp: Called in wrong state:");
+ }
+}
+
+TEST_P(PeerConnectionSignalingStateTest, SetLocalPrAnswer) {
+ auto wrapper_for_pranswer =
+ CreatePeerConnectionInState(SignalingState::kHaveLocalPrAnswer);
+ auto pranswer =
+ CloneSessionDescription(wrapper_for_pranswer->pc()->local_description());
+
+ auto wrapper = CreatePeerConnectionUnderTest();
+ if (wrapper->signaling_state() == SignalingState::kHaveLocalPrAnswer ||
+ wrapper->signaling_state() == SignalingState::kHaveRemoteOffer) {
+ EXPECT_TRUE(wrapper->SetLocalDescription(std::move(pranswer)));
+ } else {
+ std::string error;
+ ASSERT_FALSE(wrapper->SetLocalDescription(std::move(pranswer), &error));
+ EXPECT_PRED_FORMAT2(
+ AssertStartsWith, error,
+ "Failed to set local pranswer sdp: Called in wrong state:");
+ }
+}
+
+TEST_P(PeerConnectionSignalingStateTest, SetLocalAnswer) {
+ auto wrapper_for_answer =
+ CreatePeerConnectionInState(SignalingState::kHaveRemoteOffer);
+ auto answer = wrapper_for_answer->CreateAnswer();
+
+ auto wrapper = CreatePeerConnectionUnderTest();
+ if (wrapper->signaling_state() == SignalingState::kHaveLocalPrAnswer ||
+ wrapper->signaling_state() == SignalingState::kHaveRemoteOffer) {
+ EXPECT_TRUE(wrapper->SetLocalDescription(std::move(answer)));
+ } else {
+ std::string error;
+ ASSERT_FALSE(wrapper->SetLocalDescription(std::move(answer), &error));
+ EXPECT_PRED_FORMAT2(
+ AssertStartsWith, error,
+ "Failed to set local answer sdp: Called in wrong state:");
+ }
+}
+
+TEST_P(PeerConnectionSignalingStateTest, SetRemoteOffer) {
+ auto wrapper_for_offer =
+ CreatePeerConnectionInState(SignalingState::kHaveRemoteOffer);
+ auto offer =
+ CloneSessionDescription(wrapper_for_offer->pc()->remote_description());
+
+ auto wrapper = CreatePeerConnectionUnderTest();
+ if (wrapper->signaling_state() == SignalingState::kStable ||
+ wrapper->signaling_state() == SignalingState::kHaveRemoteOffer) {
+ EXPECT_TRUE(wrapper->SetRemoteDescription(std::move(offer)));
+ } else {
+ std::string error;
+ ASSERT_FALSE(wrapper->SetRemoteDescription(std::move(offer), &error));
+ EXPECT_PRED_FORMAT2(
+ AssertStartsWith, error,
+ "Failed to set remote offer sdp: Called in wrong state:");
+ }
+}
+
+TEST_P(PeerConnectionSignalingStateTest, SetRemotePrAnswer) {
+ auto wrapper_for_pranswer =
+ CreatePeerConnectionInState(SignalingState::kHaveRemotePrAnswer);
+ auto pranswer =
+ CloneSessionDescription(wrapper_for_pranswer->pc()->remote_description());
+
+ auto wrapper = CreatePeerConnectionUnderTest();
+ if (wrapper->signaling_state() == SignalingState::kHaveLocalOffer ||
+ wrapper->signaling_state() == SignalingState::kHaveRemotePrAnswer) {
+ EXPECT_TRUE(wrapper->SetRemoteDescription(std::move(pranswer)));
+ } else {
+ std::string error;
+ ASSERT_FALSE(wrapper->SetRemoteDescription(std::move(pranswer), &error));
+ EXPECT_PRED_FORMAT2(
+ AssertStartsWith, error,
+ "Failed to set remote pranswer sdp: Called in wrong state:");
+ }
+}
+
+TEST_P(PeerConnectionSignalingStateTest, SetRemoteAnswer) {
+ auto wrapper_for_answer =
+ CreatePeerConnectionInState(SignalingState::kHaveRemoteOffer);
+ auto answer = wrapper_for_answer->CreateAnswer();
+
+ auto wrapper = CreatePeerConnectionUnderTest();
+ if (wrapper->signaling_state() == SignalingState::kHaveLocalOffer ||
+ wrapper->signaling_state() == SignalingState::kHaveRemotePrAnswer) {
+ EXPECT_TRUE(wrapper->SetRemoteDescription(std::move(answer)));
+ } else {
+ std::string error;
+ ASSERT_FALSE(wrapper->SetRemoteDescription(std::move(answer), &error));
+ EXPECT_PRED_FORMAT2(
+ AssertStartsWith, error,
+ "Failed to set remote answer sdp: Called in wrong state:");
+ }
+}
+
+INSTANTIATE_TEST_SUITE_P(PeerConnectionSignalingTest,
+ PeerConnectionSignalingStateTest,
+ Combine(Values(SdpSemantics::kPlanB_DEPRECATED,
+ SdpSemantics::kUnifiedPlan),
+ Values(SignalingState::kStable,
+ SignalingState::kHaveLocalOffer,
+ SignalingState::kHaveLocalPrAnswer,
+ SignalingState::kHaveRemoteOffer,
+ SignalingState::kHaveRemotePrAnswer),
+ Bool()));
+
+// Test that CreateAnswer fails if a round of offer/answer has been done and
+// the PeerConnection is in the stable state.
+TEST_P(PeerConnectionSignalingTest, CreateAnswerFailsIfStable) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+
+ ASSERT_EQ(SignalingState::kStable, caller->signaling_state());
+ EXPECT_FALSE(caller->CreateAnswer());
+
+ ASSERT_EQ(SignalingState::kStable, callee->signaling_state());
+ EXPECT_FALSE(callee->CreateAnswer());
+}
+
+// According to https://tools.ietf.org/html/rfc3264#section-8, the session id
+// stays the same but the version must be incremented if a later, different
+// session description is generated. These two tests verify that is the case for
+// both offers and answers.
+TEST_P(PeerConnectionSignalingTest,
+ SessionVersionIncrementedInSubsequentDifferentOffer) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+
+ auto original_offer = caller->CreateOfferAndSetAsLocal();
+ const std::string original_id = original_offer->session_id();
+ const std::string original_version = original_offer->session_version();
+
+ ASSERT_TRUE(callee->SetRemoteDescription(std::move(original_offer)));
+ ASSERT_TRUE(caller->SetRemoteDescription(callee->CreateAnswer()));
+
+ // Add track to get a different offer.
+ caller->AddAudioTrack("a");
+
+ auto later_offer = caller->CreateOffer();
+
+ EXPECT_EQ(original_id, later_offer->session_id());
+ EXPECT_LT(rtc::FromString<uint64_t>(original_version),
+ rtc::FromString<uint64_t>(later_offer->session_version()));
+}
+TEST_P(PeerConnectionSignalingTest,
+ SessionVersionIncrementedInSubsequentDifferentAnswer) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+
+ auto original_answer = callee->CreateAnswer();
+ const std::string original_id = original_answer->session_id();
+ const std::string original_version = original_answer->session_version();
+
+ // Add track to get a different answer.
+ callee->AddAudioTrack("a");
+
+ auto later_answer = callee->CreateAnswer();
+
+ EXPECT_EQ(original_id, later_answer->session_id());
+ EXPECT_LT(rtc::FromString<uint64_t>(original_version),
+ rtc::FromString<uint64_t>(later_answer->session_version()));
+}
+
+TEST_P(PeerConnectionSignalingTest, InitiatorFlagSetOnCallerAndNotOnCallee) {
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnectionWithAudioVideo();
+
+ EXPECT_FALSE(caller->initial_offerer());
+ EXPECT_FALSE(callee->initial_offerer());
+
+ ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
+
+ EXPECT_TRUE(caller->initial_offerer());
+ EXPECT_FALSE(callee->initial_offerer());
+
+ ASSERT_TRUE(
+ caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal()));
+
+ EXPECT_TRUE(caller->initial_offerer());
+ EXPECT_FALSE(callee->initial_offerer());
+}
+
+// Test creating a PeerConnection, request multiple offers, destroy the
+// PeerConnection and make sure we get success/failure callbacks for all of the
+// requests.
+// Background: crbug.com/507307
+TEST_P(PeerConnectionSignalingTest, CreateOffersAndShutdown) {
+ auto caller = CreatePeerConnection();
+
+ RTCOfferAnswerOptions options;
+ options.offer_to_receive_audio =
+ RTCOfferAnswerOptions::kOfferToReceiveMediaTrue;
+
+ rtc::scoped_refptr<MockCreateSessionDescriptionObserver> observers[100];
+ for (auto& observer : observers) {
+ observer = rtc::make_ref_counted<MockCreateSessionDescriptionObserver>();
+ caller->pc()->CreateOffer(observer.get(), options);
+ }
+
+ // Destroy the PeerConnection.
+ caller.reset(nullptr);
+
+ for (auto& observer : observers) {
+ // We expect to have received a notification now even if the PeerConnection
+ // was terminated. The offer creation may or may not have succeeded, but we
+ // must have received a notification.
+ EXPECT_TRUE_WAIT(observer->called(), kWaitTimeout);
+ }
+}
+
+// Similar to the above test, but by closing the PC first the CreateOffer() will
+// fail "early", which triggers a codepath where the PeerConnection is
+// reponsible for invoking the observer, instead of the normal codepath where
+// the WebRtcSessionDescriptionFactory is responsible for it.
+TEST_P(PeerConnectionSignalingTest, CloseCreateOfferAndShutdown) {
+ auto caller = CreatePeerConnection();
+ auto observer = rtc::make_ref_counted<MockCreateSessionDescriptionObserver>();
+ caller->pc()->Close();
+ caller->pc()->CreateOffer(observer.get(), RTCOfferAnswerOptions());
+ caller.reset(nullptr);
+ EXPECT_TRUE_WAIT(observer->called(), kWaitTimeout);
+}
+
+TEST_P(PeerConnectionSignalingTest,
+ ImplicitCreateOfferAndShutdownWithOldObserver) {
+ auto caller = CreatePeerConnection();
+ auto observer = MockSetSessionDescriptionObserver::Create();
+ caller->pc()->SetLocalDescription(observer.get());
+ caller.reset(nullptr);
+ // The old observer does not get invoked because posted messages are lost.
+ EXPECT_FALSE(observer->called());
+}
+
+TEST_P(PeerConnectionSignalingTest, ImplicitCreateOfferAndShutdown) {
+ auto caller = CreatePeerConnection();
+ auto observer = rtc::make_ref_counted<FakeSetLocalDescriptionObserver>();
+ caller->pc()->SetLocalDescription(observer);
+ caller.reset(nullptr);
+ // The new observer gets invoked because it is called immediately.
+ EXPECT_TRUE(observer->called());
+ EXPECT_FALSE(observer->error().ok());
+}
+
+TEST_P(PeerConnectionSignalingTest,
+ CloseBeforeImplicitCreateOfferAndShutdownWithOldObserver) {
+ auto caller = CreatePeerConnection();
+ auto observer = MockSetSessionDescriptionObserver::Create();
+ caller->pc()->Close();
+ caller->pc()->SetLocalDescription(observer.get());
+ caller.reset(nullptr);
+ // The old observer does not get invoked because posted messages are lost.
+ EXPECT_FALSE(observer->called());
+}
+
+TEST_P(PeerConnectionSignalingTest, CloseBeforeImplicitCreateOfferAndShutdown) {
+ auto caller = CreatePeerConnection();
+ auto observer = rtc::make_ref_counted<FakeSetLocalDescriptionObserver>();
+ caller->pc()->Close();
+ caller->pc()->SetLocalDescription(observer);
+ caller.reset(nullptr);
+ // The new observer gets invoked because it is called immediately.
+ EXPECT_TRUE(observer->called());
+ EXPECT_FALSE(observer->error().ok());
+}
+
+TEST_P(PeerConnectionSignalingTest,
+ CloseAfterImplicitCreateOfferAndShutdownWithOldObserver) {
+ auto caller = CreatePeerConnection();
+ auto observer = MockSetSessionDescriptionObserver::Create();
+ caller->pc()->SetLocalDescription(observer.get());
+ caller->pc()->Close();
+ caller.reset(nullptr);
+ // The old observer does not get invoked because posted messages are lost.
+ EXPECT_FALSE(observer->called());
+}
+
+TEST_P(PeerConnectionSignalingTest, CloseAfterImplicitCreateOfferAndShutdown) {
+ auto caller = CreatePeerConnection();
+ auto observer = rtc::make_ref_counted<FakeSetLocalDescriptionObserver>();
+ caller->pc()->SetLocalDescription(observer);
+ caller->pc()->Close();
+ caller.reset(nullptr);
+ // The new observer gets invoked because it is called immediately.
+ EXPECT_TRUE(observer->called());
+ EXPECT_FALSE(observer->error().ok());
+}
+
+TEST_P(PeerConnectionSignalingTest,
+ SetLocalDescriptionNewObserverIsInvokedImmediately) {
+ auto caller = CreatePeerConnection();
+ auto offer = caller->CreateOffer(RTCOfferAnswerOptions());
+
+ auto observer = rtc::make_ref_counted<FakeSetLocalDescriptionObserver>();
+ caller->pc()->SetLocalDescription(std::move(offer), observer);
+ // The new observer is invoked immediately.
+ EXPECT_TRUE(observer->called());
+ EXPECT_TRUE(observer->error().ok());
+}
+
+TEST_P(PeerConnectionSignalingTest,
+ SetLocalDescriptionOldObserverIsInvokedInAPostedMessage) {
+ auto caller = CreatePeerConnection();
+ auto offer = caller->CreateOffer(RTCOfferAnswerOptions());
+
+ auto observer = MockSetSessionDescriptionObserver::Create();
+ caller->pc()->SetLocalDescription(observer.get(), offer.release());
+ // The old observer is not invoked immediately.
+ EXPECT_FALSE(observer->called());
+ // Process all currently pending messages by waiting for a posted task to run.
+ bool checkpoint_reached = false;
+ rtc::Thread::Current()->PostTask(
+ [&checkpoint_reached] { checkpoint_reached = true; });
+ EXPECT_TRUE_WAIT(checkpoint_reached, kWaitTimeout);
+ // If resolving the observer was pending, it must now have been called.
+ EXPECT_TRUE(observer->called());
+}
+
+TEST_P(PeerConnectionSignalingTest, SetRemoteDescriptionExecutesImmediately) {
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnection();
+
+ // This offer will cause receivers to be created.
+ auto offer = caller->CreateOffer(RTCOfferAnswerOptions());
+
+ // By not waiting for the observer's callback we can verify that the operation
+ // executed immediately.
+ callee->pc()->SetRemoteDescription(
+ std::move(offer),
+ rtc::make_ref_counted<FakeSetRemoteDescriptionObserver>());
+ EXPECT_EQ(2u, callee->pc()->GetReceivers().size());
+}
+
+TEST_P(PeerConnectionSignalingTest, CreateOfferBlocksSetRemoteDescription) {
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnection();
+
+ // This offer will cause receivers to be created.
+ auto offer = caller->CreateOffer(RTCOfferAnswerOptions());
+
+ EXPECT_EQ(0u, callee->pc()->GetReceivers().size());
+ auto offer_observer =
+ rtc::make_ref_counted<MockCreateSessionDescriptionObserver>();
+ // Synchronously invoke CreateOffer() and SetRemoteDescription(). The
+ // SetRemoteDescription() operation should be chained to be executed
+ // asynchronously, when CreateOffer() completes.
+ callee->pc()->CreateOffer(offer_observer.get(), RTCOfferAnswerOptions());
+ callee->pc()->SetRemoteDescription(
+ std::move(offer),
+ rtc::make_ref_counted<FakeSetRemoteDescriptionObserver>());
+ // CreateOffer() is asynchronous; without message processing this operation
+ // should not have completed.
+ EXPECT_FALSE(offer_observer->called());
+ // Due to chaining, the receivers should not have been created by the offer
+ // yet.
+ EXPECT_EQ(0u, callee->pc()->GetReceivers().size());
+ // EXPECT_TRUE_WAIT causes messages to be processed...
+ EXPECT_TRUE_WAIT(offer_observer->called(), kWaitTimeout);
+ // Now that the offer has been completed, SetRemoteDescription() will have
+ // been executed next in the chain.
+ EXPECT_EQ(2u, callee->pc()->GetReceivers().size());
+}
+
+TEST_P(PeerConnectionSignalingTest,
+ ParameterlessSetLocalDescriptionCreatesOffer) {
+ auto caller = CreatePeerConnectionWithAudioVideo();
+
+ auto observer = MockSetSessionDescriptionObserver::Create();
+ caller->pc()->SetLocalDescription(observer.get());
+
+ // The offer is created asynchronously; message processing is needed for it to
+ // complete.
+ EXPECT_FALSE(observer->called());
+ EXPECT_FALSE(caller->pc()->pending_local_description());
+ EXPECT_EQ(PeerConnection::kStable, caller->signaling_state());
+
+ // Wait for messages to be processed.
+ EXPECT_TRUE_WAIT(observer->called(), kWaitTimeout);
+ EXPECT_TRUE(observer->result());
+ EXPECT_TRUE(caller->pc()->pending_local_description());
+ EXPECT_EQ(SdpType::kOffer,
+ caller->pc()->pending_local_description()->GetType());
+ EXPECT_EQ(PeerConnection::kHaveLocalOffer, caller->signaling_state());
+}
+
+TEST_P(PeerConnectionSignalingTest,
+ ParameterlessSetLocalDescriptionCreatesAnswer) {
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnectionWithAudioVideo();
+
+ callee->SetRemoteDescription(caller->CreateOffer());
+ EXPECT_EQ(PeerConnection::kHaveRemoteOffer, callee->signaling_state());
+
+ auto observer = MockSetSessionDescriptionObserver::Create();
+ callee->pc()->SetLocalDescription(observer.get());
+
+ // The answer is created asynchronously; message processing is needed for it
+ // to complete.
+ EXPECT_FALSE(observer->called());
+ EXPECT_FALSE(callee->pc()->current_local_description());
+
+ // Wait for messages to be processed.
+ EXPECT_TRUE_WAIT(observer->called(), kWaitTimeout);
+ EXPECT_TRUE(observer->result());
+ EXPECT_TRUE(callee->pc()->current_local_description());
+ EXPECT_EQ(SdpType::kAnswer,
+ callee->pc()->current_local_description()->GetType());
+ EXPECT_EQ(PeerConnection::kStable, callee->signaling_state());
+}
+
+TEST_P(PeerConnectionSignalingTest,
+ ParameterlessSetLocalDescriptionFullExchange) {
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnectionWithAudioVideo();
+
+ // SetLocalDescription(), implicitly creating an offer.
+ auto caller_set_local_description_observer =
+ MockSetSessionDescriptionObserver::Create();
+ caller->pc()->SetLocalDescription(
+ caller_set_local_description_observer.get());
+ EXPECT_TRUE_WAIT(caller_set_local_description_observer->called(),
+ kWaitTimeout);
+ ASSERT_TRUE(caller->pc()->pending_local_description());
+
+ // SetRemoteDescription(offer)
+ auto callee_set_remote_description_observer =
+ MockSetSessionDescriptionObserver::Create();
+ callee->pc()->SetRemoteDescription(
+ callee_set_remote_description_observer.get(),
+ CloneSessionDescription(caller->pc()->pending_local_description())
+ .release());
+
+ // SetLocalDescription(), implicitly creating an answer.
+ auto callee_set_local_description_observer =
+ MockSetSessionDescriptionObserver::Create();
+ callee->pc()->SetLocalDescription(
+ callee_set_local_description_observer.get());
+ EXPECT_TRUE_WAIT(callee_set_local_description_observer->called(),
+ kWaitTimeout);
+ // Chaining guarantees SetRemoteDescription() happened before
+ // SetLocalDescription().
+ EXPECT_TRUE(callee_set_remote_description_observer->called());
+ EXPECT_TRUE(callee->pc()->current_local_description());
+
+ // SetRemoteDescription(answer)
+ auto caller_set_remote_description_observer =
+ MockSetSessionDescriptionObserver::Create();
+ caller->pc()->SetRemoteDescription(
+ caller_set_remote_description_observer.get(),
+ CloneSessionDescription(callee->pc()->current_local_description())
+ .release());
+ EXPECT_TRUE_WAIT(caller_set_remote_description_observer->called(),
+ kWaitTimeout);
+
+ EXPECT_EQ(PeerConnection::kStable, caller->signaling_state());
+ EXPECT_EQ(PeerConnection::kStable, callee->signaling_state());
+}
+
+TEST_P(PeerConnectionSignalingTest,
+ ParameterlessSetLocalDescriptionCloseBeforeCreatingOffer) {
+ auto caller = CreatePeerConnectionWithAudioVideo();
+
+ auto observer = MockSetSessionDescriptionObserver::Create();
+ caller->pc()->Close();
+ caller->pc()->SetLocalDescription(observer.get());
+
+ // The operation should fail asynchronously.
+ EXPECT_FALSE(observer->called());
+ EXPECT_TRUE_WAIT(observer->called(), kWaitTimeout);
+ EXPECT_FALSE(observer->result());
+ // This did not affect the signaling state.
+ EXPECT_EQ(PeerConnection::kClosed, caller->pc()->signaling_state());
+ EXPECT_EQ(
+ "SetLocalDescription failed to create session description - "
+ "SetLocalDescription called when PeerConnection is closed.",
+ observer->error());
+}
+
+TEST_P(PeerConnectionSignalingTest,
+ ParameterlessSetLocalDescriptionCloseWhileCreatingOffer) {
+ auto caller = CreatePeerConnectionWithAudioVideo();
+
+ auto observer = MockSetSessionDescriptionObserver::Create();
+ caller->pc()->SetLocalDescription(observer.get());
+ caller->pc()->Close();
+
+ // The operation should fail asynchronously.
+ EXPECT_FALSE(observer->called());
+ EXPECT_TRUE_WAIT(observer->called(), kWaitTimeout);
+ EXPECT_FALSE(observer->result());
+ // This did not affect the signaling state.
+ EXPECT_EQ(PeerConnection::kClosed, caller->pc()->signaling_state());
+ EXPECT_EQ(
+ "SetLocalDescription failed to create session description - "
+ "CreateOffer failed because the session was shut down",
+ observer->error());
+}
+
+TEST_P(PeerConnectionSignalingTest, UnsupportedContentType) {
+ auto caller = CreatePeerConnection();
+
+ // Call setRemoteDescription with a m= line we don't understand.
+ std::string sdp =
+ "v=0\r\n"
+ "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "m=bogus 9 FOO 0 8\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=mid:bogusmid\r\n";
+ std::unique_ptr<webrtc::SessionDescriptionInterface> remote_description =
+ webrtc::CreateSessionDescription(SdpType::kOffer, sdp, nullptr);
+
+ EXPECT_TRUE(caller->SetRemoteDescription(std::move(remote_description)));
+
+ // Assert we respond back with something meaningful.
+ auto answer = caller->CreateAnswer();
+ ASSERT_EQ(answer->description()->contents().size(), 1u);
+ EXPECT_NE(answer->description()
+ ->contents()[0]
+ .media_description()
+ ->as_unsupported(),
+ nullptr);
+ EXPECT_EQ(answer->description()
+ ->contents()[0]
+ .media_description()
+ ->as_unsupported()
+ ->media_type(),
+ "bogus");
+ EXPECT_TRUE(answer->description()->contents()[0].rejected);
+ EXPECT_EQ(answer->description()->contents()[0].mid(), "bogusmid");
+ EXPECT_EQ(
+ answer->description()->contents()[0].media_description()->protocol(),
+ "FOO");
+ EXPECT_FALSE(
+ answer->description()->contents()[0].media_description()->has_codecs());
+
+ EXPECT_TRUE(caller->SetLocalDescription(std::move(answer)));
+
+ // Assert we keep this in susequent offers.
+ auto offer = caller->CreateOffer();
+ EXPECT_EQ(offer->description()
+ ->contents()[0]
+ .media_description()
+ ->as_unsupported()
+ ->media_type(),
+ "bogus");
+ EXPECT_TRUE(offer->description()->contents()[0].rejected);
+ EXPECT_EQ(offer->description()->contents()[0].media_description()->protocol(),
+ "FOO");
+ EXPECT_EQ(offer->description()->contents()[0].mid(), "bogusmid");
+ EXPECT_FALSE(
+ offer->description()->contents()[0].media_description()->has_codecs());
+ EXPECT_TRUE(caller->SetLocalDescription(std::move(offer)));
+}
+
+TEST_P(PeerConnectionSignalingTest, ReceiveFlexFec) {
+ auto caller = CreatePeerConnection();
+
+ std::string sdp =
+ "v=0\r\n"
+ "o=- 8403615332048243445 2 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=group:BUNDLE 0\r\n"
+ "m=video 9 UDP/TLS/RTP/SAVPF 102 122\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtcp:9 IN IP4 0.0.0.0\r\n"
+ "a=ice-ufrag:IZeV\r\n"
+ "a=ice-pwd:uaZhQD4rYM/Tta2qWBT1Bbt4\r\n"
+ "a=ice-options:trickle\r\n"
+ "a=fingerprint:sha-256 "
+ "D8:6C:3D:FA:23:E2:2C:63:11:2D:D0:86:BE:C4:D0:65:F9:42:F7:1C:06:04:27:E6:"
+ "1C:2C:74:01:8D:50:67:23\r\n"
+ "a=setup:actpass\r\n"
+ "a=mid:0\r\n"
+ "a=sendrecv\r\n"
+ "a=msid:stream track\r\n"
+ "a=rtcp-mux\r\n"
+ "a=rtcp-rsize\r\n"
+ "a=rtpmap:102 VP8/90000\r\n"
+ "a=rtcp-fb:102 goog-remb\r\n"
+ "a=rtcp-fb:102 transport-cc\r\n"
+ "a=rtcp-fb:102 ccm fir\r\n"
+ "a=rtcp-fb:102 nack\r\n"
+ "a=rtcp-fb:102 nack pli\r\n"
+ "a=rtpmap:122 flexfec-03/90000\r\n"
+ "a=fmtp:122 repair-window=10000000\r\n"
+ "a=ssrc-group:FEC-FR 1224551896 1953032773\r\n"
+ "a=ssrc:1224551896 cname:/exJcmhSLpyu9FgV\r\n"
+ "a=ssrc:1953032773 cname:/exJcmhSLpyu9FgV\r\n";
+ std::unique_ptr<webrtc::SessionDescriptionInterface> remote_description =
+ webrtc::CreateSessionDescription(SdpType::kOffer, sdp, nullptr);
+
+ EXPECT_TRUE(caller->SetRemoteDescription(std::move(remote_description)));
+
+ auto answer = caller->CreateAnswer();
+ ASSERT_EQ(answer->description()->contents().size(), 1u);
+ ASSERT_NE(
+ answer->description()->contents()[0].media_description()->as_video(),
+ nullptr);
+ auto codecs = answer->description()
+ ->contents()[0]
+ .media_description()
+ ->as_video()
+ ->codecs();
+ ASSERT_EQ(codecs.size(), 2u);
+ EXPECT_EQ(codecs[1].name, "flexfec-03");
+
+ EXPECT_TRUE(caller->SetLocalDescription(std::move(answer)));
+}
+
+TEST_P(PeerConnectionSignalingTest, ReceiveFlexFecReoffer) {
+ auto caller = CreatePeerConnection();
+
+ std::string sdp =
+ "v=0\r\n"
+ "o=- 8403615332048243445 2 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=group:BUNDLE 0\r\n"
+ "m=video 9 UDP/TLS/RTP/SAVPF 102 35\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtcp:9 IN IP4 0.0.0.0\r\n"
+ "a=ice-ufrag:IZeV\r\n"
+ "a=ice-pwd:uaZhQD4rYM/Tta2qWBT1Bbt4\r\n"
+ "a=ice-options:trickle\r\n"
+ "a=fingerprint:sha-256 "
+ "D8:6C:3D:FA:23:E2:2C:63:11:2D:D0:86:BE:C4:D0:65:F9:42:F7:1C:06:04:27:E6:"
+ "1C:2C:74:01:8D:50:67:23\r\n"
+ "a=setup:actpass\r\n"
+ "a=mid:0\r\n"
+ "a=sendrecv\r\n"
+ "a=msid:stream track\r\n"
+ "a=rtcp-mux\r\n"
+ "a=rtcp-rsize\r\n"
+ "a=rtpmap:102 VP8/90000\r\n"
+ "a=rtcp-fb:102 goog-remb\r\n"
+ "a=rtcp-fb:102 transport-cc\r\n"
+ "a=rtcp-fb:102 ccm fir\r\n"
+ "a=rtcp-fb:102 nack\r\n"
+ "a=rtcp-fb:102 nack pli\r\n"
+ "a=rtpmap:35 flexfec-03/90000\r\n"
+ "a=fmtp:35 repair-window=10000000\r\n"
+ "a=ssrc-group:FEC-FR 1224551896 1953032773\r\n"
+ "a=ssrc:1224551896 cname:/exJcmhSLpyu9FgV\r\n"
+ "a=ssrc:1953032773 cname:/exJcmhSLpyu9FgV\r\n";
+ std::unique_ptr<webrtc::SessionDescriptionInterface> remote_description =
+ webrtc::CreateSessionDescription(SdpType::kOffer, sdp, nullptr);
+
+ EXPECT_TRUE(caller->SetRemoteDescription(std::move(remote_description)));
+
+ auto answer = caller->CreateAnswer();
+ ASSERT_EQ(answer->description()->contents().size(), 1u);
+ ASSERT_NE(
+ answer->description()->contents()[0].media_description()->as_video(),
+ nullptr);
+ auto codecs = answer->description()
+ ->contents()[0]
+ .media_description()
+ ->as_video()
+ ->codecs();
+ ASSERT_EQ(codecs.size(), 2u);
+ EXPECT_EQ(codecs[1].name, "flexfec-03");
+ EXPECT_EQ(codecs[1].id, 35);
+
+ EXPECT_TRUE(caller->SetLocalDescription(std::move(answer)));
+
+ // This generates a collision for AV1 which needs to be remapped.
+ auto offer = caller->CreateOffer(RTCOfferAnswerOptions());
+ auto offer_codecs = offer->description()
+ ->contents()[0]
+ .media_description()
+ ->as_video()
+ ->codecs();
+ auto flexfec_it = std::find_if(
+ offer_codecs.begin(), offer_codecs.end(),
+ [](const cricket::Codec& codec) { return codec.name == "flexfec-03"; });
+ ASSERT_EQ(flexfec_it->id, 35);
+ auto av1_it = std::find_if(
+ offer_codecs.begin(), offer_codecs.end(),
+ [](const cricket::Codec& codec) { return codec.name == "AV1"; });
+ if (av1_it != offer_codecs.end()) {
+ ASSERT_NE(av1_it->id, 35);
+ }
+}
+
+TEST_P(PeerConnectionSignalingTest, MidAttributeMaxLength) {
+ auto caller = CreatePeerConnection();
+
+ std::string sdp =
+ "v=0\r\n"
+ "o=- 8403615332048243445 2 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "m=video 9 UDP/TLS/RTP/SAVPF 102\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtcp:9 IN IP4 0.0.0.0\r\n"
+ "a=ice-ufrag:IZeV\r\n"
+ "a=ice-pwd:uaZhQD4rYM/Tta2qWBT1Bbt4\r\n"
+ "a=ice-options:trickle\r\n"
+ "a=fingerprint:sha-256 "
+ "D8:6C:3D:FA:23:E2:2C:63:11:2D:D0:86:BE:C4:D0:65:F9:42:F7:1C:06:04:27:E6:"
+ "1C:2C:74:01:8D:50:67:23\r\n"
+ "a=setup:actpass\r\n"
+ // Too long mid attribute.
+ "a=mid:0123456789012345678901234567890123\r\n"
+ "a=sendrecv\r\n"
+ "a=msid:stream track\r\n"
+ "a=rtcp-mux\r\n"
+ "a=rtcp-rsize\r\n"
+ "a=rtpmap:102 VP8/90000\r\n"
+ "a=rtcp-fb:102 goog-remb\r\n"
+ "a=rtcp-fb:102 transport-cc\r\n"
+ "a=rtcp-fb:102 ccm fir\r\n"
+ "a=rtcp-fb:102 nack\r\n"
+ "a=rtcp-fb:102 nack pli\r\n"
+ "a=ssrc:1224551896 cname:/exJcmhSLpyu9FgV\r\n";
+ std::unique_ptr<webrtc::SessionDescriptionInterface> remote_description =
+ webrtc::CreateSessionDescription(SdpType::kOffer, sdp, nullptr);
+
+ EXPECT_FALSE(caller->SetRemoteDescription(std::move(remote_description)));
+}
+
+INSTANTIATE_TEST_SUITE_P(PeerConnectionSignalingTest,
+ PeerConnectionSignalingTest,
+ Values(SdpSemantics::kPlanB_DEPRECATED,
+ SdpSemantics::kUnifiedPlan));
+
+class PeerConnectionSignalingUnifiedPlanTest
+ : public PeerConnectionSignalingBaseTest {
+ protected:
+ PeerConnectionSignalingUnifiedPlanTest()
+ : PeerConnectionSignalingBaseTest(SdpSemantics::kUnifiedPlan) {}
+};
+
+// We verify that SetLocalDescription() executed immediately by verifying that
+// the transceiver mid values got assigned. SLD executing immeditately is not
+// unique to Unified Plan, but the transceivers used to verify this are only
+// available in Unified Plan.
+TEST_F(PeerConnectionSignalingUnifiedPlanTest,
+ SetLocalDescriptionExecutesImmediatelyUsingOldObserver) {
+ auto caller = CreatePeerConnectionWithAudioVideo();
+
+ // This offer will cause transceiver mids to get assigned.
+ auto offer = caller->CreateOffer(RTCOfferAnswerOptions());
+
+ // By not waiting for the observer's callback we can verify that the operation
+ // executed immediately. The old observer is invoked in a posted message, so
+ // waiting for it would not ensure synchronicity.
+ RTC_DCHECK(!caller->pc()->GetTransceivers()[0]->mid().has_value());
+ caller->pc()->SetLocalDescription(
+ rtc::make_ref_counted<MockSetSessionDescriptionObserver>().get(),
+ offer.release());
+ EXPECT_TRUE(caller->pc()->GetTransceivers()[0]->mid().has_value());
+}
+
+TEST_F(PeerConnectionSignalingUnifiedPlanTest,
+ SetLocalDescriptionExecutesImmediatelyUsingNewObserver) {
+ auto caller = CreatePeerConnectionWithAudioVideo();
+
+ // This offer will cause transceiver mids to get assigned.
+ auto offer = caller->CreateOffer(RTCOfferAnswerOptions());
+
+ // Verify that mids were assigned without waiting for the observer. (However,
+ // the new observer should also be invoked synchronously - as is ensured by
+ // other tests.)
+ RTC_DCHECK(!caller->pc()->GetTransceivers()[0]->mid().has_value());
+ caller->pc()->SetLocalDescription(
+ std::move(offer),
+ rtc::make_ref_counted<FakeSetLocalDescriptionObserver>());
+ EXPECT_TRUE(caller->pc()->GetTransceivers()[0]->mid().has_value());
+}
+
+TEST_F(PeerConnectionSignalingUnifiedPlanTest,
+ SetLocalDescriptionExecutesImmediatelyInsideCreateOfferCallback) {
+ auto caller = CreatePeerConnectionWithAudioVideo();
+
+ // This offer will cause transceiver mids to get assigned.
+ auto offer = caller->CreateOffer(RTCOfferAnswerOptions());
+
+ auto offer_observer =
+ rtc::make_ref_counted<ExecuteFunctionOnCreateSessionDescriptionObserver>(
+ [pc = caller->pc()](SessionDescriptionInterface* desc) {
+ // By not waiting for the observer's callback we can verify that the
+ // operation executed immediately.
+ RTC_DCHECK(!pc->GetTransceivers()[0]->mid().has_value());
+ pc->SetLocalDescription(
+ rtc::make_ref_counted<MockSetSessionDescriptionObserver>()
+ .get(),
+ desc);
+ EXPECT_TRUE(pc->GetTransceivers()[0]->mid().has_value());
+ });
+ caller->pc()->CreateOffer(offer_observer.get(), RTCOfferAnswerOptions());
+ EXPECT_TRUE_WAIT(offer_observer->was_called(), kWaitTimeout);
+}
+
+// Test that transports are shown in the sender/receiver API after offer/answer.
+// This only works in Unified Plan.
+TEST_F(PeerConnectionSignalingUnifiedPlanTest,
+ DtlsTransportsInstantiateInOfferAnswer) {
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnection();
+
+ EXPECT_FALSE(HasDtlsTransport(caller));
+ EXPECT_FALSE(HasDtlsTransport(callee));
+ auto offer = caller->CreateOffer(RTCOfferAnswerOptions());
+ caller->SetLocalDescription(CloneSessionDescription(offer.get()));
+ EXPECT_TRUE(HasDtlsTransport(caller));
+ callee->SetRemoteDescription(std::move(offer));
+ EXPECT_FALSE(HasDtlsTransport(callee));
+ auto answer = callee->CreateAnswer(RTCOfferAnswerOptions());
+ callee->SetLocalDescription(CloneSessionDescription(answer.get()));
+ EXPECT_TRUE(HasDtlsTransport(callee));
+ caller->SetRemoteDescription(std::move(answer));
+ EXPECT_TRUE(HasDtlsTransport(caller));
+
+ ASSERT_EQ(SignalingState::kStable, caller->signaling_state());
+}
+
+TEST_F(PeerConnectionSignalingUnifiedPlanTest, DtlsTransportsMergeWhenBundled) {
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnection();
+
+ EXPECT_FALSE(HasDtlsTransport(caller));
+ EXPECT_FALSE(HasDtlsTransport(callee));
+ auto offer = caller->CreateOffer(RTCOfferAnswerOptions());
+ caller->SetLocalDescription(CloneSessionDescription(offer.get()));
+ EXPECT_EQ(2, NumberOfDtlsTransports(caller));
+ callee->SetRemoteDescription(std::move(offer));
+ auto answer = callee->CreateAnswer(RTCOfferAnswerOptions());
+ callee->SetLocalDescription(CloneSessionDescription(answer.get()));
+ caller->SetRemoteDescription(std::move(answer));
+ EXPECT_EQ(1, NumberOfDtlsTransports(caller));
+
+ ASSERT_EQ(SignalingState::kStable, caller->signaling_state());
+}
+
+TEST_F(PeerConnectionSignalingUnifiedPlanTest,
+ DtlsTransportsAreSeparateeWhenUnbundled) {
+ auto caller = CreatePeerConnectionWithAudioVideo();
+ auto callee = CreatePeerConnection();
+
+ EXPECT_FALSE(HasDtlsTransport(caller));
+ EXPECT_FALSE(HasDtlsTransport(callee));
+ RTCOfferAnswerOptions unbundle_options;
+ unbundle_options.use_rtp_mux = false;
+ auto offer = caller->CreateOffer(unbundle_options);
+ caller->SetLocalDescription(CloneSessionDescription(offer.get()));
+ EXPECT_EQ(2, NumberOfDtlsTransports(caller));
+ callee->SetRemoteDescription(std::move(offer));
+ auto answer = callee->CreateAnswer(RTCOfferAnswerOptions());
+ callee->SetLocalDescription(CloneSessionDescription(answer.get()));
+ EXPECT_EQ(2, NumberOfDtlsTransports(callee));
+ caller->SetRemoteDescription(std::move(answer));
+ EXPECT_EQ(2, NumberOfDtlsTransports(caller));
+
+ ASSERT_EQ(SignalingState::kStable, caller->signaling_state());
+}
+
+TEST_F(PeerConnectionSignalingUnifiedPlanTest,
+ ShouldFireNegotiationNeededWhenNoChangesArePending) {
+ auto caller = CreatePeerConnection();
+ EXPECT_FALSE(caller->observer()->has_negotiation_needed_event());
+ auto transceiver =
+ caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO, RtpTransceiverInit());
+ EXPECT_TRUE(caller->observer()->has_negotiation_needed_event());
+ EXPECT_TRUE(caller->pc()->ShouldFireNegotiationNeededEvent(
+ caller->observer()->latest_negotiation_needed_event()));
+}
+
+TEST_F(PeerConnectionSignalingUnifiedPlanTest,
+ SuppressNegotiationNeededWhenOperationChainIsNotEmpty) {
+ auto caller = CreatePeerConnection();
+ EXPECT_FALSE(caller->observer()->has_negotiation_needed_event());
+ auto transceiver =
+ caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO, RtpTransceiverInit());
+ EXPECT_TRUE(caller->observer()->has_negotiation_needed_event());
+
+ auto observer = rtc::make_ref_counted<MockCreateSessionDescriptionObserver>();
+ caller->pc()->CreateOffer(observer.get(), RTCOfferAnswerOptions());
+ // For this test to work, the operation has to be pending, i.e. the observer
+ // has not yet been invoked.
+ EXPECT_FALSE(observer->called());
+ // Because the Operations Chain is not empty, the event is now suppressed.
+ EXPECT_FALSE(caller->pc()->ShouldFireNegotiationNeededEvent(
+ caller->observer()->latest_negotiation_needed_event()));
+ caller->observer()->clear_latest_negotiation_needed_event();
+
+ // When the Operations Chain becomes empty again, a new negotiation needed
+ // event will be generated that is not suppressed.
+ EXPECT_TRUE_WAIT(observer->called(), kWaitTimeout);
+ EXPECT_TRUE(caller->observer()->has_negotiation_needed_event());
+ EXPECT_TRUE(caller->pc()->ShouldFireNegotiationNeededEvent(
+ caller->observer()->latest_negotiation_needed_event()));
+}
+
+TEST_F(PeerConnectionSignalingUnifiedPlanTest,
+ SuppressNegotiationNeededWhenSignalingStateIsNotStable) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+ auto offer = caller->CreateOffer(RTCOfferAnswerOptions());
+
+ EXPECT_FALSE(caller->observer()->has_negotiation_needed_event());
+ auto transceiver =
+ callee->AddTransceiver(cricket::MEDIA_TYPE_AUDIO, RtpTransceiverInit());
+ EXPECT_TRUE(callee->observer()->has_negotiation_needed_event());
+
+ // Change signaling state (to "have-remote-offer") by setting a remote offer.
+ callee->SetRemoteDescription(std::move(offer));
+ // Because the signaling state is not "stable", the event is now suppressed.
+ EXPECT_FALSE(callee->pc()->ShouldFireNegotiationNeededEvent(
+ callee->observer()->latest_negotiation_needed_event()));
+ callee->observer()->clear_latest_negotiation_needed_event();
+
+ // Upon rolling back to "stable", a new negotiation needed event will be
+ // generated that is not suppressed.
+ callee->SetLocalDescription(CreateSessionDescription(SdpType::kRollback, ""));
+ EXPECT_TRUE(callee->observer()->has_negotiation_needed_event());
+ EXPECT_TRUE(callee->pc()->ShouldFireNegotiationNeededEvent(
+ callee->observer()->latest_negotiation_needed_event()));
+}
+
+TEST_F(PeerConnectionSignalingUnifiedPlanTest, RtxReofferApt) {
+ auto callee = CreatePeerConnection();
+
+ std::string sdp =
+ "v=0\r\n"
+ "o=- 8403615332048243445 2 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "m=video 9 UDP/TLS/RTP/SAVPF 102\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtcp:9 IN IP4 0.0.0.0\r\n"
+ "a=ice-ufrag:IZeV\r\n"
+ "a=ice-pwd:uaZhQD4rYM/Tta2qWBT1Bbt4\r\n"
+ "a=ice-options:trickle\r\n"
+ "a=fingerprint:sha-256 "
+ "D8:6C:3D:FA:23:E2:2C:63:11:2D:D0:86:BE:C4:D0:65:F9:42:F7:1C:06:04:27:E6:"
+ "1C:2C:74:01:8D:50:67:23\r\n"
+ "a=setup:actpass\r\n"
+ "a=mid:0\r\n"
+ "a=sendrecv\r\n"
+ "a=msid:stream track\r\n"
+ "a=rtcp-mux\r\n"
+ "a=rtcp-rsize\r\n"
+ "a=rtpmap:102 VP8/90000\r\n"
+ "a=rtcp-fb:102 goog-remb\r\n"
+ "a=rtcp-fb:102 transport-cc\r\n"
+ "a=rtcp-fb:102 ccm fir\r\n"
+ "a=rtcp-fb:102 nack\r\n"
+ "a=rtcp-fb:102 nack pli\r\n"
+ "a=ssrc:1224551896 cname:/exJcmhSLpyu9FgV\r\n";
+ std::unique_ptr<webrtc::SessionDescriptionInterface> remote_description =
+ webrtc::CreateSessionDescription(SdpType::kOffer, sdp, nullptr);
+
+ EXPECT_TRUE(callee->SetRemoteDescription(std::move(remote_description)));
+
+ auto answer = callee->CreateAnswer(RTCOfferAnswerOptions());
+ EXPECT_TRUE(
+ callee->SetLocalDescription(CloneSessionDescription(answer.get())));
+
+ callee->pc()->GetTransceivers()[0]->StopStandard();
+ auto reoffer = callee->CreateOffer(RTCOfferAnswerOptions());
+ auto codecs = reoffer->description()
+ ->contents()[0]
+ .media_description()
+ ->as_video()
+ ->codecs();
+ ASSERT_GT(codecs.size(), 2u);
+ EXPECT_EQ(codecs[0].name, "VP8");
+ EXPECT_EQ(codecs[1].name, "rtx");
+ auto apt_it = codecs[1].params.find("apt");
+ ASSERT_NE(apt_it, codecs[1].params.end());
+ // The apt should match the id from the remote offer.
+ EXPECT_EQ(apt_it->second, rtc::ToString(codecs[0].id));
+ EXPECT_EQ(apt_it->second, "102");
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/peer_connection_simulcast_unittest.cc b/third_party/libwebrtc/pc/peer_connection_simulcast_unittest.cc
new file mode 100644
index 0000000000..6b6a96c473
--- /dev/null
+++ b/third_party/libwebrtc/pc/peer_connection_simulcast_unittest.cc
@@ -0,0 +1,629 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <algorithm>
+#include <iterator>
+#include <map>
+#include <memory>
+#include <ostream> // no-presubmit-check TODO(webrtc:8982)
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "absl/algorithm/container.h"
+#include "absl/strings/match.h"
+#include "absl/strings/string_view.h"
+#include "api/audio/audio_mixer.h"
+#include "api/audio_codecs/builtin_audio_decoder_factory.h"
+#include "api/audio_codecs/builtin_audio_encoder_factory.h"
+#include "api/audio_codecs/opus_audio_decoder_factory.h"
+#include "api/audio_codecs/opus_audio_encoder_factory.h"
+#include "api/create_peerconnection_factory.h"
+#include "api/jsep.h"
+#include "api/media_types.h"
+#include "api/peer_connection_interface.h"
+#include "api/rtc_error.h"
+#include "api/rtp_parameters.h"
+#include "api/rtp_sender_interface.h"
+#include "api/rtp_transceiver_direction.h"
+#include "api/rtp_transceiver_interface.h"
+#include "api/scoped_refptr.h"
+#include "api/uma_metrics.h"
+#include "api/video/video_codec_constants.h"
+#include "api/video_codecs/video_decoder_factory_template.h"
+#include "api/video_codecs/video_decoder_factory_template_dav1d_adapter.h"
+#include "api/video_codecs/video_decoder_factory_template_libvpx_vp8_adapter.h"
+#include "api/video_codecs/video_decoder_factory_template_libvpx_vp9_adapter.h"
+#include "api/video_codecs/video_decoder_factory_template_open_h264_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template.h"
+#include "api/video_codecs/video_encoder_factory_template_libaom_av1_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template_libvpx_vp8_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template_libvpx_vp9_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template_open_h264_adapter.h"
+#include "media/base/media_constants.h"
+#include "media/base/rid_description.h"
+#include "media/base/stream_params.h"
+#include "modules/audio_device/include/audio_device.h"
+#include "modules/audio_processing/include/audio_processing.h"
+#include "pc/channel_interface.h"
+#include "pc/peer_connection_wrapper.h"
+#include "pc/sdp_utils.h"
+#include "pc/session_description.h"
+#include "pc/simulcast_description.h"
+#include "pc/test/fake_audio_capture_module.h"
+#include "pc/test/mock_peer_connection_observers.h"
+#include "pc/test/simulcast_layer_util.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/strings/string_builder.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/unique_id_generator.h"
+#include "system_wrappers/include/metrics.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+using ::testing::Contains;
+using ::testing::Each;
+using ::testing::ElementsAre;
+using ::testing::ElementsAreArray;
+using ::testing::Eq;
+using ::testing::Field;
+using ::testing::IsEmpty;
+using ::testing::Le;
+using ::testing::Ne;
+using ::testing::Pair;
+using ::testing::Property;
+using ::testing::SizeIs;
+using ::testing::StartsWith;
+
+using cricket::MediaContentDescription;
+using cricket::RidDescription;
+using cricket::SimulcastDescription;
+using cricket::SimulcastLayer;
+using cricket::StreamParams;
+
+namespace cricket {
+
+std::ostream& operator<<( // no-presubmit-check TODO(webrtc:8982)
+ std::ostream& os, // no-presubmit-check TODO(webrtc:8982)
+ const SimulcastLayer& layer) {
+ if (layer.is_paused) {
+ os << "~";
+ }
+ return os << layer.rid;
+}
+
+} // namespace cricket
+
+namespace {
+
+#if RTC_METRICS_ENABLED
+std::vector<SimulcastLayer> CreateLayers(int num_layers, bool active) {
+ rtc::UniqueStringGenerator rid_generator;
+ std::vector<std::string> rids;
+ for (int i = 0; i < num_layers; ++i) {
+ rids.push_back(rid_generator.GenerateString());
+ }
+ return webrtc::CreateLayers(rids, active);
+}
+#endif
+
+} // namespace
+
+namespace webrtc {
+
+class PeerConnectionSimulcastTests : public ::testing::Test {
+ public:
+ PeerConnectionSimulcastTests()
+ : pc_factory_(CreatePeerConnectionFactory(
+ rtc::Thread::Current(),
+ rtc::Thread::Current(),
+ rtc::Thread::Current(),
+ FakeAudioCaptureModule::Create(),
+ CreateBuiltinAudioEncoderFactory(),
+ CreateBuiltinAudioDecoderFactory(),
+ std::make_unique<
+ VideoEncoderFactoryTemplate<LibvpxVp8EncoderTemplateAdapter,
+ LibvpxVp9EncoderTemplateAdapter,
+ OpenH264EncoderTemplateAdapter,
+ LibaomAv1EncoderTemplateAdapter>>(),
+ std::make_unique<
+ VideoDecoderFactoryTemplate<LibvpxVp8DecoderTemplateAdapter,
+ LibvpxVp9DecoderTemplateAdapter,
+ OpenH264DecoderTemplateAdapter,
+ Dav1dDecoderTemplateAdapter>>(),
+ nullptr,
+ nullptr)) {}
+
+ rtc::scoped_refptr<PeerConnectionInterface> CreatePeerConnection(
+ MockPeerConnectionObserver* observer) {
+ PeerConnectionInterface::RTCConfiguration config;
+ config.sdp_semantics = SdpSemantics::kUnifiedPlan;
+ PeerConnectionDependencies pcd(observer);
+ auto result =
+ pc_factory_->CreatePeerConnectionOrError(config, std::move(pcd));
+ EXPECT_TRUE(result.ok());
+ observer->SetPeerConnectionInterface(result.value().get());
+ return result.MoveValue();
+ }
+
+ std::unique_ptr<PeerConnectionWrapper> CreatePeerConnectionWrapper() {
+ auto observer = std::make_unique<MockPeerConnectionObserver>();
+ auto pc = CreatePeerConnection(observer.get());
+ return std::make_unique<PeerConnectionWrapper>(pc_factory_, pc,
+ std::move(observer));
+ }
+
+ void ExchangeOfferAnswer(PeerConnectionWrapper* local,
+ PeerConnectionWrapper* remote,
+ const std::vector<SimulcastLayer>& answer_layers) {
+ auto offer = local->CreateOfferAndSetAsLocal();
+ // Remove simulcast as the second peer connection won't support it.
+ RemoveSimulcast(offer.get());
+ std::string err;
+ EXPECT_TRUE(remote->SetRemoteDescription(std::move(offer), &err)) << err;
+ auto answer = remote->CreateAnswerAndSetAsLocal();
+ // Setup the answer to look like a server response.
+ auto mcd_answer = answer->description()->contents()[0].media_description();
+ auto& receive_layers = mcd_answer->simulcast_description().receive_layers();
+ for (const SimulcastLayer& layer : answer_layers) {
+ receive_layers.AddLayer(layer);
+ }
+ EXPECT_TRUE(local->SetRemoteDescription(std::move(answer), &err)) << err;
+ }
+
+ rtc::scoped_refptr<RtpTransceiverInterface> AddTransceiver(
+ PeerConnectionWrapper* pc,
+ const std::vector<SimulcastLayer>& layers,
+ cricket::MediaType media_type = cricket::MEDIA_TYPE_VIDEO) {
+ auto init = CreateTransceiverInit(layers);
+ return pc->AddTransceiver(media_type, init);
+ }
+
+ void AddRequestToReceiveSimulcast(const std::vector<SimulcastLayer>& layers,
+ SessionDescriptionInterface* sd) {
+ auto mcd = sd->description()->contents()[0].media_description();
+ SimulcastDescription simulcast;
+ auto& receive_layers = simulcast.receive_layers();
+ for (const SimulcastLayer& layer : layers) {
+ receive_layers.AddLayer(layer);
+ }
+ mcd->set_simulcast_description(simulcast);
+ }
+
+ void ValidateTransceiverParameters(
+ rtc::scoped_refptr<RtpTransceiverInterface> transceiver,
+ const std::vector<SimulcastLayer>& layers) {
+ auto parameters = transceiver->sender()->GetParameters();
+ std::vector<SimulcastLayer> result_layers;
+ absl::c_transform(parameters.encodings, std::back_inserter(result_layers),
+ [](const RtpEncodingParameters& encoding) {
+ return SimulcastLayer(encoding.rid, !encoding.active);
+ });
+ EXPECT_THAT(result_layers, ElementsAreArray(layers));
+ }
+
+ private:
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> pc_factory_;
+};
+
+#if RTC_METRICS_ENABLED
+// This class is used to test the metrics emitted for simulcast.
+class PeerConnectionSimulcastMetricsTests
+ : public PeerConnectionSimulcastTests,
+ public ::testing::WithParamInterface<int> {
+ protected:
+ PeerConnectionSimulcastMetricsTests() { webrtc::metrics::Reset(); }
+};
+#endif
+
+// Validates that RIDs are supported arguments when adding a transceiver.
+TEST_F(PeerConnectionSimulcastTests, CanCreateTransceiverWithRid) {
+ auto pc = CreatePeerConnectionWrapper();
+ auto layers = CreateLayers({"f"}, true);
+ auto transceiver = AddTransceiver(pc.get(), layers);
+ ASSERT_TRUE(transceiver);
+ auto parameters = transceiver->sender()->GetParameters();
+ // Single RID should be removed.
+ EXPECT_THAT(parameters.encodings,
+ ElementsAre(Field("rid", &RtpEncodingParameters::rid, Eq(""))));
+}
+
+TEST_F(PeerConnectionSimulcastTests, CanCreateTransceiverWithSimulcast) {
+ auto pc = CreatePeerConnectionWrapper();
+ auto layers = CreateLayers({"f", "h", "q"}, true);
+ auto transceiver = AddTransceiver(pc.get(), layers);
+ ASSERT_TRUE(transceiver);
+ ValidateTransceiverParameters(transceiver, layers);
+}
+
+TEST_F(PeerConnectionSimulcastTests, RidsAreAutogeneratedIfNotProvided) {
+ auto pc = CreatePeerConnectionWrapper();
+ auto init = CreateTransceiverInit(CreateLayers({"f", "h", "q"}, true));
+ for (RtpEncodingParameters& parameters : init.send_encodings) {
+ parameters.rid = "";
+ }
+ auto transceiver = pc->AddTransceiver(cricket::MEDIA_TYPE_VIDEO, init);
+ auto parameters = transceiver->sender()->GetParameters();
+ ASSERT_EQ(3u, parameters.encodings.size());
+ EXPECT_THAT(parameters.encodings,
+ Each(Field("rid", &RtpEncodingParameters::rid, Ne(""))));
+}
+
+// Validates that an error is returned when there is a mix of supplied and not
+// supplied RIDs in a call to AddTransceiver.
+TEST_F(PeerConnectionSimulcastTests, MustSupplyAllOrNoRidsInSimulcast) {
+ auto pc_wrapper = CreatePeerConnectionWrapper();
+ auto pc = pc_wrapper->pc();
+ // Cannot create a layer with empty RID. Remove the RID after init is created.
+ auto layers = CreateLayers({"f", "h", "remove"}, true);
+ auto init = CreateTransceiverInit(layers);
+ init.send_encodings[2].rid = "";
+ auto error = pc->AddTransceiver(cricket::MEDIA_TYPE_VIDEO, init);
+ EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, error.error().type());
+}
+
+// Validates that an error is returned when illegal RIDs are supplied.
+TEST_F(PeerConnectionSimulcastTests, ChecksForIllegalRidValues) {
+ auto pc_wrapper = CreatePeerConnectionWrapper();
+ auto pc = pc_wrapper->pc();
+ auto layers = CreateLayers({"f", "h", "~q"}, true);
+ auto init = CreateTransceiverInit(layers);
+ auto error = pc->AddTransceiver(cricket::MEDIA_TYPE_VIDEO, init);
+ EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, error.error().type());
+}
+
+// Validates that a single RID is removed from the encoding layer.
+TEST_F(PeerConnectionSimulcastTests, SingleRidIsRemovedFromSessionDescription) {
+ auto pc = CreatePeerConnectionWrapper();
+ auto transceiver = AddTransceiver(pc.get(), CreateLayers({"1"}, true));
+ auto offer = pc->CreateOfferAndSetAsLocal();
+ ASSERT_TRUE(offer);
+ auto contents = offer->description()->contents();
+ ASSERT_EQ(1u, contents.size());
+ EXPECT_THAT(contents[0].media_description()->streams(),
+ ElementsAre(Property(&StreamParams::has_rids, false)));
+}
+
+TEST_F(PeerConnectionSimulcastTests, SimulcastLayersRemovedFromTail) {
+ static_assert(
+ kMaxSimulcastStreams < 8,
+ "Test assumes that the platform does not allow 8 simulcast layers");
+ auto pc = CreatePeerConnectionWrapper();
+ auto layers = CreateLayers({"1", "2", "3", "4", "5", "6", "7", "8"}, true);
+ std::vector<SimulcastLayer> expected_layers;
+ std::copy_n(layers.begin(), kMaxSimulcastStreams,
+ std::back_inserter(expected_layers));
+ auto transceiver = AddTransceiver(pc.get(), layers);
+ ValidateTransceiverParameters(transceiver, expected_layers);
+}
+
+// Checks that an offer to send simulcast contains a SimulcastDescription.
+TEST_F(PeerConnectionSimulcastTests, SimulcastAppearsInSessionDescription) {
+ auto pc = CreatePeerConnectionWrapper();
+ std::vector<std::string> rids({"f", "h", "q"});
+ auto layers = CreateLayers(rids, true);
+ auto transceiver = AddTransceiver(pc.get(), layers);
+ auto offer = pc->CreateOffer();
+ ASSERT_TRUE(offer);
+ auto contents = offer->description()->contents();
+ ASSERT_EQ(1u, contents.size());
+ auto content = contents[0];
+ auto mcd = content.media_description();
+ ASSERT_TRUE(mcd->HasSimulcast());
+ auto simulcast = mcd->simulcast_description();
+ EXPECT_THAT(simulcast.receive_layers(), IsEmpty());
+ // The size is validated separately because GetAllLayers() flattens the list.
+ EXPECT_THAT(simulcast.send_layers(), SizeIs(3));
+ std::vector<SimulcastLayer> result = simulcast.send_layers().GetAllLayers();
+ EXPECT_THAT(result, ElementsAreArray(layers));
+ auto streams = mcd->streams();
+ ASSERT_EQ(1u, streams.size());
+ auto stream = streams[0];
+ EXPECT_FALSE(stream.has_ssrcs());
+ EXPECT_TRUE(stream.has_rids());
+ std::vector<std::string> result_rids;
+ absl::c_transform(stream.rids(), std::back_inserter(result_rids),
+ [](const RidDescription& rid) { return rid.rid; });
+ EXPECT_THAT(result_rids, ElementsAreArray(rids));
+}
+
+// Checks that Simulcast layers propagate to the sender parameters.
+TEST_F(PeerConnectionSimulcastTests, SimulcastLayersAreSetInSender) {
+ auto local = CreatePeerConnectionWrapper();
+ auto remote = CreatePeerConnectionWrapper();
+ auto layers = CreateLayers({"f", "h", "q"}, true);
+ auto transceiver = AddTransceiver(local.get(), layers);
+ auto offer = local->CreateOfferAndSetAsLocal();
+ {
+ SCOPED_TRACE("after create offer");
+ ValidateTransceiverParameters(transceiver, layers);
+ }
+ // Remove simulcast as the second peer connection won't support it.
+ auto simulcast = RemoveSimulcast(offer.get());
+ std::string error;
+ EXPECT_TRUE(remote->SetRemoteDescription(std::move(offer), &error)) << error;
+ auto answer = remote->CreateAnswerAndSetAsLocal();
+
+ // Setup an answer that mimics a server accepting simulcast.
+ auto mcd_answer = answer->description()->contents()[0].media_description();
+ mcd_answer->mutable_streams().clear();
+ auto simulcast_layers = simulcast.send_layers().GetAllLayers();
+ auto& receive_layers = mcd_answer->simulcast_description().receive_layers();
+ for (const auto& layer : simulcast_layers) {
+ receive_layers.AddLayer(layer);
+ }
+ EXPECT_TRUE(local->SetRemoteDescription(std::move(answer), &error)) << error;
+ {
+ SCOPED_TRACE("after set remote");
+ ValidateTransceiverParameters(transceiver, layers);
+ }
+}
+
+// Checks that paused Simulcast layers propagate to the sender parameters.
+TEST_F(PeerConnectionSimulcastTests, PausedSimulcastLayersAreDisabledInSender) {
+ auto local = CreatePeerConnectionWrapper();
+ auto remote = CreatePeerConnectionWrapper();
+ auto layers = CreateLayers({"f", "h", "q"}, {true, false, true});
+ auto server_layers = CreateLayers({"f", "h", "q"}, {true, false, false});
+ RTC_DCHECK_EQ(layers.size(), server_layers.size());
+ auto transceiver = AddTransceiver(local.get(), layers);
+ auto offer = local->CreateOfferAndSetAsLocal();
+ {
+ SCOPED_TRACE("after create offer");
+ ValidateTransceiverParameters(transceiver, layers);
+ }
+
+ // Remove simulcast as the second peer connection won't support it.
+ RemoveSimulcast(offer.get());
+ std::string error;
+ EXPECT_TRUE(remote->SetRemoteDescription(std::move(offer), &error)) << error;
+ auto answer = remote->CreateAnswerAndSetAsLocal();
+
+ // Setup an answer that mimics a server accepting simulcast.
+ auto mcd_answer = answer->description()->contents()[0].media_description();
+ mcd_answer->mutable_streams().clear();
+ auto& receive_layers = mcd_answer->simulcast_description().receive_layers();
+ for (const SimulcastLayer& layer : server_layers) {
+ receive_layers.AddLayer(layer);
+ }
+ EXPECT_TRUE(local->SetRemoteDescription(std::move(answer), &error)) << error;
+ {
+ SCOPED_TRACE("after set remote");
+ ValidateTransceiverParameters(transceiver, server_layers);
+ }
+}
+
+// Checks that when Simulcast is not supported by the remote party, then all
+// the layers (except the first) are removed.
+TEST_F(PeerConnectionSimulcastTests, SimulcastRejectedRemovesExtraLayers) {
+ auto local = CreatePeerConnectionWrapper();
+ auto remote = CreatePeerConnectionWrapper();
+ auto layers = CreateLayers({"1", "2", "3", "4"}, true);
+ auto transceiver = AddTransceiver(local.get(), layers);
+ ExchangeOfferAnswer(local.get(), remote.get(), {});
+ auto parameters = transceiver->sender()->GetParameters();
+ // Should only have the first layer.
+ EXPECT_THAT(parameters.encodings,
+ ElementsAre(Field("rid", &RtpEncodingParameters::rid, Eq("1"))));
+}
+
+// Checks that if Simulcast is supported by remote party, but some layers are
+// rejected, then only rejected layers are removed from the sender.
+TEST_F(PeerConnectionSimulcastTests, RejectedSimulcastLayersAreDeactivated) {
+ auto local = CreatePeerConnectionWrapper();
+ auto remote = CreatePeerConnectionWrapper();
+ auto layers = CreateLayers({"1", "2", "3"}, true);
+ auto expected_layers = CreateLayers({"2", "3"}, true);
+ auto transceiver = AddTransceiver(local.get(), layers);
+ auto offer = local->CreateOfferAndSetAsLocal();
+ {
+ SCOPED_TRACE("after create offer");
+ ValidateTransceiverParameters(transceiver, layers);
+ }
+ // Remove simulcast as the second peer connection won't support it.
+ auto removed_simulcast = RemoveSimulcast(offer.get());
+ std::string error;
+ EXPECT_TRUE(remote->SetRemoteDescription(std::move(offer), &error)) << error;
+ auto answer = remote->CreateAnswerAndSetAsLocal();
+ auto mcd_answer = answer->description()->contents()[0].media_description();
+ // Setup the answer to look like a server response.
+ // Remove one of the layers to reject it in the answer.
+ auto simulcast_layers = removed_simulcast.send_layers().GetAllLayers();
+ simulcast_layers.erase(simulcast_layers.begin());
+ auto& receive_layers = mcd_answer->simulcast_description().receive_layers();
+ for (const auto& layer : simulcast_layers) {
+ receive_layers.AddLayer(layer);
+ }
+ ASSERT_TRUE(mcd_answer->HasSimulcast());
+ EXPECT_TRUE(local->SetRemoteDescription(std::move(answer), &error)) << error;
+ {
+ SCOPED_TRACE("after set remote");
+ ValidateTransceiverParameters(transceiver, expected_layers);
+ }
+}
+
+// Checks that simulcast is set up correctly when the server sends an offer
+// requesting to receive simulcast.
+TEST_F(PeerConnectionSimulcastTests, ServerSendsOfferToReceiveSimulcast) {
+ auto local = CreatePeerConnectionWrapper();
+ auto remote = CreatePeerConnectionWrapper();
+ auto layers = CreateLayers({"f", "h", "q"}, true);
+ AddTransceiver(local.get(), layers);
+ auto offer = local->CreateOfferAndSetAsLocal();
+ // Remove simulcast as a sender and set it up as a receiver.
+ RemoveSimulcast(offer.get());
+ AddRequestToReceiveSimulcast(layers, offer.get());
+ std::string error;
+ EXPECT_TRUE(remote->SetRemoteDescription(std::move(offer), &error)) << error;
+ auto transceiver = remote->pc()->GetTransceivers()[0];
+ transceiver->SetDirectionWithError(RtpTransceiverDirection::kSendRecv);
+ EXPECT_TRUE(remote->CreateAnswerAndSetAsLocal());
+ ValidateTransceiverParameters(transceiver, layers);
+}
+
+// Checks that SetRemoteDescription doesn't attempt to associate a transceiver
+// when simulcast is requested by the server.
+TEST_F(PeerConnectionSimulcastTests, TransceiverIsNotRecycledWithSimulcast) {
+ auto local = CreatePeerConnectionWrapper();
+ auto remote = CreatePeerConnectionWrapper();
+ auto layers = CreateLayers({"f", "h", "q"}, true);
+ AddTransceiver(local.get(), layers);
+ auto offer = local->CreateOfferAndSetAsLocal();
+ // Remove simulcast as a sender and set it up as a receiver.
+ RemoveSimulcast(offer.get());
+ AddRequestToReceiveSimulcast(layers, offer.get());
+ // Call AddTrack so that a transceiver is created.
+ remote->AddVideoTrack("fake_track");
+ std::string error;
+ EXPECT_TRUE(remote->SetRemoteDescription(std::move(offer), &error)) << error;
+ auto transceivers = remote->pc()->GetTransceivers();
+ ASSERT_EQ(2u, transceivers.size());
+ auto transceiver = transceivers[1];
+ transceiver->SetDirectionWithError(RtpTransceiverDirection::kSendRecv);
+ EXPECT_TRUE(remote->CreateAnswerAndSetAsLocal());
+ ValidateTransceiverParameters(transceiver, layers);
+}
+
+// Checks that if the number of layers changes during negotiation, then any
+// outstanding get/set parameters transaction is invalidated.
+TEST_F(PeerConnectionSimulcastTests, ParametersAreInvalidatedWhenLayersChange) {
+ auto local = CreatePeerConnectionWrapper();
+ auto remote = CreatePeerConnectionWrapper();
+ auto layers = CreateLayers({"1", "2", "3"}, true);
+ auto transceiver = AddTransceiver(local.get(), layers);
+ auto parameters = transceiver->sender()->GetParameters();
+ ASSERT_EQ(3u, parameters.encodings.size());
+ // Response will reject simulcast altogether.
+ ExchangeOfferAnswer(local.get(), remote.get(), {});
+ auto result = transceiver->sender()->SetParameters(parameters);
+ EXPECT_EQ(RTCErrorType::INVALID_STATE, result.type());
+}
+
+// Checks that even though negotiation modifies the sender's parameters, an
+// outstanding get/set parameters transaction is not invalidated.
+// This test negotiates twice because the initial parameters before negotiation
+// are missing critical information and cannot be set on the sender.
+TEST_F(PeerConnectionSimulcastTests,
+ NegotiationDoesNotInvalidateParameterTransactions) {
+ auto local = CreatePeerConnectionWrapper();
+ auto remote = CreatePeerConnectionWrapper();
+ auto layers = CreateLayers({"1", "2", "3"}, true);
+ auto expected_layers = CreateLayers({"1", "2", "3"}, false);
+ auto transceiver = AddTransceiver(local.get(), layers);
+ ExchangeOfferAnswer(local.get(), remote.get(), expected_layers);
+
+ // Verify that negotiation does not invalidate the parameters.
+ auto parameters = transceiver->sender()->GetParameters();
+ ExchangeOfferAnswer(local.get(), remote.get(), expected_layers);
+
+ auto result = transceiver->sender()->SetParameters(parameters);
+ EXPECT_TRUE(result.ok());
+ ValidateTransceiverParameters(transceiver, expected_layers);
+}
+
+// Tests that a simulcast answer is rejected if the RID extension is not
+// negotiated.
+TEST_F(PeerConnectionSimulcastTests, NegotiationDoesNotHaveRidExtensionFails) {
+ auto local = CreatePeerConnectionWrapper();
+ auto remote = CreatePeerConnectionWrapper();
+ auto layers = CreateLayers({"1", "2", "3"}, true);
+ auto expected_layers = CreateLayers({"1"}, true);
+ auto transceiver = AddTransceiver(local.get(), layers);
+ auto offer = local->CreateOfferAndSetAsLocal();
+ // Remove simulcast as the second peer connection won't support it.
+ RemoveSimulcast(offer.get());
+ std::string err;
+ EXPECT_TRUE(remote->SetRemoteDescription(std::move(offer), &err)) << err;
+ auto answer = remote->CreateAnswerAndSetAsLocal();
+ // Setup the answer to look like a server response.
+ // Drop the RID header extension.
+ auto mcd_answer = answer->description()->contents()[0].media_description();
+ auto& receive_layers = mcd_answer->simulcast_description().receive_layers();
+ for (const SimulcastLayer& layer : layers) {
+ receive_layers.AddLayer(layer);
+ }
+ cricket::RtpHeaderExtensions extensions;
+ for (auto extension : mcd_answer->rtp_header_extensions()) {
+ if (extension.uri != RtpExtension::kRidUri) {
+ extensions.push_back(extension);
+ }
+ }
+ mcd_answer->set_rtp_header_extensions(extensions);
+ EXPECT_EQ(layers.size(), mcd_answer->simulcast_description()
+ .receive_layers()
+ .GetAllLayers()
+ .size());
+ EXPECT_FALSE(local->SetRemoteDescription(std::move(answer), &err)) << err;
+}
+
+TEST_F(PeerConnectionSimulcastTests, SimulcastAudioRejected) {
+ auto local = CreatePeerConnectionWrapper();
+ auto remote = CreatePeerConnectionWrapper();
+ auto layers = CreateLayers({"1", "2", "3", "4"}, true);
+ auto transceiver =
+ AddTransceiver(local.get(), layers, cricket::MEDIA_TYPE_AUDIO);
+ // Should only have the first layer.
+ auto parameters = transceiver->sender()->GetParameters();
+ EXPECT_EQ(1u, parameters.encodings.size());
+ EXPECT_THAT(parameters.encodings,
+ ElementsAre(Field("rid", &RtpEncodingParameters::rid, Eq(""))));
+ ExchangeOfferAnswer(local.get(), remote.get(), {});
+ // Still have a single layer after negotiation
+ parameters = transceiver->sender()->GetParameters();
+ EXPECT_EQ(1u, parameters.encodings.size());
+ EXPECT_THAT(parameters.encodings,
+ ElementsAre(Field("rid", &RtpEncodingParameters::rid, Eq(""))));
+}
+
+// Check that modifying the offer to remove simulcast and at the same
+// time leaving in a RID line does not cause an exception.
+TEST_F(PeerConnectionSimulcastTests, SimulcastSldModificationRejected) {
+ auto local = CreatePeerConnectionWrapper();
+ auto remote = CreatePeerConnectionWrapper();
+ auto layers = CreateLayers({"1", "2", "3"}, true);
+ AddTransceiver(local.get(), layers);
+ auto offer = local->CreateOffer();
+ std::string as_string;
+ EXPECT_TRUE(offer->ToString(&as_string));
+ auto simulcast_marker = "a=rid:3 send\r\na=simulcast:send 1;2;3\r\n";
+ auto pos = as_string.find(simulcast_marker);
+ EXPECT_NE(pos, std::string::npos);
+ as_string.erase(pos, strlen(simulcast_marker));
+ SdpParseError parse_error;
+ auto modified_offer =
+ CreateSessionDescription(SdpType::kOffer, as_string, &parse_error);
+ EXPECT_TRUE(modified_offer);
+ EXPECT_TRUE(local->SetLocalDescription(std::move(modified_offer)));
+}
+
+#if RTC_METRICS_ENABLED
+
+const int kMaxLayersInMetricsTest = 8;
+
+// Checks that the number of send encodings is logged in a metric.
+TEST_P(PeerConnectionSimulcastMetricsTests, NumberOfSendEncodingsIsLogged) {
+ auto local = CreatePeerConnectionWrapper();
+ auto num_layers = GetParam();
+ auto layers = ::CreateLayers(num_layers, true);
+ AddTransceiver(local.get(), layers);
+ EXPECT_EQ(1, metrics::NumSamples(
+ "WebRTC.PeerConnection.Simulcast.NumberOfSendEncodings"));
+ EXPECT_EQ(1, metrics::NumEvents(
+ "WebRTC.PeerConnection.Simulcast.NumberOfSendEncodings",
+ num_layers));
+}
+
+INSTANTIATE_TEST_SUITE_P(NumberOfSendEncodings,
+ PeerConnectionSimulcastMetricsTests,
+ ::testing::Range(0, kMaxLayersInMetricsTest));
+#endif
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/peer_connection_svc_integrationtest.cc b/third_party/libwebrtc/pc/peer_connection_svc_integrationtest.cc
new file mode 100644
index 0000000000..672f3eef99
--- /dev/null
+++ b/third_party/libwebrtc/pc/peer_connection_svc_integrationtest.cc
@@ -0,0 +1,308 @@
+/*
+ * Copyright 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Integration tests for PeerConnection.
+// These tests exercise a full stack for the SVC extension.
+
+#include <stdint.h>
+
+#include <functional>
+#include <vector>
+
+#include "absl/strings/match.h"
+#include "api/rtc_error.h"
+#include "api/rtp_parameters.h"
+#include "api/rtp_transceiver_interface.h"
+#include "api/scoped_refptr.h"
+#include "pc/test/integration_test_helpers.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/helpers.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+
+namespace {
+
+class PeerConnectionSVCIntegrationTest
+ : public PeerConnectionIntegrationBaseTest {
+ protected:
+ PeerConnectionSVCIntegrationTest()
+ : PeerConnectionIntegrationBaseTest(SdpSemantics::kUnifiedPlan) {}
+
+ RTCError SetCodecPreferences(
+ rtc::scoped_refptr<webrtc::RtpTransceiverInterface> transceiver,
+ absl::string_view codec_name) {
+ webrtc::RtpCapabilities capabilities =
+ caller()->pc_factory()->GetRtpSenderCapabilities(
+ cricket::MEDIA_TYPE_VIDEO);
+ std::vector<RtpCodecCapability> codecs;
+ for (const webrtc::RtpCodecCapability& codec_capability :
+ capabilities.codecs) {
+ if (codec_capability.name == codec_name)
+ codecs.push_back(codec_capability);
+ }
+ return transceiver->SetCodecPreferences(codecs);
+ }
+};
+
+TEST_F(PeerConnectionSVCIntegrationTest, AddTransceiverAcceptsL1T1) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+ webrtc::RtpTransceiverInit init;
+ webrtc::RtpEncodingParameters encoding_parameters;
+ encoding_parameters.scalability_mode = "L1T1";
+ init.send_encodings.push_back(encoding_parameters);
+ auto transceiver_or_error =
+ caller()->pc()->AddTransceiver(caller()->CreateLocalVideoTrack(), init);
+ EXPECT_TRUE(transceiver_or_error.ok());
+}
+
+TEST_F(PeerConnectionSVCIntegrationTest, AddTransceiverAcceptsL3T3) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+ webrtc::RtpTransceiverInit init;
+ webrtc::RtpEncodingParameters encoding_parameters;
+ encoding_parameters.scalability_mode = "L3T3";
+ init.send_encodings.push_back(encoding_parameters);
+ auto transceiver_or_error =
+ caller()->pc()->AddTransceiver(caller()->CreateLocalVideoTrack(), init);
+ EXPECT_TRUE(transceiver_or_error.ok());
+}
+
+TEST_F(PeerConnectionSVCIntegrationTest,
+ AddTransceiverRejectsUnknownScalabilityMode) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+ webrtc::RtpTransceiverInit init;
+ webrtc::RtpEncodingParameters encoding_parameters;
+ encoding_parameters.scalability_mode = "FOOBAR";
+ init.send_encodings.push_back(encoding_parameters);
+ auto transceiver_or_error =
+ caller()->pc()->AddTransceiver(caller()->CreateLocalVideoTrack(), init);
+ EXPECT_FALSE(transceiver_or_error.ok());
+ EXPECT_EQ(transceiver_or_error.error().type(),
+ webrtc::RTCErrorType::UNSUPPORTED_OPERATION);
+}
+
+TEST_F(PeerConnectionSVCIntegrationTest, SetParametersAcceptsL1T3WithVP8) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+
+ webrtc::RtpCapabilities capabilities =
+ caller()->pc_factory()->GetRtpSenderCapabilities(
+ cricket::MEDIA_TYPE_VIDEO);
+ std::vector<RtpCodecCapability> vp8_codec;
+ for (const webrtc::RtpCodecCapability& codec_capability :
+ capabilities.codecs) {
+ if (codec_capability.name == cricket::kVp8CodecName)
+ vp8_codec.push_back(codec_capability);
+ }
+
+ webrtc::RtpTransceiverInit init;
+ webrtc::RtpEncodingParameters encoding_parameters;
+ init.send_encodings.push_back(encoding_parameters);
+ auto transceiver_or_error =
+ caller()->pc()->AddTransceiver(caller()->CreateLocalVideoTrack(), init);
+ ASSERT_TRUE(transceiver_or_error.ok());
+ auto transceiver = transceiver_or_error.MoveValue();
+ EXPECT_TRUE(transceiver->SetCodecPreferences(vp8_codec).ok());
+
+ webrtc::RtpParameters parameters = transceiver->sender()->GetParameters();
+ ASSERT_EQ(parameters.encodings.size(), 1u);
+ parameters.encodings[0].scalability_mode = "L1T3";
+ auto result = transceiver->sender()->SetParameters(parameters);
+ EXPECT_TRUE(result.ok());
+}
+
+TEST_F(PeerConnectionSVCIntegrationTest, SetParametersRejectsL3T3WithVP8) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+
+ webrtc::RtpTransceiverInit init;
+ webrtc::RtpEncodingParameters encoding_parameters;
+ init.send_encodings.push_back(encoding_parameters);
+ auto transceiver_or_error =
+ caller()->pc()->AddTransceiver(caller()->CreateLocalVideoTrack(), init);
+ ASSERT_TRUE(transceiver_or_error.ok());
+ auto transceiver = transceiver_or_error.MoveValue();
+ EXPECT_TRUE(SetCodecPreferences(transceiver, cricket::kVp8CodecName).ok());
+
+ webrtc::RtpParameters parameters = transceiver->sender()->GetParameters();
+ ASSERT_EQ(parameters.encodings.size(), 1u);
+ parameters.encodings[0].scalability_mode = "L3T3";
+ auto result = transceiver->sender()->SetParameters(parameters);
+ EXPECT_FALSE(result.ok());
+ EXPECT_EQ(result.type(), webrtc::RTCErrorType::INVALID_MODIFICATION);
+}
+
+TEST_F(PeerConnectionSVCIntegrationTest,
+ SetParametersAcceptsL1T3WithVP8AfterNegotiation) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+
+ webrtc::RtpTransceiverInit init;
+ webrtc::RtpEncodingParameters encoding_parameters;
+ init.send_encodings.push_back(encoding_parameters);
+ auto transceiver_or_error =
+ caller()->pc()->AddTransceiver(caller()->CreateLocalVideoTrack(), init);
+ ASSERT_TRUE(transceiver_or_error.ok());
+ auto transceiver = transceiver_or_error.MoveValue();
+ EXPECT_TRUE(SetCodecPreferences(transceiver, cricket::kVp8CodecName).ok());
+
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+
+ webrtc::RtpParameters parameters = transceiver->sender()->GetParameters();
+ ASSERT_EQ(parameters.encodings.size(), 1u);
+ parameters.encodings[0].scalability_mode = "L1T3";
+ auto result = transceiver->sender()->SetParameters(parameters);
+ EXPECT_TRUE(result.ok());
+}
+
+TEST_F(PeerConnectionSVCIntegrationTest,
+ SetParametersAcceptsL3T3WithVP9AfterNegotiation) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+
+ webrtc::RtpTransceiverInit init;
+ webrtc::RtpEncodingParameters encoding_parameters;
+ init.send_encodings.push_back(encoding_parameters);
+ auto transceiver_or_error =
+ caller()->pc()->AddTransceiver(caller()->CreateLocalVideoTrack(), init);
+ ASSERT_TRUE(transceiver_or_error.ok());
+ auto transceiver = transceiver_or_error.MoveValue();
+ EXPECT_TRUE(SetCodecPreferences(transceiver, cricket::kVp9CodecName).ok());
+
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+
+ webrtc::RtpParameters parameters = transceiver->sender()->GetParameters();
+ ASSERT_EQ(parameters.encodings.size(), 1u);
+ parameters.encodings[0].scalability_mode = "L3T3";
+ auto result = transceiver->sender()->SetParameters(parameters);
+ EXPECT_TRUE(result.ok());
+}
+
+TEST_F(PeerConnectionSVCIntegrationTest,
+ SetParametersRejectsL3T3WithVP8AfterNegotiation) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+
+ webrtc::RtpTransceiverInit init;
+ webrtc::RtpEncodingParameters encoding_parameters;
+ init.send_encodings.push_back(encoding_parameters);
+ auto transceiver_or_error =
+ caller()->pc()->AddTransceiver(caller()->CreateLocalVideoTrack(), init);
+ ASSERT_TRUE(transceiver_or_error.ok());
+ auto transceiver = transceiver_or_error.MoveValue();
+ EXPECT_TRUE(SetCodecPreferences(transceiver, cricket::kVp8CodecName).ok());
+
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+
+ webrtc::RtpParameters parameters = transceiver->sender()->GetParameters();
+ ASSERT_EQ(parameters.encodings.size(), 1u);
+ parameters.encodings[0].scalability_mode = "L3T3";
+ auto result = transceiver->sender()->SetParameters(parameters);
+ EXPECT_FALSE(result.ok());
+ EXPECT_EQ(result.type(), webrtc::RTCErrorType::INVALID_MODIFICATION);
+}
+
+TEST_F(PeerConnectionSVCIntegrationTest,
+ SetParametersRejectsInvalidModeWithVP9AfterNegotiation) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+
+ webrtc::RtpTransceiverInit init;
+ webrtc::RtpEncodingParameters encoding_parameters;
+ init.send_encodings.push_back(encoding_parameters);
+ auto transceiver_or_error =
+ caller()->pc()->AddTransceiver(caller()->CreateLocalVideoTrack(), init);
+ ASSERT_TRUE(transceiver_or_error.ok());
+ auto transceiver = transceiver_or_error.MoveValue();
+ EXPECT_TRUE(SetCodecPreferences(transceiver, cricket::kVp9CodecName).ok());
+
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+
+ webrtc::RtpParameters parameters = transceiver->sender()->GetParameters();
+ ASSERT_EQ(parameters.encodings.size(), 1u);
+ parameters.encodings[0].scalability_mode = "FOOBAR";
+ auto result = transceiver->sender()->SetParameters(parameters);
+ EXPECT_FALSE(result.ok());
+ EXPECT_EQ(result.type(), webrtc::RTCErrorType::INVALID_MODIFICATION);
+}
+
+TEST_F(PeerConnectionSVCIntegrationTest, FallbackToL1Tx) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+
+ webrtc::RtpTransceiverInit init;
+ webrtc::RtpEncodingParameters encoding_parameters;
+ init.send_encodings.push_back(encoding_parameters);
+ auto transceiver_or_error =
+ caller()->pc()->AddTransceiver(caller()->CreateLocalVideoTrack(), init);
+ ASSERT_TRUE(transceiver_or_error.ok());
+ auto caller_transceiver = transceiver_or_error.MoveValue();
+
+ webrtc::RtpCapabilities capabilities =
+ caller()->pc_factory()->GetRtpSenderCapabilities(
+ cricket::MEDIA_TYPE_VIDEO);
+ std::vector<RtpCodecCapability> send_codecs = capabilities.codecs;
+ // Only keep VP9 in the caller
+ send_codecs.erase(std::partition(send_codecs.begin(), send_codecs.end(),
+ [](const auto& codec) -> bool {
+ return codec.name ==
+ cricket::kVp9CodecName;
+ }),
+ send_codecs.end());
+ ASSERT_FALSE(send_codecs.empty());
+ caller_transceiver->SetCodecPreferences(send_codecs);
+
+ // L3T3 should be supported by VP9
+ webrtc::RtpParameters parameters =
+ caller_transceiver->sender()->GetParameters();
+ ASSERT_EQ(parameters.encodings.size(), 1u);
+ parameters.encodings[0].scalability_mode = "L3T3";
+ auto result = caller_transceiver->sender()->SetParameters(parameters);
+ EXPECT_TRUE(result.ok());
+
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+
+ parameters = caller_transceiver->sender()->GetParameters();
+ ASSERT_TRUE(parameters.encodings[0].scalability_mode.has_value());
+ EXPECT_TRUE(
+ absl::StartsWith(*parameters.encodings[0].scalability_mode, "L3T3"));
+
+ // Keep only VP8 in the caller
+ send_codecs = capabilities.codecs;
+ send_codecs.erase(std::partition(send_codecs.begin(), send_codecs.end(),
+ [](const auto& codec) -> bool {
+ return codec.name ==
+ cricket::kVp8CodecName;
+ }),
+ send_codecs.end());
+ ASSERT_FALSE(send_codecs.empty());
+ caller_transceiver->SetCodecPreferences(send_codecs);
+
+ // Renegotiate to force the new codec list to be used
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+
+ // Fallback should happen and L3T3 is not used anymore
+ parameters = caller_transceiver->sender()->GetParameters();
+ ASSERT_TRUE(parameters.encodings[0].scalability_mode.has_value());
+ EXPECT_TRUE(
+ absl::StartsWith(*parameters.encodings[0].scalability_mode, "L1T"));
+}
+} // namespace
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/peer_connection_wrapper.cc b/third_party/libwebrtc/pc/peer_connection_wrapper.cc
new file mode 100644
index 0000000000..44f4256b10
--- /dev/null
+++ b/third_party/libwebrtc/pc/peer_connection_wrapper.cc
@@ -0,0 +1,350 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/peer_connection_wrapper.h"
+
+#include <stdint.h>
+
+#include <utility>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/function_view.h"
+#include "api/set_remote_description_observer_interface.h"
+#include "pc/sdp_utils.h"
+#include "pc/test/fake_video_track_source.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/logging.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+
+using RTCOfferAnswerOptions = PeerConnectionInterface::RTCOfferAnswerOptions;
+
+namespace {
+const uint32_t kDefaultTimeout = 10000U;
+}
+
+PeerConnectionWrapper::PeerConnectionWrapper(
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> pc_factory,
+ rtc::scoped_refptr<PeerConnectionInterface> pc,
+ std::unique_ptr<MockPeerConnectionObserver> observer)
+ : pc_factory_(std::move(pc_factory)),
+ observer_(std::move(observer)),
+ pc_(std::move(pc)) {
+ RTC_DCHECK(pc_factory_);
+ RTC_DCHECK(pc_);
+ RTC_DCHECK(observer_);
+ observer_->SetPeerConnectionInterface(pc_.get());
+}
+
+PeerConnectionWrapper::~PeerConnectionWrapper() {
+ if (pc_)
+ pc_->Close();
+}
+
+PeerConnectionFactoryInterface* PeerConnectionWrapper::pc_factory() {
+ return pc_factory_.get();
+}
+
+PeerConnectionInterface* PeerConnectionWrapper::pc() {
+ return pc_.get();
+}
+
+MockPeerConnectionObserver* PeerConnectionWrapper::observer() {
+ return observer_.get();
+}
+
+std::unique_ptr<SessionDescriptionInterface>
+PeerConnectionWrapper::CreateOffer() {
+ return CreateOffer(RTCOfferAnswerOptions());
+}
+
+std::unique_ptr<SessionDescriptionInterface> PeerConnectionWrapper::CreateOffer(
+ const PeerConnectionInterface::RTCOfferAnswerOptions& options,
+ std::string* error_out) {
+ return CreateSdp(
+ [this, options](CreateSessionDescriptionObserver* observer) {
+ pc()->CreateOffer(observer, options);
+ },
+ error_out);
+}
+
+std::unique_ptr<SessionDescriptionInterface>
+PeerConnectionWrapper::CreateOfferAndSetAsLocal() {
+ return CreateOfferAndSetAsLocal(RTCOfferAnswerOptions());
+}
+
+std::unique_ptr<SessionDescriptionInterface>
+PeerConnectionWrapper::CreateOfferAndSetAsLocal(
+ const PeerConnectionInterface::RTCOfferAnswerOptions& options) {
+ auto offer = CreateOffer(options);
+ if (!offer) {
+ return nullptr;
+ }
+ EXPECT_TRUE(SetLocalDescription(CloneSessionDescription(offer.get())));
+ return offer;
+}
+
+std::unique_ptr<SessionDescriptionInterface>
+PeerConnectionWrapper::CreateAnswer() {
+ return CreateAnswer(RTCOfferAnswerOptions());
+}
+
+std::unique_ptr<SessionDescriptionInterface>
+PeerConnectionWrapper::CreateAnswer(
+ const PeerConnectionInterface::RTCOfferAnswerOptions& options,
+ std::string* error_out) {
+ return CreateSdp(
+ [this, options](CreateSessionDescriptionObserver* observer) {
+ pc()->CreateAnswer(observer, options);
+ },
+ error_out);
+}
+
+std::unique_ptr<SessionDescriptionInterface>
+PeerConnectionWrapper::CreateAnswerAndSetAsLocal() {
+ return CreateAnswerAndSetAsLocal(RTCOfferAnswerOptions());
+}
+
+std::unique_ptr<SessionDescriptionInterface>
+PeerConnectionWrapper::CreateAnswerAndSetAsLocal(
+ const PeerConnectionInterface::RTCOfferAnswerOptions& options) {
+ auto answer = CreateAnswer(options);
+ if (!answer) {
+ return nullptr;
+ }
+ EXPECT_TRUE(SetLocalDescription(CloneSessionDescription(answer.get())));
+ return answer;
+}
+
+std::unique_ptr<SessionDescriptionInterface>
+PeerConnectionWrapper::CreateRollback() {
+ return CreateSessionDescription(SdpType::kRollback, "");
+}
+
+std::unique_ptr<SessionDescriptionInterface> PeerConnectionWrapper::CreateSdp(
+ rtc::FunctionView<void(CreateSessionDescriptionObserver*)> fn,
+ std::string* error_out) {
+ auto observer = rtc::make_ref_counted<MockCreateSessionDescriptionObserver>();
+ fn(observer.get());
+ EXPECT_EQ_WAIT(true, observer->called(), kDefaultTimeout);
+ if (error_out && !observer->result()) {
+ *error_out = observer->error();
+ }
+ return observer->MoveDescription();
+}
+
+bool PeerConnectionWrapper::SetLocalDescription(
+ std::unique_ptr<SessionDescriptionInterface> desc,
+ std::string* error_out) {
+ return SetSdp(
+ [this, &desc](SetSessionDescriptionObserver* observer) {
+ pc()->SetLocalDescription(observer, desc.release());
+ },
+ error_out);
+}
+
+bool PeerConnectionWrapper::SetRemoteDescription(
+ std::unique_ptr<SessionDescriptionInterface> desc,
+ std::string* error_out) {
+ return SetSdp(
+ [this, &desc](SetSessionDescriptionObserver* observer) {
+ pc()->SetRemoteDescription(observer, desc.release());
+ },
+ error_out);
+}
+
+bool PeerConnectionWrapper::SetRemoteDescription(
+ std::unique_ptr<SessionDescriptionInterface> desc,
+ RTCError* error_out) {
+ auto observer = rtc::make_ref_counted<FakeSetRemoteDescriptionObserver>();
+ pc()->SetRemoteDescription(std::move(desc), observer);
+ EXPECT_EQ_WAIT(true, observer->called(), kDefaultTimeout);
+ bool ok = observer->error().ok();
+ if (error_out)
+ *error_out = std::move(observer->error());
+ return ok;
+}
+
+bool PeerConnectionWrapper::SetSdp(
+ rtc::FunctionView<void(SetSessionDescriptionObserver*)> fn,
+ std::string* error_out) {
+ auto observer = rtc::make_ref_counted<MockSetSessionDescriptionObserver>();
+ fn(observer.get());
+ EXPECT_EQ_WAIT(true, observer->called(), kDefaultTimeout);
+ if (error_out && !observer->result()) {
+ *error_out = observer->error();
+ }
+ return observer->result();
+}
+
+bool PeerConnectionWrapper::ExchangeOfferAnswerWith(
+ PeerConnectionWrapper* answerer) {
+ return ExchangeOfferAnswerWith(answerer, RTCOfferAnswerOptions(),
+ RTCOfferAnswerOptions());
+}
+
+bool PeerConnectionWrapper::ExchangeOfferAnswerWith(
+ PeerConnectionWrapper* answerer,
+ const PeerConnectionInterface::RTCOfferAnswerOptions& offer_options,
+ const PeerConnectionInterface::RTCOfferAnswerOptions& answer_options) {
+ RTC_DCHECK(answerer);
+ if (answerer == this) {
+ RTC_LOG(LS_ERROR) << "Cannot exchange offer/answer with ourself!";
+ return false;
+ }
+ auto offer = CreateOffer(offer_options);
+ EXPECT_TRUE(offer);
+ if (!offer) {
+ return false;
+ }
+ bool set_local_offer =
+ SetLocalDescription(CloneSessionDescription(offer.get()));
+ EXPECT_TRUE(set_local_offer);
+ if (!set_local_offer) {
+ return false;
+ }
+ bool set_remote_offer = answerer->SetRemoteDescription(std::move(offer));
+ EXPECT_TRUE(set_remote_offer);
+ if (!set_remote_offer) {
+ return false;
+ }
+ auto answer = answerer->CreateAnswer(answer_options);
+ EXPECT_TRUE(answer);
+ if (!answer) {
+ return false;
+ }
+ bool set_local_answer =
+ answerer->SetLocalDescription(CloneSessionDescription(answer.get()));
+ EXPECT_TRUE(set_local_answer);
+ if (!set_local_answer) {
+ return false;
+ }
+ bool set_remote_answer = SetRemoteDescription(std::move(answer));
+ EXPECT_TRUE(set_remote_answer);
+ return set_remote_answer;
+}
+
+rtc::scoped_refptr<RtpTransceiverInterface>
+PeerConnectionWrapper::AddTransceiver(cricket::MediaType media_type) {
+ RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>> result =
+ pc()->AddTransceiver(media_type);
+ EXPECT_EQ(RTCErrorType::NONE, result.error().type());
+ return result.MoveValue();
+}
+
+rtc::scoped_refptr<RtpTransceiverInterface>
+PeerConnectionWrapper::AddTransceiver(cricket::MediaType media_type,
+ const RtpTransceiverInit& init) {
+ RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>> result =
+ pc()->AddTransceiver(media_type, init);
+ EXPECT_EQ(RTCErrorType::NONE, result.error().type());
+ return result.MoveValue();
+}
+
+rtc::scoped_refptr<RtpTransceiverInterface>
+PeerConnectionWrapper::AddTransceiver(
+ rtc::scoped_refptr<MediaStreamTrackInterface> track) {
+ RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>> result =
+ pc()->AddTransceiver(track);
+ EXPECT_EQ(RTCErrorType::NONE, result.error().type());
+ return result.MoveValue();
+}
+
+rtc::scoped_refptr<RtpTransceiverInterface>
+PeerConnectionWrapper::AddTransceiver(
+ rtc::scoped_refptr<MediaStreamTrackInterface> track,
+ const RtpTransceiverInit& init) {
+ RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>> result =
+ pc()->AddTransceiver(track, init);
+ EXPECT_EQ(RTCErrorType::NONE, result.error().type());
+ return result.MoveValue();
+}
+
+rtc::scoped_refptr<AudioTrackInterface> PeerConnectionWrapper::CreateAudioTrack(
+ const std::string& label) {
+ return pc_factory()->CreateAudioTrack(label, nullptr);
+}
+
+rtc::scoped_refptr<VideoTrackInterface> PeerConnectionWrapper::CreateVideoTrack(
+ const std::string& label) {
+ return pc_factory()->CreateVideoTrack(FakeVideoTrackSource::Create(), label);
+}
+
+rtc::scoped_refptr<RtpSenderInterface> PeerConnectionWrapper::AddTrack(
+ rtc::scoped_refptr<MediaStreamTrackInterface> track,
+ const std::vector<std::string>& stream_ids) {
+ RTCErrorOr<rtc::scoped_refptr<RtpSenderInterface>> result =
+ pc()->AddTrack(track, stream_ids);
+ EXPECT_EQ(RTCErrorType::NONE, result.error().type());
+ return result.MoveValue();
+}
+
+rtc::scoped_refptr<RtpSenderInterface> PeerConnectionWrapper::AddTrack(
+ rtc::scoped_refptr<MediaStreamTrackInterface> track,
+ const std::vector<std::string>& stream_ids,
+ const std::vector<RtpEncodingParameters>& init_send_encodings) {
+ RTCErrorOr<rtc::scoped_refptr<RtpSenderInterface>> result =
+ pc()->AddTrack(track, stream_ids, init_send_encodings);
+ EXPECT_EQ(RTCErrorType::NONE, result.error().type());
+ return result.MoveValue();
+}
+
+rtc::scoped_refptr<RtpSenderInterface> PeerConnectionWrapper::AddAudioTrack(
+ const std::string& track_label,
+ const std::vector<std::string>& stream_ids) {
+ return AddTrack(CreateAudioTrack(track_label), stream_ids);
+}
+
+rtc::scoped_refptr<RtpSenderInterface> PeerConnectionWrapper::AddVideoTrack(
+ const std::string& track_label,
+ const std::vector<std::string>& stream_ids) {
+ return AddTrack(CreateVideoTrack(track_label), stream_ids);
+}
+
+rtc::scoped_refptr<DataChannelInterface>
+PeerConnectionWrapper::CreateDataChannel(
+ const std::string& label,
+ const absl::optional<DataChannelInit>& config) {
+ const DataChannelInit* config_ptr = config.has_value() ? &(*config) : nullptr;
+ auto result = pc()->CreateDataChannelOrError(label, config_ptr);
+ if (!result.ok()) {
+ RTC_LOG(LS_ERROR) << "CreateDataChannel failed: "
+ << ToString(result.error().type()) << " "
+ << result.error().message();
+ return nullptr;
+ }
+ return result.MoveValue();
+}
+
+PeerConnectionInterface::SignalingState
+PeerConnectionWrapper::signaling_state() {
+ return pc()->signaling_state();
+}
+
+bool PeerConnectionWrapper::IsIceGatheringDone() {
+ return observer()->ice_gathering_complete_;
+}
+
+bool PeerConnectionWrapper::IsIceConnected() {
+ return observer()->ice_connected_;
+}
+
+rtc::scoped_refptr<const webrtc::RTCStatsReport>
+PeerConnectionWrapper::GetStats() {
+ auto callback = rtc::make_ref_counted<MockRTCStatsCollectorCallback>();
+ pc()->GetStats(callback.get());
+ EXPECT_TRUE_WAIT(callback->called(), kDefaultTimeout);
+ return callback->report();
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/peer_connection_wrapper.h b/third_party/libwebrtc/pc/peer_connection_wrapper.h
new file mode 100644
index 0000000000..bf40bbcfb8
--- /dev/null
+++ b/third_party/libwebrtc/pc/peer_connection_wrapper.h
@@ -0,0 +1,203 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_PEER_CONNECTION_WRAPPER_H_
+#define PC_PEER_CONNECTION_WRAPPER_H_
+
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/data_channel_interface.h"
+#include "api/function_view.h"
+#include "api/jsep.h"
+#include "api/media_stream_interface.h"
+#include "api/media_types.h"
+#include "api/peer_connection_interface.h"
+#include "api/rtc_error.h"
+#include "api/rtp_sender_interface.h"
+#include "api/rtp_transceiver_interface.h"
+#include "api/scoped_refptr.h"
+#include "api/stats/rtc_stats_report.h"
+#include "pc/test/mock_peer_connection_observers.h"
+
+namespace webrtc {
+
+// Class that wraps a PeerConnection so that it is easier to use in unit tests.
+// Namely, gives a synchronous API for the event-callback-based API of
+// PeerConnection and provides an observer object that stores information from
+// PeerConnectionObserver callbacks.
+//
+// This is intended to be subclassed if additional information needs to be
+// stored with the PeerConnection (e.g., fake PeerConnection parameters so that
+// tests can be written against those interactions). The base
+// PeerConnectionWrapper should only have helper methods that are broadly
+// useful. More specific helper methods should be created in the test-specific
+// subclass.
+//
+// The wrapper is intended to be constructed by specialized factory methods on
+// a test fixture class then used as a local variable in each test case.
+class PeerConnectionWrapper {
+ public:
+ // Constructs a PeerConnectionWrapper from the given PeerConnection.
+ // The given PeerConnectionFactory should be the factory that created the
+ // PeerConnection and the MockPeerConnectionObserver should be the observer
+ // that is watching the PeerConnection.
+ PeerConnectionWrapper(
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> pc_factory,
+ rtc::scoped_refptr<PeerConnectionInterface> pc,
+ std::unique_ptr<MockPeerConnectionObserver> observer);
+ virtual ~PeerConnectionWrapper();
+
+ PeerConnectionFactoryInterface* pc_factory();
+ PeerConnectionInterface* pc();
+ MockPeerConnectionObserver* observer();
+
+ // Calls the underlying PeerConnection's CreateOffer method and returns the
+ // resulting SessionDescription once it is available. If the method call
+ // failed, null is returned.
+ std::unique_ptr<SessionDescriptionInterface> CreateOffer(
+ const PeerConnectionInterface::RTCOfferAnswerOptions& options,
+ std::string* error_out = nullptr);
+ // Calls CreateOffer with default options.
+ std::unique_ptr<SessionDescriptionInterface> CreateOffer();
+ // Calls CreateOffer and sets a copy of the offer as the local description.
+ std::unique_ptr<SessionDescriptionInterface> CreateOfferAndSetAsLocal(
+ const PeerConnectionInterface::RTCOfferAnswerOptions& options);
+ // Calls CreateOfferAndSetAsLocal with default options.
+ std::unique_ptr<SessionDescriptionInterface> CreateOfferAndSetAsLocal();
+
+ // Calls the underlying PeerConnection's CreateAnswer method and returns the
+ // resulting SessionDescription once it is available. If the method call
+ // failed, null is returned.
+ std::unique_ptr<SessionDescriptionInterface> CreateAnswer(
+ const PeerConnectionInterface::RTCOfferAnswerOptions& options,
+ std::string* error_out = nullptr);
+ // Calls CreateAnswer with the default options.
+ std::unique_ptr<SessionDescriptionInterface> CreateAnswer();
+  // Calls CreateAnswer and sets a copy of the answer as the local description.
+ std::unique_ptr<SessionDescriptionInterface> CreateAnswerAndSetAsLocal(
+ const PeerConnectionInterface::RTCOfferAnswerOptions& options);
+ // Calls CreateAnswerAndSetAsLocal with default options.
+ std::unique_ptr<SessionDescriptionInterface> CreateAnswerAndSetAsLocal();
+ std::unique_ptr<SessionDescriptionInterface> CreateRollback();
+
+ // Calls the underlying PeerConnection's SetLocalDescription method with the
+ // given session description and waits for the success/failure response.
+ // Returns true if the description was successfully set.
+ bool SetLocalDescription(std::unique_ptr<SessionDescriptionInterface> desc,
+ std::string* error_out = nullptr);
+ // Calls the underlying PeerConnection's SetRemoteDescription method with the
+ // given session description and waits for the success/failure response.
+ // Returns true if the description was successfully set.
+ bool SetRemoteDescription(std::unique_ptr<SessionDescriptionInterface> desc,
+ std::string* error_out = nullptr);
+ bool SetRemoteDescription(std::unique_ptr<SessionDescriptionInterface> desc,
+ RTCError* error_out);
+
+ // Does a round of offer/answer with the local PeerConnectionWrapper
+ // generating the offer and the given PeerConnectionWrapper generating the
+ // answer.
+ // Equivalent to:
+ // 1. this->CreateOffer(offer_options)
+ // 2. this->SetLocalDescription(offer)
+ // 3. answerer->SetRemoteDescription(offer)
+ // 4. answerer->CreateAnswer(answer_options)
+ // 5. answerer->SetLocalDescription(answer)
+ // 6. this->SetRemoteDescription(answer)
+ // Returns true if all steps succeed, false otherwise.
+ // Suggested usage:
+ // ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+ bool ExchangeOfferAnswerWith(PeerConnectionWrapper* answerer);
+ bool ExchangeOfferAnswerWith(
+ PeerConnectionWrapper* answerer,
+ const PeerConnectionInterface::RTCOfferAnswerOptions& offer_options,
+ const PeerConnectionInterface::RTCOfferAnswerOptions& answer_options);
+
+  // The following are wrappers for the underlying PeerConnection's
+  // AddTransceiver method. They return the result of calling AddTransceiver
+  // with the given arguments, failing the test (EXPECT) if there is an error.
+ rtc::scoped_refptr<RtpTransceiverInterface> AddTransceiver(
+ cricket::MediaType media_type);
+ rtc::scoped_refptr<RtpTransceiverInterface> AddTransceiver(
+ cricket::MediaType media_type,
+ const RtpTransceiverInit& init);
+ rtc::scoped_refptr<RtpTransceiverInterface> AddTransceiver(
+ rtc::scoped_refptr<MediaStreamTrackInterface> track);
+ rtc::scoped_refptr<RtpTransceiverInterface> AddTransceiver(
+ rtc::scoped_refptr<MediaStreamTrackInterface> track,
+ const RtpTransceiverInit& init);
+
+ // Returns a new dummy audio track with the given label.
+ rtc::scoped_refptr<AudioTrackInterface> CreateAudioTrack(
+ const std::string& label);
+
+ // Returns a new dummy video track with the given label.
+ rtc::scoped_refptr<VideoTrackInterface> CreateVideoTrack(
+ const std::string& label);
+
+  // Wrapper for the underlying PeerConnection's AddTrack method. Fails the
+  // test (via EXPECT) if AddTrack returns an error.
+ rtc::scoped_refptr<RtpSenderInterface> AddTrack(
+ rtc::scoped_refptr<MediaStreamTrackInterface> track,
+ const std::vector<std::string>& stream_ids = {});
+
+ rtc::scoped_refptr<RtpSenderInterface> AddTrack(
+ rtc::scoped_refptr<MediaStreamTrackInterface> track,
+ const std::vector<std::string>& stream_ids,
+ const std::vector<RtpEncodingParameters>& init_send_encodings);
+
+ // Calls the underlying PeerConnection's AddTrack method with an audio media
+ // stream track not bound to any source.
+ rtc::scoped_refptr<RtpSenderInterface> AddAudioTrack(
+ const std::string& track_label,
+ const std::vector<std::string>& stream_ids = {});
+
+ // Calls the underlying PeerConnection's AddTrack method with a video media
+ // stream track fed by a FakeVideoTrackSource.
+ rtc::scoped_refptr<RtpSenderInterface> AddVideoTrack(
+ const std::string& track_label,
+ const std::vector<std::string>& stream_ids = {});
+
+ // Calls the underlying PeerConnection's CreateDataChannel method with default
+ // initialization parameters.
+ rtc::scoped_refptr<DataChannelInterface> CreateDataChannel(
+ const std::string& label,
+ const absl::optional<DataChannelInit>& config = absl::nullopt);
+
+ // Returns the signaling state of the underlying PeerConnection.
+ PeerConnectionInterface::SignalingState signaling_state();
+
+ // Returns true if ICE has finished gathering candidates.
+ bool IsIceGatheringDone();
+
+ // Returns true if ICE has established a connection.
+ bool IsIceConnected();
+
+ // Calls GetStats() on the underlying PeerConnection and returns the resulting
+ // report. If GetStats() fails, this method returns null and fails the test.
+ rtc::scoped_refptr<const RTCStatsReport> GetStats();
+
+ private:
+ std::unique_ptr<SessionDescriptionInterface> CreateSdp(
+ rtc::FunctionView<void(CreateSessionDescriptionObserver*)> fn,
+ std::string* error_out);
+ bool SetSdp(rtc::FunctionView<void(SetSessionDescriptionObserver*)> fn,
+ std::string* error_out);
+
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> pc_factory_;
+ std::unique_ptr<MockPeerConnectionObserver> observer_;
+ rtc::scoped_refptr<PeerConnectionInterface> pc_;
+};
+
+} // namespace webrtc
+
+#endif // PC_PEER_CONNECTION_WRAPPER_H_
diff --git a/third_party/libwebrtc/pc/proxy.cc b/third_party/libwebrtc/pc/proxy.cc
new file mode 100644
index 0000000000..5f4e0b8832
--- /dev/null
+++ b/third_party/libwebrtc/pc/proxy.cc
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/proxy.h"
+
+#include "rtc_base/trace_event.h"
+
+namespace webrtc {
+namespace proxy_internal {
+ScopedTrace::ScopedTrace(const char* class_and_method_name)
+ : class_and_method_name_(class_and_method_name) {
+ TRACE_EVENT_BEGIN0("webrtc", class_and_method_name_);
+}
+ScopedTrace::~ScopedTrace() {
+ TRACE_EVENT_END0("webrtc", class_and_method_name_);
+}
+} // namespace proxy_internal
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/proxy.h b/third_party/libwebrtc/pc/proxy.h
new file mode 100644
index 0000000000..f39b4a59e2
--- /dev/null
+++ b/third_party/libwebrtc/pc/proxy.h
@@ -0,0 +1,499 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file contains Macros for creating proxies for webrtc MediaStream and
+// PeerConnection classes.
+
+// The proxied objects are initialized with either one or two thread
+// objects that operations can be proxied to: The primary and secondary
+// threads.
+// In common usage, the primary thread will be the PeerConnection's
+// signaling thread, and the secondary thread will be either the
+// PeerConnection's worker thread or the PeerConnection's network thread.
+
+//
+// Example usage:
+//
+// class TestInterface : public rtc::RefCountInterface {
+// public:
+// std::string FooA() = 0;
+// std::string FooB(bool arg1) const = 0;
+// std::string FooC(bool arg1) = 0;
+// };
+//
+// Note that return types can not be a const reference.
+//
+// class Test : public TestInterface {
+// ... implementation of the interface.
+// };
+//
+// BEGIN_PROXY_MAP(Test)
+// PROXY_PRIMARY_THREAD_DESTRUCTOR()
+// PROXY_METHOD0(std::string, FooA)
+// PROXY_CONSTMETHOD1(std::string, FooB, arg1)
+// PROXY_SECONDARY_METHOD1(std::string, FooC, arg1)
+// END_PROXY_MAP()
+//
+// Where the destructor and first two methods are invoked on the primary
+// thread, and the third is invoked on the secondary thread.
+//
+// The proxy can be created using
+//
+// TestProxy::Create(Thread* signaling_thread, Thread* worker_thread,
+// TestInterface*).
+//
+// The variant defined with BEGIN_PRIMARY_PROXY_MAP is unaware of
+// the secondary thread, and invokes all methods on the primary thread.
+//
+
+#ifndef PC_PROXY_H_
+#define PC_PROXY_H_
+
+#include <stddef.h>
+
+#include <memory>
+#include <string>
+#include <tuple>
+#include <type_traits>
+#include <utility>
+
+#include "api/scoped_refptr.h"
+#include "api/task_queue/task_queue_base.h"
+#include "rtc_base/event.h"
+#include "rtc_base/string_utils.h"
+#include "rtc_base/system/rtc_export.h"
+#include "rtc_base/thread.h"
+
+#if !defined(RTC_DISABLE_PROXY_TRACE_EVENTS) && !defined(WEBRTC_CHROMIUM_BUILD)
+#define RTC_DISABLE_PROXY_TRACE_EVENTS
+#endif
+
+namespace webrtc {
+namespace proxy_internal {
+
+// Class for tracing the lifetime of MethodCall::Marshal.
+class ScopedTrace {
+ public:
+ explicit ScopedTrace(const char* class_and_method_name);
+ ~ScopedTrace();
+
+ private:
+ [[maybe_unused]] const char* const class_and_method_name_;
+};
+} // namespace proxy_internal
+
+template <typename R>
+class ReturnType {
+ public:
+ template <typename C, typename M, typename... Args>
+ void Invoke(C* c, M m, Args&&... args) {
+ r_ = (c->*m)(std::forward<Args>(args)...);
+ }
+
+ R moved_result() { return std::move(r_); }
+
+ private:
+ R r_;
+};
+
+template <>
+class ReturnType<void> {
+ public:
+ template <typename C, typename M, typename... Args>
+ void Invoke(C* c, M m, Args&&... args) {
+ (c->*m)(std::forward<Args>(args)...);
+ }
+
+ void moved_result() {}
+};
+
+template <typename C, typename R, typename... Args>
+class MethodCall {
+ public:
+ typedef R (C::*Method)(Args...);
+ MethodCall(C* c, Method m, Args&&... args)
+ : c_(c),
+ m_(m),
+ args_(std::forward_as_tuple(std::forward<Args>(args)...)) {}
+
+ R Marshal(rtc::Thread* t) {
+ if (t->IsCurrent()) {
+ Invoke(std::index_sequence_for<Args...>());
+ } else {
+ t->PostTask([this] {
+ Invoke(std::index_sequence_for<Args...>());
+ event_.Set();
+ });
+ event_.Wait(rtc::Event::kForever);
+ }
+ return r_.moved_result();
+ }
+
+ private:
+ template <size_t... Is>
+ void Invoke(std::index_sequence<Is...>) {
+ r_.Invoke(c_, m_, std::move(std::get<Is>(args_))...);
+ }
+
+ C* c_;
+ Method m_;
+ ReturnType<R> r_;
+ std::tuple<Args&&...> args_;
+ rtc::Event event_;
+};
+
+template <typename C, typename R, typename... Args>
+class ConstMethodCall {
+ public:
+ typedef R (C::*Method)(Args...) const;
+ ConstMethodCall(const C* c, Method m, Args&&... args)
+ : c_(c),
+ m_(m),
+ args_(std::forward_as_tuple(std::forward<Args>(args)...)) {}
+
+ R Marshal(rtc::Thread* t) {
+ if (t->IsCurrent()) {
+ Invoke(std::index_sequence_for<Args...>());
+ } else {
+ t->PostTask([this] {
+ Invoke(std::index_sequence_for<Args...>());
+ event_.Set();
+ });
+ event_.Wait(rtc::Event::kForever);
+ }
+ return r_.moved_result();
+ }
+
+ private:
+ template <size_t... Is>
+ void Invoke(std::index_sequence<Is...>) {
+ r_.Invoke(c_, m_, std::move(std::get<Is>(args_))...);
+ }
+
+ const C* c_;
+ Method m_;
+ ReturnType<R> r_;
+ std::tuple<Args&&...> args_;
+ rtc::Event event_;
+};
+
+#define PROXY_STRINGIZE_IMPL(x) #x
+#define PROXY_STRINGIZE(x) PROXY_STRINGIZE_IMPL(x)
+
+// Helper macros to reduce code duplication.
+#define PROXY_MAP_BOILERPLATE(class_name) \
+ template <class INTERNAL_CLASS> \
+ class class_name##ProxyWithInternal; \
+ typedef class_name##ProxyWithInternal<class_name##Interface> \
+ class_name##Proxy; \
+ template <class INTERNAL_CLASS> \
+ class class_name##ProxyWithInternal : public class_name##Interface { \
+ protected: \
+ static constexpr char proxy_name_[] = #class_name "Proxy"; \
+ typedef class_name##Interface C; \
+ \
+ public: \
+ const INTERNAL_CLASS* internal() const { \
+ return c(); \
+ } \
+ INTERNAL_CLASS* internal() { \
+ return c(); \
+ }
+
+// clang-format off
+// clang-format would put the semicolon alone,
+// leading to a presubmit error (cpplint.py)
+#define END_PROXY_MAP(class_name) \
+ }; \
+ template <class INTERNAL_CLASS> \
+ constexpr char class_name##ProxyWithInternal<INTERNAL_CLASS>::proxy_name_[];
+// clang-format on
+
+#define PRIMARY_PROXY_MAP_BOILERPLATE(class_name) \
+ protected: \
+ class_name##ProxyWithInternal(rtc::Thread* primary_thread, \
+ rtc::scoped_refptr<INTERNAL_CLASS> c) \
+ : primary_thread_(primary_thread), c_(std::move(c)) {} \
+ \
+ private: \
+ mutable rtc::Thread* primary_thread_;
+
+#define SECONDARY_PROXY_MAP_BOILERPLATE(class_name) \
+ protected: \
+ class_name##ProxyWithInternal(rtc::Thread* primary_thread, \
+ rtc::Thread* secondary_thread, \
+ rtc::scoped_refptr<INTERNAL_CLASS> c) \
+ : primary_thread_(primary_thread), \
+ secondary_thread_(secondary_thread), \
+ c_(std::move(c)) {} \
+ \
+ private: \
+ mutable rtc::Thread* primary_thread_; \
+ mutable rtc::Thread* secondary_thread_;
+
+// Note that the destructor is protected so that the proxy can only be
+// destroyed via RefCountInterface.
+#define REFCOUNTED_PROXY_MAP_BOILERPLATE(class_name) \
+ protected: \
+ ~class_name##ProxyWithInternal() { \
+ MethodCall<class_name##ProxyWithInternal, void> call( \
+ this, &class_name##ProxyWithInternal::DestroyInternal); \
+ call.Marshal(destructor_thread()); \
+ } \
+ \
+ private: \
+ const INTERNAL_CLASS* c() const { \
+ return c_.get(); \
+ } \
+ INTERNAL_CLASS* c() { \
+ return c_.get(); \
+ } \
+ void DestroyInternal() { \
+ c_ = nullptr; \
+ } \
+ rtc::scoped_refptr<INTERNAL_CLASS> c_;
+
+// Note: This doesn't use a unique_ptr, because it intends to handle a corner
+// case where an object's deletion triggers a callback that calls back into
+// this proxy object. If relying on a unique_ptr to delete the object, its
+// inner pointer would be set to null before this reentrant callback would have
+// a chance to run, resulting in a segfault.
+#define OWNED_PROXY_MAP_BOILERPLATE(class_name) \
+ public: \
+ ~class_name##ProxyWithInternal() { \
+ MethodCall<class_name##ProxyWithInternal, void> call( \
+ this, &class_name##ProxyWithInternal::DestroyInternal); \
+ call.Marshal(destructor_thread()); \
+ } \
+ \
+ private: \
+ const INTERNAL_CLASS* c() const { \
+ return c_; \
+ } \
+ INTERNAL_CLASS* c() { \
+ return c_; \
+ } \
+ void DestroyInternal() { \
+ delete c_; \
+ } \
+ INTERNAL_CLASS* c_;
+
+#define BEGIN_PRIMARY_PROXY_MAP(class_name) \
+ PROXY_MAP_BOILERPLATE(class_name) \
+ PRIMARY_PROXY_MAP_BOILERPLATE(class_name) \
+ REFCOUNTED_PROXY_MAP_BOILERPLATE(class_name) \
+ public: \
+ static rtc::scoped_refptr<class_name##ProxyWithInternal> Create( \
+ rtc::Thread* primary_thread, rtc::scoped_refptr<INTERNAL_CLASS> c) { \
+ return rtc::make_ref_counted<class_name##ProxyWithInternal>( \
+ primary_thread, std::move(c)); \
+ }
+
+#define BEGIN_PROXY_MAP(class_name) \
+ PROXY_MAP_BOILERPLATE(class_name) \
+ SECONDARY_PROXY_MAP_BOILERPLATE(class_name) \
+ REFCOUNTED_PROXY_MAP_BOILERPLATE(class_name) \
+ public: \
+ static rtc::scoped_refptr<class_name##ProxyWithInternal> Create( \
+ rtc::Thread* primary_thread, rtc::Thread* secondary_thread, \
+ rtc::scoped_refptr<INTERNAL_CLASS> c) { \
+ return rtc::make_ref_counted<class_name##ProxyWithInternal>( \
+ primary_thread, secondary_thread, std::move(c)); \
+ }
+
+#define PROXY_PRIMARY_THREAD_DESTRUCTOR() \
+ private: \
+ rtc::Thread* destructor_thread() const { \
+ return primary_thread_; \
+ } \
+ \
+ public: // NOLINTNEXTLINE
+
+#define PROXY_SECONDARY_THREAD_DESTRUCTOR() \
+ private: \
+ rtc::Thread* destructor_thread() const { \
+ return secondary_thread_; \
+ } \
+ \
+ public: // NOLINTNEXTLINE
+
+#if defined(RTC_DISABLE_PROXY_TRACE_EVENTS)
+#define TRACE_BOILERPLATE(method) \
+ do { \
+ } while (0)
+#else // if defined(RTC_DISABLE_PROXY_TRACE_EVENTS)
+#define TRACE_BOILERPLATE(method) \
+ static constexpr auto class_and_method_name = \
+ rtc::MakeCompileTimeString(proxy_name_) \
+ .Concat(rtc::MakeCompileTimeString("::")) \
+ .Concat(rtc::MakeCompileTimeString(#method)); \
+ proxy_internal::ScopedTrace scoped_trace(class_and_method_name.string)
+
+#endif // if defined(RTC_DISABLE_PROXY_TRACE_EVENTS)
+
+#define PROXY_METHOD0(r, method) \
+ r method() override { \
+ TRACE_BOILERPLATE(method); \
+ MethodCall<C, r> call(c(), &C::method); \
+ return call.Marshal(primary_thread_); \
+ }
+
+#define PROXY_CONSTMETHOD0(r, method) \
+ r method() const override { \
+ TRACE_BOILERPLATE(method); \
+ ConstMethodCall<C, r> call(c(), &C::method); \
+ return call.Marshal(primary_thread_); \
+ }
+
+#define PROXY_METHOD1(r, method, t1) \
+ r method(t1 a1) override { \
+ TRACE_BOILERPLATE(method); \
+ MethodCall<C, r, t1> call(c(), &C::method, std::move(a1)); \
+ return call.Marshal(primary_thread_); \
+ }
+
+#define PROXY_CONSTMETHOD1(r, method, t1) \
+ r method(t1 a1) const override { \
+ TRACE_BOILERPLATE(method); \
+ ConstMethodCall<C, r, t1> call(c(), &C::method, std::move(a1)); \
+ return call.Marshal(primary_thread_); \
+ }
+
+#define PROXY_METHOD2(r, method, t1, t2) \
+ r method(t1 a1, t2 a2) override { \
+ TRACE_BOILERPLATE(method); \
+ MethodCall<C, r, t1, t2> call(c(), &C::method, std::move(a1), \
+ std::move(a2)); \
+ return call.Marshal(primary_thread_); \
+ }
+
+#define PROXY_METHOD3(r, method, t1, t2, t3) \
+ r method(t1 a1, t2 a2, t3 a3) override { \
+ TRACE_BOILERPLATE(method); \
+ MethodCall<C, r, t1, t2, t3> call(c(), &C::method, std::move(a1), \
+ std::move(a2), std::move(a3)); \
+ return call.Marshal(primary_thread_); \
+ }
+
+#define PROXY_METHOD4(r, method, t1, t2, t3, t4) \
+ r method(t1 a1, t2 a2, t3 a3, t4 a4) override { \
+ TRACE_BOILERPLATE(method); \
+ MethodCall<C, r, t1, t2, t3, t4> call(c(), &C::method, std::move(a1), \
+ std::move(a2), std::move(a3), \
+ std::move(a4)); \
+ return call.Marshal(primary_thread_); \
+ }
+
+#define PROXY_METHOD5(r, method, t1, t2, t3, t4, t5) \
+ r method(t1 a1, t2 a2, t3 a3, t4 a4, t5 a5) override { \
+ TRACE_BOILERPLATE(method); \
+ MethodCall<C, r, t1, t2, t3, t4, t5> call(c(), &C::method, std::move(a1), \
+ std::move(a2), std::move(a3), \
+ std::move(a4), std::move(a5)); \
+ return call.Marshal(primary_thread_); \
+ }
+
+// Define methods which should be invoked on the secondary thread.
+#define PROXY_SECONDARY_METHOD0(r, method) \
+ r method() override { \
+ TRACE_BOILERPLATE(method); \
+ MethodCall<C, r> call(c(), &C::method); \
+ return call.Marshal(secondary_thread_); \
+ }
+
+#define PROXY_SECONDARY_CONSTMETHOD0(r, method) \
+ r method() const override { \
+ TRACE_BOILERPLATE(method); \
+ ConstMethodCall<C, r> call(c(), &C::method); \
+ return call.Marshal(secondary_thread_); \
+ }
+
+#define PROXY_SECONDARY_METHOD1(r, method, t1) \
+ r method(t1 a1) override { \
+ TRACE_BOILERPLATE(method); \
+ MethodCall<C, r, t1> call(c(), &C::method, std::move(a1)); \
+ return call.Marshal(secondary_thread_); \
+ }
+
+#define PROXY_SECONDARY_CONSTMETHOD1(r, method, t1) \
+ r method(t1 a1) const override { \
+ TRACE_BOILERPLATE(method); \
+ ConstMethodCall<C, r, t1> call(c(), &C::method, std::move(a1)); \
+ return call.Marshal(secondary_thread_); \
+ }
+
+#define PROXY_SECONDARY_METHOD2(r, method, t1, t2) \
+ r method(t1 a1, t2 a2) override { \
+ TRACE_BOILERPLATE(method); \
+ MethodCall<C, r, t1, t2> call(c(), &C::method, std::move(a1), \
+ std::move(a2)); \
+ return call.Marshal(secondary_thread_); \
+ }
+
+#define PROXY_SECONDARY_CONSTMETHOD2(r, method, t1, t2) \
+ r method(t1 a1, t2 a2) const override { \
+ TRACE_BOILERPLATE(method); \
+ ConstMethodCall<C, r, t1, t2> call(c(), &C::method, std::move(a1), \
+ std::move(a2)); \
+ return call.Marshal(secondary_thread_); \
+ }
+
+#define PROXY_SECONDARY_METHOD3(r, method, t1, t2, t3) \
+ r method(t1 a1, t2 a2, t3 a3) override { \
+ TRACE_BOILERPLATE(method); \
+ MethodCall<C, r, t1, t2, t3> call(c(), &C::method, std::move(a1), \
+ std::move(a2), std::move(a3)); \
+ return call.Marshal(secondary_thread_); \
+ }
+
+#define PROXY_SECONDARY_CONSTMETHOD3(r, method, t1, t2, t3)                \
+  r method(t1 a1, t2 a2, t3 a3) const override {                           \
+    TRACE_BOILERPLATE(method);                                             \
+    ConstMethodCall<C, r, t1, t2, t3> call(c(), &C::method, std::move(a1), \
+                                           std::move(a2), std::move(a3));  \
+    return call.Marshal(secondary_thread_);                                \
+  }
+
+// For use when returning purely const state (set during construction).
+// Use with caution. This method should only be used when the return value will
+// always be the same.
+#define BYPASS_PROXY_CONSTMETHOD0(r, method) \
+ r method() const override { \
+ TRACE_BOILERPLATE(method); \
+ return c_->method(); \
+ }
+// Allows a custom implementation of a method where the otherwise proxied
+// implementation can do a more efficient, yet thread-safe, job than the proxy
+// can do by default or when more flexibility is needed than can be provided
+// by a proxy.
+// Note that calls to these methods should be expected to be made from unknown
+// threads.
+#define BYPASS_PROXY_METHOD0(r, method) \
+ r method() override { \
+ TRACE_BOILERPLATE(method); \
+ return c_->method(); \
+ }
+
+// The 1 argument version of `BYPASS_PROXY_METHOD0`.
+#define BYPASS_PROXY_METHOD1(r, method, t1) \
+ r method(t1 a1) override { \
+ TRACE_BOILERPLATE(method); \
+ return c_->method(std::move(a1)); \
+ }
+
+// The 2 argument version of `BYPASS_PROXY_METHOD0`.
+#define BYPASS_PROXY_METHOD2(r, method, t1, t2) \
+ r method(t1 a1, t2 a2) override { \
+ TRACE_BOILERPLATE(method); \
+ return c_->method(std::move(a1), std::move(a2)); \
+ }
+} // namespace webrtc
+
+#endif // PC_PROXY_H_
diff --git a/third_party/libwebrtc/pc/proxy_unittest.cc b/third_party/libwebrtc/pc/proxy_unittest.cc
new file mode 100644
index 0000000000..ebfde9fecf
--- /dev/null
+++ b/third_party/libwebrtc/pc/proxy_unittest.cc
@@ -0,0 +1,260 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/proxy.h"
+
+#include <memory>
+#include <string>
+
+#include "api/make_ref_counted.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/ref_count.h"
+#include "test/gmock.h"
+
+using ::testing::_;
+using ::testing::DoAll;
+using ::testing::Exactly;
+using ::testing::InvokeWithoutArgs;
+using ::testing::Return;
+
+namespace webrtc {
+
+// Interface used for testing here.
+class FakeInterface : public rtc::RefCountInterface {
+ public:
+ virtual void VoidMethod0() = 0;
+ virtual std::string Method0() = 0;
+ virtual std::string ConstMethod0() const = 0;
+ virtual std::string Method1(std::string s) = 0;
+ virtual std::string ConstMethod1(std::string s) const = 0;
+ virtual std::string Method2(std::string s1, std::string s2) = 0;
+
+ protected:
+ virtual ~FakeInterface() {}
+};
+
+// Implementation of the test interface.
+class Fake : public FakeInterface {
+ public:
+ static rtc::scoped_refptr<Fake> Create() {
+ return rtc::make_ref_counted<Fake>();
+ }
+ // Used to verify destructor is called on the correct thread.
+ MOCK_METHOD(void, Destroy, ());
+
+ MOCK_METHOD(void, VoidMethod0, (), (override));
+ MOCK_METHOD(std::string, Method0, (), (override));
+ MOCK_METHOD(std::string, ConstMethod0, (), (const, override));
+
+ MOCK_METHOD(std::string, Method1, (std::string), (override));
+ MOCK_METHOD(std::string, ConstMethod1, (std::string), (const, override));
+
+ MOCK_METHOD(std::string, Method2, (std::string, std::string), (override));
+
+ protected:
+ Fake() {}
+ ~Fake() { Destroy(); }
+};
+
+// Proxies for the test interface.
+BEGIN_PROXY_MAP(Fake)
+PROXY_SECONDARY_THREAD_DESTRUCTOR()
+PROXY_METHOD0(void, VoidMethod0)
+PROXY_METHOD0(std::string, Method0)
+PROXY_CONSTMETHOD0(std::string, ConstMethod0)
+PROXY_SECONDARY_METHOD1(std::string, Method1, std::string)
+PROXY_CONSTMETHOD1(std::string, ConstMethod1, std::string)
+PROXY_SECONDARY_METHOD2(std::string, Method2, std::string, std::string)
+END_PROXY_MAP(Fake)
+
+// Preprocessor hack to get a proxy class a name different than FakeProxy.
+#define FakeProxy FakeSignalingProxy
+#define FakeProxyWithInternal FakeSignalingProxyWithInternal
+BEGIN_PRIMARY_PROXY_MAP(Fake)
+PROXY_PRIMARY_THREAD_DESTRUCTOR()
+PROXY_METHOD0(void, VoidMethod0)
+PROXY_METHOD0(std::string, Method0)
+PROXY_CONSTMETHOD0(std::string, ConstMethod0)
+PROXY_METHOD1(std::string, Method1, std::string)
+PROXY_CONSTMETHOD1(std::string, ConstMethod1, std::string)
+PROXY_METHOD2(std::string, Method2, std::string, std::string)
+END_PROXY_MAP(Fake)
+#undef FakeProxy
+
+class SignalingProxyTest : public ::testing::Test {
+ public:
+ // Checks that the functions are called on the right thread.
+ void CheckSignalingThread() { EXPECT_TRUE(signaling_thread_->IsCurrent()); }
+
+ protected:
+ void SetUp() override {
+ signaling_thread_ = rtc::Thread::Create();
+ ASSERT_TRUE(signaling_thread_->Start());
+ fake_ = Fake::Create();
+ fake_signaling_proxy_ =
+ FakeSignalingProxy::Create(signaling_thread_.get(), fake_);
+ }
+
+ protected:
+ std::unique_ptr<rtc::Thread> signaling_thread_;
+ rtc::scoped_refptr<FakeInterface> fake_signaling_proxy_;
+ rtc::scoped_refptr<Fake> fake_;
+};
+
+TEST_F(SignalingProxyTest, SignalingThreadDestructor) {
+ EXPECT_CALL(*fake_, Destroy())
+ .Times(Exactly(1))
+ .WillOnce(
+ InvokeWithoutArgs(this, &SignalingProxyTest::CheckSignalingThread));
+ fake_ = nullptr;
+ fake_signaling_proxy_ = nullptr;
+}
+
+TEST_F(SignalingProxyTest, VoidMethod0) {
+ EXPECT_CALL(*fake_, VoidMethod0())
+ .Times(Exactly(1))
+ .WillOnce(
+ InvokeWithoutArgs(this, &SignalingProxyTest::CheckSignalingThread));
+ fake_signaling_proxy_->VoidMethod0();
+}
+
+TEST_F(SignalingProxyTest, Method0) {
+ EXPECT_CALL(*fake_, Method0())
+ .Times(Exactly(1))
+ .WillOnce(DoAll(
+ InvokeWithoutArgs(this, &SignalingProxyTest::CheckSignalingThread),
+ Return("Method0")));
+ EXPECT_EQ("Method0", fake_signaling_proxy_->Method0());
+}
+
+TEST_F(SignalingProxyTest, ConstMethod0) {
+ EXPECT_CALL(*fake_, ConstMethod0())
+ .Times(Exactly(1))
+ .WillOnce(DoAll(
+ InvokeWithoutArgs(this, &SignalingProxyTest::CheckSignalingThread),
+ Return("ConstMethod0")));
+ EXPECT_EQ("ConstMethod0", fake_signaling_proxy_->ConstMethod0());
+}
+
+TEST_F(SignalingProxyTest, Method1) {
+ const std::string arg1 = "arg1";
+ EXPECT_CALL(*fake_, Method1(arg1))
+ .Times(Exactly(1))
+ .WillOnce(DoAll(
+ InvokeWithoutArgs(this, &SignalingProxyTest::CheckSignalingThread),
+ Return("Method1")));
+ EXPECT_EQ("Method1", fake_signaling_proxy_->Method1(arg1));
+}
+
+TEST_F(SignalingProxyTest, ConstMethod1) {
+ const std::string arg1 = "arg1";
+ EXPECT_CALL(*fake_, ConstMethod1(arg1))
+ .Times(Exactly(1))
+ .WillOnce(DoAll(
+ InvokeWithoutArgs(this, &SignalingProxyTest::CheckSignalingThread),
+ Return("ConstMethod1")));
+ EXPECT_EQ("ConstMethod1", fake_signaling_proxy_->ConstMethod1(arg1));
+}
+
+TEST_F(SignalingProxyTest, Method2) {
+ const std::string arg1 = "arg1";
+ const std::string arg2 = "arg2";
+ EXPECT_CALL(*fake_, Method2(arg1, arg2))
+ .Times(Exactly(1))
+ .WillOnce(DoAll(
+ InvokeWithoutArgs(this, &SignalingProxyTest::CheckSignalingThread),
+ Return("Method2")));
+ EXPECT_EQ("Method2", fake_signaling_proxy_->Method2(arg1, arg2));
+}
+
+class ProxyTest : public ::testing::Test {
+ public:
+ // Checks that the functions are called on the right thread.
+ void CheckSignalingThread() { EXPECT_TRUE(signaling_thread_->IsCurrent()); }
+ void CheckWorkerThread() { EXPECT_TRUE(worker_thread_->IsCurrent()); }
+
+ protected:
+ void SetUp() override {
+ signaling_thread_ = rtc::Thread::Create();
+ worker_thread_ = rtc::Thread::Create();
+ ASSERT_TRUE(signaling_thread_->Start());
+ ASSERT_TRUE(worker_thread_->Start());
+ fake_ = Fake::Create();
+ fake_proxy_ =
+ FakeProxy::Create(signaling_thread_.get(), worker_thread_.get(), fake_);
+ }
+
+ protected:
+ std::unique_ptr<rtc::Thread> signaling_thread_;
+ std::unique_ptr<rtc::Thread> worker_thread_;
+ rtc::scoped_refptr<FakeInterface> fake_proxy_;
+ rtc::scoped_refptr<Fake> fake_;
+};
+
+TEST_F(ProxyTest, WorkerThreadDestructor) {
+ EXPECT_CALL(*fake_, Destroy())
+ .Times(Exactly(1))
+ .WillOnce(InvokeWithoutArgs(this, &ProxyTest::CheckWorkerThread));
+ fake_ = nullptr;
+ fake_proxy_ = nullptr;
+}
+
+TEST_F(ProxyTest, VoidMethod0) {
+ EXPECT_CALL(*fake_, VoidMethod0())
+ .Times(Exactly(1))
+ .WillOnce(InvokeWithoutArgs(this, &ProxyTest::CheckSignalingThread));
+ fake_proxy_->VoidMethod0();
+}
+
+TEST_F(ProxyTest, Method0) {
+ EXPECT_CALL(*fake_, Method0())
+ .Times(Exactly(1))
+ .WillOnce(DoAll(InvokeWithoutArgs(this, &ProxyTest::CheckSignalingThread),
+ Return("Method0")));
+ EXPECT_EQ("Method0", fake_proxy_->Method0());
+}
+
+TEST_F(ProxyTest, ConstMethod0) {
+ EXPECT_CALL(*fake_, ConstMethod0())
+ .Times(Exactly(1))
+ .WillOnce(DoAll(InvokeWithoutArgs(this, &ProxyTest::CheckSignalingThread),
+ Return("ConstMethod0")));
+ EXPECT_EQ("ConstMethod0", fake_proxy_->ConstMethod0());
+}
+
+TEST_F(ProxyTest, WorkerMethod1) {
+ const std::string arg1 = "arg1";
+ EXPECT_CALL(*fake_, Method1(arg1))
+ .Times(Exactly(1))
+ .WillOnce(DoAll(InvokeWithoutArgs(this, &ProxyTest::CheckWorkerThread),
+ Return("Method1")));
+ EXPECT_EQ("Method1", fake_proxy_->Method1(arg1));
+}
+
+TEST_F(ProxyTest, ConstMethod1) {
+ const std::string arg1 = "arg1";
+ EXPECT_CALL(*fake_, ConstMethod1(arg1))
+ .Times(Exactly(1))
+ .WillOnce(DoAll(InvokeWithoutArgs(this, &ProxyTest::CheckSignalingThread),
+ Return("ConstMethod1")));
+ EXPECT_EQ("ConstMethod1", fake_proxy_->ConstMethod1(arg1));
+}
+
+TEST_F(ProxyTest, WorkerMethod2) {
+ const std::string arg1 = "arg1";
+ const std::string arg2 = "arg2";
+ EXPECT_CALL(*fake_, Method2(arg1, arg2))
+ .Times(Exactly(1))
+ .WillOnce(DoAll(InvokeWithoutArgs(this, &ProxyTest::CheckWorkerThread),
+ Return("Method2")));
+ EXPECT_EQ("Method2", fake_proxy_->Method2(arg1, arg2));
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/remote_audio_source.cc b/third_party/libwebrtc/pc/remote_audio_source.cc
new file mode 100644
index 0000000000..a516c57617
--- /dev/null
+++ b/third_party/libwebrtc/pc/remote_audio_source.cc
@@ -0,0 +1,184 @@
+/*
+ * Copyright 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/remote_audio_source.h"
+
+#include <stddef.h>
+
+#include <memory>
+#include <string>
+#include <utility>
+
+#include "absl/algorithm/container.h"
+#include "api/scoped_refptr.h"
+#include "api/sequence_checker.h"
+#include "api/task_queue/task_queue_base.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/strings/string_format.h"
+#include "rtc_base/trace_event.h"
+
+namespace webrtc {
+
+// This proxy is passed to the underlying media engine to receive audio data as
+// they come in. The data will then be passed back up to the RemoteAudioSource
+// which will fan it out to all the sinks that have been added to it.
+class RemoteAudioSource::AudioDataProxy : public AudioSinkInterface {
+ public:
+ explicit AudioDataProxy(RemoteAudioSource* source) : source_(source) {
+ RTC_DCHECK(source);
+ }
+
+ AudioDataProxy() = delete;
+ AudioDataProxy(const AudioDataProxy&) = delete;
+ AudioDataProxy& operator=(const AudioDataProxy&) = delete;
+
+ ~AudioDataProxy() override { source_->OnAudioChannelGone(); }
+
+ // AudioSinkInterface implementation.
+ void OnData(const AudioSinkInterface::Data& audio) override {
+ source_->OnData(audio);
+ }
+
+ private:
+ const rtc::scoped_refptr<RemoteAudioSource> source_;
+};
+
+RemoteAudioSource::RemoteAudioSource(
+ TaskQueueBase* worker_thread,
+ OnAudioChannelGoneAction on_audio_channel_gone_action)
+ : main_thread_(TaskQueueBase::Current()),
+ worker_thread_(worker_thread),
+ on_audio_channel_gone_action_(on_audio_channel_gone_action),
+ state_(MediaSourceInterface::kInitializing) {
+ RTC_DCHECK(main_thread_);
+ RTC_DCHECK(worker_thread_);
+}
+
+RemoteAudioSource::~RemoteAudioSource() {
+ RTC_DCHECK(audio_observers_.empty());
+ if (!sinks_.empty()) {
+ RTC_LOG(LS_WARNING)
+ << "RemoteAudioSource destroyed while sinks_ is non-empty.";
+ }
+}
+
+void RemoteAudioSource::Start(
+ cricket::VoiceMediaReceiveChannelInterface* media_channel,
+ absl::optional<uint32_t> ssrc) {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+
+ // Register for callbacks immediately before AddSink so that we always get
+ // notified when a channel goes out of scope (signaled when "AudioDataProxy"
+ // is destroyed).
+ RTC_DCHECK(media_channel);
+ ssrc ? media_channel->SetRawAudioSink(*ssrc,
+ std::make_unique<AudioDataProxy>(this))
+ : media_channel->SetDefaultRawAudioSink(
+ std::make_unique<AudioDataProxy>(this));
+}
+
+void RemoteAudioSource::Stop(
+ cricket::VoiceMediaReceiveChannelInterface* media_channel,
+ absl::optional<uint32_t> ssrc) {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ RTC_DCHECK(media_channel);
+ ssrc ? media_channel->SetRawAudioSink(*ssrc, nullptr)
+ : media_channel->SetDefaultRawAudioSink(nullptr);
+}
+
+void RemoteAudioSource::SetState(SourceState new_state) {
+ RTC_DCHECK_RUN_ON(main_thread_);
+ if (state_ != new_state) {
+ state_ = new_state;
+ FireOnChanged();
+ }
+}
+
+MediaSourceInterface::SourceState RemoteAudioSource::state() const {
+ RTC_DCHECK_RUN_ON(main_thread_);
+ return state_;
+}
+
+bool RemoteAudioSource::remote() const {
+ RTC_DCHECK_RUN_ON(main_thread_);
+ return true;
+}
+
+void RemoteAudioSource::SetVolume(double volume) {
+ RTC_DCHECK_GE(volume, 0);
+ RTC_DCHECK_LE(volume, 10);
+ RTC_LOG(LS_INFO) << rtc::StringFormat("RAS::%s({volume=%.2f})", __func__,
+ volume);
+ for (auto* observer : audio_observers_) {
+ observer->OnSetVolume(volume);
+ }
+}
+
+void RemoteAudioSource::RegisterAudioObserver(AudioObserver* observer) {
+ RTC_DCHECK(observer != NULL);
+ RTC_DCHECK(!absl::c_linear_search(audio_observers_, observer));
+ audio_observers_.push_back(observer);
+}
+
+void RemoteAudioSource::UnregisterAudioObserver(AudioObserver* observer) {
+ RTC_DCHECK(observer != NULL);
+ audio_observers_.remove(observer);
+}
+
+void RemoteAudioSource::AddSink(AudioTrackSinkInterface* sink) {
+ RTC_DCHECK_RUN_ON(main_thread_);
+ RTC_DCHECK(sink);
+
+ MutexLock lock(&sink_lock_);
+ RTC_DCHECK(!absl::c_linear_search(sinks_, sink));
+ sinks_.push_back(sink);
+}
+
+void RemoteAudioSource::RemoveSink(AudioTrackSinkInterface* sink) {
+ RTC_DCHECK_RUN_ON(main_thread_);
+ RTC_DCHECK(sink);
+
+ MutexLock lock(&sink_lock_);
+ sinks_.remove(sink);
+}
+
+void RemoteAudioSource::OnData(const AudioSinkInterface::Data& audio) {
+ // Called on the externally-owned audio callback thread, via/from webrtc.
+ TRACE_EVENT0("webrtc", "RemoteAudioSource::OnData");
+ MutexLock lock(&sink_lock_);
+ for (auto* sink : sinks_) {
+ // When peerconnection acts as an audio source, it should not provide
+ // absolute capture timestamp.
+ sink->OnData(audio.data, 16, audio.sample_rate, audio.channels,
+ audio.samples_per_channel,
+ /*absolute_capture_timestamp_ms=*/absl::nullopt);
+ }
+}
+
+void RemoteAudioSource::OnAudioChannelGone() {
+ if (on_audio_channel_gone_action_ != OnAudioChannelGoneAction::kEnd) {
+ return;
+ }
+ // Called when the audio channel is deleted. It may be the worker thread or
+ // may be a different task queue.
+ // This object needs to live long enough for the cleanup logic in the posted
+ // task to run, so take a reference to it. Sometimes the task may not be
+ // processed (because the task queue was destroyed shortly after this call),
+ // but that is fine because the task queue destructor will take care of
+ // destroying task which will release the reference on RemoteAudioSource.
+ rtc::scoped_refptr<RemoteAudioSource> thiz(this);
+ main_thread_->PostTask([thiz = std::move(thiz)] {
+ thiz->sinks_.clear();
+ thiz->SetState(MediaSourceInterface::kEnded);
+ });
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/remote_audio_source.h b/third_party/libwebrtc/pc/remote_audio_source.h
new file mode 100644
index 0000000000..0fac606ad4
--- /dev/null
+++ b/third_party/libwebrtc/pc/remote_audio_source.h
@@ -0,0 +1,91 @@
+/*
+ * Copyright 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_REMOTE_AUDIO_SOURCE_H_
+#define PC_REMOTE_AUDIO_SOURCE_H_
+
+#include <stdint.h>
+
+#include <list>
+#include <string>
+
+#include "absl/types/optional.h"
+#include "api/call/audio_sink.h"
+#include "api/media_stream_interface.h"
+#include "api/notifier.h"
+#include "api/task_queue/task_queue_base.h"
+#include "media/base/media_channel.h"
+#include "rtc_base/synchronization/mutex.h"
+
+namespace webrtc {
+
+// This class implements the audio source used by the remote audio track.
+// This class works by configuring itself as a sink with the underlying media
+// engine, then when receiving data will fan out to all added sinks.
+class RemoteAudioSource : public Notifier<AudioSourceInterface> {
+ public:
+  // In Unified Plan, receivers map to m= sections and their tracks and sources
+  // survive SSRCs being reconfigured. The life cycle of the remote audio source
+  // is associated with the life cycle of the m= section, and thus even if an
+  // audio channel is destroyed the RemoteAudioSource should kSurvive.
+  //
+  // In Plan B however, remote audio sources map 1:1 with an SSRCs and if an
+  // audio channel is destroyed, the RemoteAudioSource should kEnd.
+  enum class OnAudioChannelGoneAction {
+    kSurvive,
+    kEnd,
+  };
+
+  explicit RemoteAudioSource(
+      TaskQueueBase* worker_thread,
+      OnAudioChannelGoneAction on_audio_channel_gone_action);
+
+  // Register and unregister remote audio source with the underlying media
+  // engine.
+  void Start(cricket::VoiceMediaReceiveChannelInterface* media_channel,
+             absl::optional<uint32_t> ssrc);
+  void Stop(cricket::VoiceMediaReceiveChannelInterface* media_channel,
+            absl::optional<uint32_t> ssrc);
+  // Updates `state_` (e.g. to kEnded when the audio channel goes away).
+  void SetState(SourceState new_state);
+
+  // MediaSourceInterface implementation.
+  MediaSourceInterface::SourceState state() const override;
+  bool remote() const override;
+
+  // AudioSourceInterface implementation.
+  void SetVolume(double volume) override;
+  void RegisterAudioObserver(AudioObserver* observer) override;
+  void UnregisterAudioObserver(AudioObserver* observer) override;
+
+  // Sink registration; frames received from the media engine are fanned out
+  // to all registered sinks (see OnData in the .cc file).
+  void AddSink(AudioTrackSinkInterface* sink) override;
+  void RemoveSink(AudioTrackSinkInterface* sink) override;
+
+ protected:
+  ~RemoteAudioSource() override;
+
+ private:
+  // These are callbacks from the media engine.
+  class AudioDataProxy;
+
+  void OnData(const AudioSinkInterface::Data& audio);
+  void OnAudioChannelGone();
+
+  // Task queue on which sinks/observers are managed and state is updated.
+  TaskQueueBase* const main_thread_;
+  TaskQueueBase* const worker_thread_;
+  const OnAudioChannelGoneAction on_audio_channel_gone_action_;
+  std::list<AudioObserver*> audio_observers_;
+  // Guards `sinks_`: written on the main thread (AddSink/RemoveSink), read on
+  // the audio callback thread (OnData).
+  Mutex sink_lock_;
+  std::list<AudioTrackSinkInterface*> sinks_;
+  SourceState state_;
+};
+
+} // namespace webrtc
+
+#endif // PC_REMOTE_AUDIO_SOURCE_H_
diff --git a/third_party/libwebrtc/pc/rtc_stats_collector.cc b/third_party/libwebrtc/pc/rtc_stats_collector.cc
new file mode 100644
index 0000000000..0797ba2a76
--- /dev/null
+++ b/third_party/libwebrtc/pc/rtc_stats_collector.cc
@@ -0,0 +1,2201 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/rtc_stats_collector.h"
+
+#include <stdint.h>
+#include <stdio.h>
+
+#include <cstdint>
+#include <map>
+#include <memory>
+#include <set>
+#include <string>
+#include <type_traits>
+#include <utility>
+#include <vector>
+
+#include "absl/functional/bind_front.h"
+#include "absl/strings/string_view.h"
+#include "api/array_view.h"
+#include "api/candidate.h"
+#include "api/dtls_transport_interface.h"
+#include "api/media_stream_interface.h"
+#include "api/media_types.h"
+#include "api/rtp_parameters.h"
+#include "api/sequence_checker.h"
+#include "api/stats/rtc_stats.h"
+#include "api/stats/rtcstats_objects.h"
+#include "api/units/time_delta.h"
+#include "api/video/video_content_type.h"
+#include "api/video_codecs/scalability_mode.h"
+#include "common_video/include/quality_limitation_reason.h"
+#include "media/base/media_channel.h"
+#include "media/base/media_channel_impl.h"
+#include "modules/audio_device/include/audio_device.h"
+#include "modules/audio_processing/include/audio_processing_statistics.h"
+#include "modules/rtp_rtcp/include/report_block_data.h"
+#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "p2p/base/connection_info.h"
+#include "p2p/base/ice_transport_internal.h"
+#include "p2p/base/p2p_constants.h"
+#include "p2p/base/port.h"
+#include "pc/channel_interface.h"
+#include "pc/data_channel_utils.h"
+#include "pc/rtc_stats_traversal.h"
+#include "pc/rtp_receiver_proxy.h"
+#include "pc/rtp_sender_proxy.h"
+#include "pc/webrtc_sdp.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/ip_address.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/network_constants.h"
+#include "rtc_base/rtc_certificate.h"
+#include "rtc_base/socket_address.h"
+#include "rtc_base/ssl_stream_adapter.h"
+#include "rtc_base/string_encode.h"
+#include "rtc_base/strings/string_builder.h"
+#include "rtc_base/time_utils.h"
+#include "rtc_base/trace_event.h"
+
+namespace webrtc {
+
+namespace {
+
+// Direction prefixes embedded into codec stats IDs (see
+// RTCCodecStatsIDFromTransportAndCodecParameters below).
+const char kDirectionInbound = 'I';
+const char kDirectionOutbound = 'O';
+
+// There is at most one audio playout stats object per report, so it gets a
+// fixed singleton ID.
+const char* kAudioPlayoutSingletonId = "AP";
+// TODO(https://crbug.com/webrtc/10656): Consider making IDs less predictable.
+// Builds the certificate stats ID "CF<fingerprint>".
+std::string RTCCertificateIDFromFingerprint(const std::string& fingerprint) {
+  return "CF" + fingerprint;
+}
+
+// `direction` is either kDirectionInbound or kDirectionOutbound.
+// Builds the codec stats ID "C<dir><transport_id>_<payload_type>[_<fmtp>]".
+std::string RTCCodecStatsIDFromTransportAndCodecParameters(
+    const char direction,
+    const std::string& transport_id,
+    const RtpCodecParameters& codec_params) {
+  char buf[1024];
+  rtc::SimpleStringBuilder sb(buf);
+  sb << 'C' << direction << transport_id << '_' << codec_params.payload_type;
+  // TODO(https://crbug.com/webrtc/14420): If we stop supporting different FMTP
+  // lines for the same PT and transport, which should be illegal SDP, then we
+  // wouldn't need `fmtp` to be part of the ID here.
+  rtc::StringBuilder fmtp;
+  if (WriteFmtpParameters(codec_params.parameters, &fmtp)) {
+    sb << '_' << fmtp.Release();
+  }
+  return sb.str();
+}
+
+// Builds the candidate-pair stats ID "CP<local_id>_<remote_id>".
+std::string RTCIceCandidatePairStatsIDFromConnectionInfo(
+    const cricket::ConnectionInfo& info) {
+  char buf[4096];
+  rtc::SimpleStringBuilder sb(buf);
+  sb << "CP" << info.local_candidate.id() << "_" << info.remote_candidate.id();
+  return sb.str();
+}
+
+// Builds the transport stats ID "T<transport_name><channel_component>".
+std::string RTCTransportStatsIDFromTransportChannel(
+    const std::string& transport_name,
+    int channel_component) {
+  char buf[1024];
+  rtc::SimpleStringBuilder sb(buf);
+  sb << 'T' << transport_name << channel_component;
+  return sb.str();
+}
+
+// Builds the inbound-RTP stats ID "I<transport_id><A|V><ssrc>".
+std::string RTCInboundRtpStreamStatsIDFromSSRC(const std::string& transport_id,
+                                               cricket::MediaType media_type,
+                                               uint32_t ssrc) {
+  char buf[1024];
+  rtc::SimpleStringBuilder sb(buf);
+  sb << 'I' << transport_id
+     << (media_type == cricket::MEDIA_TYPE_AUDIO ? 'A' : 'V') << ssrc;
+  return sb.str();
+}
+
+// Builds the outbound-RTP stats ID "O<transport_id><A|V><ssrc>".
+std::string RTCOutboundRtpStreamStatsIDFromSSRC(const std::string& transport_id,
+                                                cricket::MediaType media_type,
+                                                uint32_t ssrc) {
+  char buf[1024];
+  rtc::SimpleStringBuilder sb(buf);
+  sb << 'O' << transport_id
+     << (media_type == cricket::MEDIA_TYPE_AUDIO ? 'A' : 'V') << ssrc;
+  return sb.str();
+}
+
+// Builds the remote-inbound-RTP stats ID "RI<A|V><source_ssrc>".
+std::string RTCRemoteInboundRtpStreamStatsIdFromSourceSsrc(
+    cricket::MediaType media_type,
+    uint32_t source_ssrc) {
+  char buf[1024];
+  rtc::SimpleStringBuilder sb(buf);
+  sb << "RI" << (media_type == cricket::MEDIA_TYPE_AUDIO ? 'A' : 'V')
+     << source_ssrc;
+  return sb.str();
+}
+
+// Builds the remote-outbound-RTP stats ID "RO<A|V><source_ssrc>".
+std::string RTCRemoteOutboundRTPStreamStatsIDFromSSRC(
+    cricket::MediaType media_type,
+    uint32_t source_ssrc) {
+  char buf[1024];
+  rtc::SimpleStringBuilder sb(buf);
+  sb << "RO" << (media_type == cricket::MEDIA_TYPE_AUDIO ? 'A' : 'V')
+     << source_ssrc;
+  return sb.str();
+}
+
+// Builds the media-source stats ID "S<A|V><attachment_id>".
+std::string RTCMediaSourceStatsIDFromKindAndAttachment(
+    cricket::MediaType media_type,
+    int attachment_id) {
+  char buf[1024];
+  rtc::SimpleStringBuilder sb(buf);
+  sb << 'S' << (media_type == cricket::MEDIA_TYPE_AUDIO ? 'A' : 'V')
+     << attachment_id;
+  return sb.str();
+}
+
+// Maps an internal candidate port type string to the RTCIceCandidateType
+// enum values ("host"/"srflx"/"prflx"/"relay"). Unknown types hit a DCHECK
+// and return nullptr in release builds.
+const char* CandidateTypeToRTCIceCandidateType(const std::string& type) {
+  if (type == cricket::LOCAL_PORT_TYPE)
+    return "host";
+  if (type == cricket::STUN_PORT_TYPE)
+    return "srflx";
+  if (type == cricket::PRFLX_PORT_TYPE)
+    return "prflx";
+  if (type == cricket::RELAY_PORT_TYPE)
+    return "relay";
+  RTC_DCHECK_NOTREACHED();
+  return nullptr;
+}
+
+// Maps DataChannelInterface::DataState to the RTCDataChannelState strings.
+const char* DataStateToRTCDataChannelState(
+    DataChannelInterface::DataState state) {
+  switch (state) {
+    case DataChannelInterface::kConnecting:
+      return "connecting";
+    case DataChannelInterface::kOpen:
+      return "open";
+    case DataChannelInterface::kClosing:
+      return "closing";
+    case DataChannelInterface::kClosed:
+      return "closed";
+    default:
+      RTC_DCHECK_NOTREACHED();
+      return nullptr;
+  }
+}
+
+// Maps cricket::IceCandidatePairState to the RTCStatsIceCandidatePairState
+// strings.
+const char* IceCandidatePairStateToRTCStatsIceCandidatePairState(
+    cricket::IceCandidatePairState state) {
+  switch (state) {
+    case cricket::IceCandidatePairState::WAITING:
+      return "waiting";
+    case cricket::IceCandidatePairState::IN_PROGRESS:
+      return "in-progress";
+    case cricket::IceCandidatePairState::SUCCEEDED:
+      return "succeeded";
+    case cricket::IceCandidatePairState::FAILED:
+      return "failed";
+    default:
+      RTC_DCHECK_NOTREACHED();
+      return nullptr;
+  }
+}
+
+// Maps cricket::IceRole to the RTCIceRole strings.
+const char* IceRoleToRTCIceRole(cricket::IceRole role) {
+  switch (role) {
+    case cricket::IceRole::ICEROLE_UNKNOWN:
+      return "unknown";
+    case cricket::IceRole::ICEROLE_CONTROLLED:
+      return "controlled";
+    case cricket::IceRole::ICEROLE_CONTROLLING:
+      return "controlling";
+    default:
+      RTC_DCHECK_NOTREACHED();
+      return nullptr;
+  }
+}
+
+// Maps DtlsTransportState to the RTCDtlsTransportState strings. Note the
+// default case uses RTC_CHECK_NOTREACHED (fatal even in release), unlike the
+// DCHECK-based converters above.
+const char* DtlsTransportStateToRTCDtlsTransportState(
+    DtlsTransportState state) {
+  switch (state) {
+    case DtlsTransportState::kNew:
+      return "new";
+    case DtlsTransportState::kConnecting:
+      return "connecting";
+    case DtlsTransportState::kConnected:
+      return "connected";
+    case DtlsTransportState::kClosed:
+      return "closed";
+    case DtlsTransportState::kFailed:
+      return "failed";
+    default:
+      RTC_CHECK_NOTREACHED();
+      return nullptr;
+  }
+}
+
+// Maps IceTransportState to the RTCIceTransportState strings.
+const char* IceTransportStateToRTCIceTransportState(IceTransportState state) {
+  switch (state) {
+    case IceTransportState::kNew:
+      return "new";
+    case IceTransportState::kChecking:
+      return "checking";
+    case IceTransportState::kConnected:
+      return "connected";
+    case IceTransportState::kCompleted:
+      return "completed";
+    case IceTransportState::kFailed:
+      return "failed";
+    case IceTransportState::kDisconnected:
+      return "disconnected";
+    case IceTransportState::kClosed:
+      return "closed";
+    default:
+      RTC_CHECK_NOTREACHED();
+      return nullptr;
+  }
+}
+
+// Maps a network adapter type to the coarse RTCNetworkType bucket
+// ("cellular"/"ethernet"/"wifi"/"vpn"/"unknown"). All cellular generations
+// collapse into "cellular"; see NetworkTypeToStatsNetworkAdapterType below
+// for the fine-grained variant.
+const char* NetworkTypeToStatsType(rtc::AdapterType type) {
+  switch (type) {
+    case rtc::ADAPTER_TYPE_CELLULAR:
+    case rtc::ADAPTER_TYPE_CELLULAR_2G:
+    case rtc::ADAPTER_TYPE_CELLULAR_3G:
+    case rtc::ADAPTER_TYPE_CELLULAR_4G:
+    case rtc::ADAPTER_TYPE_CELLULAR_5G:
+      return "cellular";
+    case rtc::ADAPTER_TYPE_ETHERNET:
+      return "ethernet";
+    case rtc::ADAPTER_TYPE_WIFI:
+      return "wifi";
+    case rtc::ADAPTER_TYPE_VPN:
+      return "vpn";
+    case rtc::ADAPTER_TYPE_UNKNOWN:
+    case rtc::ADAPTER_TYPE_LOOPBACK:
+    case rtc::ADAPTER_TYPE_ANY:
+      return "unknown";
+  }
+  RTC_DCHECK_NOTREACHED();
+  return nullptr;
+}
+
+// Maps a network adapter type to the fine-grained adapter-type string
+// (distinguishing cellular generations). VPN is intentionally excluded: it
+// is modelled separately as a bool, so reaching the VPN case here is a bug.
+absl::string_view NetworkTypeToStatsNetworkAdapterType(rtc::AdapterType type) {
+  switch (type) {
+    case rtc::ADAPTER_TYPE_CELLULAR:
+      return "cellular";
+    case rtc::ADAPTER_TYPE_CELLULAR_2G:
+      return "cellular2g";
+    case rtc::ADAPTER_TYPE_CELLULAR_3G:
+      return "cellular3g";
+    case rtc::ADAPTER_TYPE_CELLULAR_4G:
+      return "cellular4g";
+    case rtc::ADAPTER_TYPE_CELLULAR_5G:
+      return "cellular5g";
+    case rtc::ADAPTER_TYPE_ETHERNET:
+      return "ethernet";
+    case rtc::ADAPTER_TYPE_WIFI:
+      return "wifi";
+    case rtc::ADAPTER_TYPE_UNKNOWN:
+      return "unknown";
+    case rtc::ADAPTER_TYPE_LOOPBACK:
+      return "loopback";
+    case rtc::ADAPTER_TYPE_ANY:
+      return "any";
+    case rtc::ADAPTER_TYPE_VPN:
+      /* should not be handled here. Vpn is modelled as a bool */
+      break;
+  }
+  RTC_DCHECK_NOTREACHED();
+  return {};
+}
+
+// Maps QualityLimitationReason to the RTCQualityLimitationReason strings.
+const char* QualityLimitationReasonToRTCQualityLimitationReason(
+    QualityLimitationReason reason) {
+  switch (reason) {
+    case QualityLimitationReason::kNone:
+      return "none";
+    case QualityLimitationReason::kCpu:
+      return "cpu";
+    case QualityLimitationReason::kBandwidth:
+      return "bandwidth";
+    case QualityLimitationReason::kOther:
+      return "other";
+  }
+  RTC_CHECK_NOTREACHED();
+}
+
+// Converts per-reason quality-limitation durations from internal
+// milliseconds to the spec's seconds, keyed by the spec's reason strings.
+std::map<std::string, double>
+QualityLimitationDurationToRTCQualityLimitationDuration(
+    std::map<webrtc::QualityLimitationReason, int64_t> durations_ms) {
+  std::map<std::string, double> result;
+  // The internal duration is defined in milliseconds while the spec defines
+  // the value in seconds:
+  // https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-qualitylimitationdurations
+  for (const auto& elem : durations_ms) {
+    result[QualityLimitationReasonToRTCQualityLimitationReason(elem.first)] =
+        elem.second / static_cast<double>(rtc::kNumMillisecsPerSec);
+  }
+  return result;
+}
+
+// Normalizes an integer audio level in [0, 32767] to a double in [0.0, 1.0].
+double DoubleAudioLevelFromIntAudioLevel(int audio_level) {
+  RTC_DCHECK_GE(audio_level, 0);
+  RTC_DCHECK_LE(audio_level, 32767);
+  return audio_level / 32767.0;
+}
+
+// Gets the `codecId` identified by `transport_id` and `codec_params`. If no
+// such `RTCCodecStats` exist yet, create it and add it to `report`.
+std::string GetCodecIdAndMaybeCreateCodecStats(
+    Timestamp timestamp,
+    const char direction,
+    const std::string& transport_id,
+    const RtpCodecParameters& codec_params,
+    RTCStatsReport* report) {
+  // Payload types are 7-bit per RTP; clock rate must be known to build stats.
+  RTC_DCHECK_GE(codec_params.payload_type, 0);
+  RTC_DCHECK_LE(codec_params.payload_type, 127);
+  RTC_DCHECK(codec_params.clock_rate);
+  uint32_t payload_type = static_cast<uint32_t>(codec_params.payload_type);
+  std::string codec_id = RTCCodecStatsIDFromTransportAndCodecParameters(
+      direction, transport_id, codec_params);
+  if (report->Get(codec_id) != nullptr) {
+    // The `RTCCodecStats` already exists.
+    return codec_id;
+  }
+  // Create the `RTCCodecStats` that we want to reference.
+  auto codec_stats = std::make_unique<RTCCodecStats>(codec_id, timestamp);
+  codec_stats->payload_type = payload_type;
+  codec_stats->mime_type = codec_params.mime_type();
+  if (codec_params.clock_rate.has_value()) {
+    codec_stats->clock_rate = static_cast<uint32_t>(*codec_params.clock_rate);
+  }
+  if (codec_params.num_channels) {
+    codec_stats->channels = *codec_params.num_channels;
+  }
+
+  rtc::StringBuilder fmtp;
+  if (WriteFmtpParameters(codec_params.parameters, &fmtp)) {
+    codec_stats->sdp_fmtp_line = fmtp.Release();
+  }
+  codec_stats->transport_id = transport_id;
+  report->AddStats(std::move(codec_stats));
+  return codec_id;
+}
+
+// Provides the media independent counters (both audio and video).
+void SetInboundRTPStreamStatsFromMediaReceiverInfo(
+    const cricket::MediaReceiverInfo& media_receiver_info,
+    RTCInboundRtpStreamStats* inbound_stats) {
+  RTC_DCHECK(inbound_stats);
+  inbound_stats->ssrc = media_receiver_info.ssrc();
+  inbound_stats->packets_received =
+      static_cast<uint32_t>(media_receiver_info.packets_received);
+  // Payload bytes only; header/padding bytes are reported separately below.
+  inbound_stats->bytes_received =
+      static_cast<uint64_t>(media_receiver_info.payload_bytes_received);
+  inbound_stats->header_bytes_received = static_cast<uint64_t>(
+      media_receiver_info.header_and_padding_bytes_received);
+  // Optional counters are copied only when the media engine provided them.
+  if (media_receiver_info.retransmitted_bytes_received.has_value()) {
+    inbound_stats->retransmitted_bytes_received =
+        *media_receiver_info.retransmitted_bytes_received;
+  }
+  if (media_receiver_info.retransmitted_packets_received.has_value()) {
+    inbound_stats->retransmitted_packets_received =
+        *media_receiver_info.retransmitted_packets_received;
+  }
+  inbound_stats->packets_lost =
+      static_cast<int32_t>(media_receiver_info.packets_lost);
+  inbound_stats->jitter_buffer_delay =
+      media_receiver_info.jitter_buffer_delay_seconds;
+  inbound_stats->jitter_buffer_target_delay =
+      media_receiver_info.jitter_buffer_target_delay_seconds;
+  inbound_stats->jitter_buffer_minimum_delay =
+      media_receiver_info.jitter_buffer_minimum_delay_seconds;
+  inbound_stats->jitter_buffer_emitted_count =
+      media_receiver_info.jitter_buffer_emitted_count;
+  if (media_receiver_info.nacks_sent.has_value()) {
+    inbound_stats->nack_count = *media_receiver_info.nacks_sent;
+  }
+  if (media_receiver_info.fec_packets_received.has_value()) {
+    inbound_stats->fec_packets_received =
+        *media_receiver_info.fec_packets_received;
+  }
+  if (media_receiver_info.fec_packets_discarded.has_value()) {
+    inbound_stats->fec_packets_discarded =
+        *media_receiver_info.fec_packets_discarded;
+  }
+  if (media_receiver_info.fec_bytes_received.has_value()) {
+    inbound_stats->fec_bytes_received = *media_receiver_info.fec_bytes_received;
+  }
+}
+
+// Builds an audio RTCInboundRtpStreamStats object from a voice receiver's
+// info, registering the referenced codec stats in `report` as a side effect.
+std::unique_ptr<RTCInboundRtpStreamStats> CreateInboundAudioStreamStats(
+    const cricket::VoiceMediaInfo& voice_media_info,
+    const cricket::VoiceReceiverInfo& voice_receiver_info,
+    const std::string& transport_id,
+    const std::string& mid,
+    Timestamp timestamp,
+    RTCStatsReport* report) {
+  auto inbound_audio = std::make_unique<RTCInboundRtpStreamStats>(
+      /*id=*/RTCInboundRtpStreamStatsIDFromSSRC(
+          transport_id, cricket::MEDIA_TYPE_AUDIO, voice_receiver_info.ssrc()),
+      timestamp);
+  // Media-independent counters first, then the audio-specific fields.
+  SetInboundRTPStreamStatsFromMediaReceiverInfo(voice_receiver_info,
+                                                inbound_audio.get());
+  inbound_audio->transport_id = transport_id;
+  inbound_audio->mid = mid;
+  inbound_audio->kind = "audio";
+  if (voice_receiver_info.codec_payload_type.has_value()) {
+    auto codec_param_it = voice_media_info.receive_codecs.find(
+        *voice_receiver_info.codec_payload_type);
+    RTC_DCHECK(codec_param_it != voice_media_info.receive_codecs.end());
+    if (codec_param_it != voice_media_info.receive_codecs.end()) {
+      inbound_audio->codec_id = GetCodecIdAndMaybeCreateCodecStats(
+          inbound_audio->timestamp(), kDirectionInbound, transport_id,
+          codec_param_it->second, report);
+    }
+  }
+  // Internal jitter is in milliseconds; the stat is in seconds.
+  inbound_audio->jitter = static_cast<double>(voice_receiver_info.jitter_ms) /
+                          rtc::kNumMillisecsPerSec;
+  inbound_audio->total_samples_received =
+      voice_receiver_info.total_samples_received;
+  inbound_audio->concealed_samples = voice_receiver_info.concealed_samples;
+  inbound_audio->silent_concealed_samples =
+      voice_receiver_info.silent_concealed_samples;
+  inbound_audio->concealment_events = voice_receiver_info.concealment_events;
+  inbound_audio->inserted_samples_for_deceleration =
+      voice_receiver_info.inserted_samples_for_deceleration;
+  inbound_audio->removed_samples_for_acceleration =
+      voice_receiver_info.removed_samples_for_acceleration;
+  // A negative audio level marks "unavailable"; only report valid levels.
+  if (voice_receiver_info.audio_level >= 0) {
+    inbound_audio->audio_level =
+        DoubleAudioLevelFromIntAudioLevel(voice_receiver_info.audio_level);
+  }
+  inbound_audio->total_audio_energy = voice_receiver_info.total_output_energy;
+  inbound_audio->total_samples_duration =
+      voice_receiver_info.total_output_duration;
+  // `fir_count` and `pli_count` are only valid for video and are
+  // purposefully left undefined for audio.
+  if (voice_receiver_info.last_packet_received.has_value()) {
+    inbound_audio->last_packet_received_timestamp =
+        voice_receiver_info.last_packet_received->ms<double>();
+  }
+  if (voice_receiver_info.estimated_playout_ntp_timestamp_ms.has_value()) {
+    // TODO(bugs.webrtc.org/10529): Fix time origin.
+    inbound_audio->estimated_playout_timestamp = static_cast<double>(
+        *voice_receiver_info.estimated_playout_ntp_timestamp_ms);
+  }
+  inbound_audio->packets_discarded = voice_receiver_info.packets_discarded;
+  inbound_audio->jitter_buffer_flushes =
+      voice_receiver_info.jitter_buffer_flushes;
+  inbound_audio->delayed_packet_outage_samples =
+      voice_receiver_info.delayed_packet_outage_samples;
+  inbound_audio->relative_packet_arrival_delay =
+      voice_receiver_info.relative_packet_arrival_delay_seconds;
+  // Clamp a negative (unset) interruption count to zero.
+  inbound_audio->interruption_count =
+      voice_receiver_info.interruption_count >= 0
+          ? voice_receiver_info.interruption_count
+          : 0;
+  inbound_audio->total_interruption_duration =
+      static_cast<double>(voice_receiver_info.total_interruption_duration_ms) /
+      rtc::kNumMillisecsPerSec;
+  return inbound_audio;
+}
+
+// Builds the singleton RTCAudioPlayoutStats ("AP") from the audio device
+// module's cumulative playout counters.
+std::unique_ptr<RTCAudioPlayoutStats> CreateAudioPlayoutStats(
+    const AudioDeviceModule::Stats& audio_device_stats,
+    webrtc::Timestamp timestamp) {
+  auto stats = std::make_unique<RTCAudioPlayoutStats>(
+      /*id=*/kAudioPlayoutSingletonId, timestamp);
+  stats->synthesized_samples_duration =
+      audio_device_stats.synthesized_samples_duration_s;
+  stats->synthesized_samples_events =
+      audio_device_stats.synthesized_samples_events;
+  stats->total_samples_count = audio_device_stats.total_samples_count;
+  stats->total_samples_duration = audio_device_stats.total_samples_duration_s;
+  stats->total_playout_delay = audio_device_stats.total_playout_delay_s;
+  return stats;
+}
+
+// Builds an audio RTCRemoteOutboundRtpStreamStats object from RTCP sender
+// report data carried in `voice_receiver_info`. Returns nullptr until the
+// first sender report has been received.
+std::unique_ptr<RTCRemoteOutboundRtpStreamStats>
+CreateRemoteOutboundAudioStreamStats(
+    const cricket::VoiceReceiverInfo& voice_receiver_info,
+    const std::string& mid,
+    const RTCInboundRtpStreamStats& inbound_audio_stats,
+    const std::string& transport_id) {
+  if (!voice_receiver_info.last_sender_report_timestamp_ms.has_value()) {
+    // Cannot create `RTCRemoteOutboundRtpStreamStats` when the RTCP SR arrival
+    // timestamp is not available - i.e., until the first sender report is
+    // received.
+    return nullptr;
+  }
+  RTC_DCHECK_GT(voice_receiver_info.sender_reports_reports_count, 0);
+
+  // Create.
+  auto stats = std::make_unique<RTCRemoteOutboundRtpStreamStats>(
+      /*id=*/RTCRemoteOutboundRTPStreamStatsIDFromSSRC(
+          cricket::MEDIA_TYPE_AUDIO, voice_receiver_info.ssrc()),
+      Timestamp::Millis(*voice_receiver_info.last_sender_report_timestamp_ms));
+
+  // Populate.
+  // - RTCRtpStreamStats.
+  stats->ssrc = voice_receiver_info.ssrc();
+  stats->kind = "audio";
+  stats->transport_id = transport_id;
+  if (inbound_audio_stats.codec_id.is_defined()) {
+    stats->codec_id = *inbound_audio_stats.codec_id;
+  }
+  // - RTCSentRtpStreamStats.
+  stats->packets_sent = voice_receiver_info.sender_reports_packets_sent;
+  stats->bytes_sent = voice_receiver_info.sender_reports_bytes_sent;
+  // - RTCRemoteOutboundRtpStreamStats.
+  // `local_id` cross-references the corresponding inbound stream stats.
+  stats->local_id = inbound_audio_stats.id();
+  // last_sender_report_remote_timestamp_ms is set together with
+  // last_sender_report_timestamp_ms.
+  RTC_DCHECK(
+      voice_receiver_info.last_sender_report_remote_timestamp_ms.has_value());
+  stats->remote_timestamp = static_cast<double>(
+      *voice_receiver_info.last_sender_report_remote_timestamp_ms);
+  stats->reports_sent = voice_receiver_info.sender_reports_reports_count;
+  if (voice_receiver_info.round_trip_time.has_value()) {
+    stats->round_trip_time =
+        voice_receiver_info.round_trip_time->seconds<double>();
+  }
+  stats->round_trip_time_measurements =
+      voice_receiver_info.round_trip_time_measurements;
+  stats->total_round_trip_time =
+      voice_receiver_info.total_round_trip_time.seconds<double>();
+
+  return stats;
+}
+
+// Builds a video RTCInboundRtpStreamStats object from a video receiver's
+// info, registering the referenced codec stats in `report` as a side effect.
+std::unique_ptr<RTCInboundRtpStreamStats>
+CreateInboundRTPStreamStatsFromVideoReceiverInfo(
+    const std::string& transport_id,
+    const std::string& mid,
+    const cricket::VideoMediaInfo& video_media_info,
+    const cricket::VideoReceiverInfo& video_receiver_info,
+    Timestamp timestamp,
+    RTCStatsReport* report) {
+  auto inbound_video = std::make_unique<RTCInboundRtpStreamStats>(
+      RTCInboundRtpStreamStatsIDFromSSRC(
+          transport_id, cricket::MEDIA_TYPE_VIDEO, video_receiver_info.ssrc()),
+      timestamp);
+  // Media-independent counters first, then the video-specific fields.
+  SetInboundRTPStreamStatsFromMediaReceiverInfo(video_receiver_info,
+                                                inbound_video.get());
+  inbound_video->transport_id = transport_id;
+  inbound_video->mid = mid;
+  inbound_video->kind = "video";
+  if (video_receiver_info.codec_payload_type.has_value()) {
+    auto codec_param_it = video_media_info.receive_codecs.find(
+        *video_receiver_info.codec_payload_type);
+    RTC_DCHECK(codec_param_it != video_media_info.receive_codecs.end());
+    if (codec_param_it != video_media_info.receive_codecs.end()) {
+      inbound_video->codec_id = GetCodecIdAndMaybeCreateCodecStats(
+          inbound_video->timestamp(), kDirectionInbound, transport_id,
+          codec_param_it->second, report);
+    }
+  }
+  // Internal jitter is in milliseconds; the stat is in seconds.
+  inbound_video->jitter = static_cast<double>(video_receiver_info.jitter_ms) /
+                          rtc::kNumMillisecsPerSec;
+  inbound_video->fir_count =
+      static_cast<uint32_t>(video_receiver_info.firs_sent);
+  inbound_video->pli_count =
+      static_cast<uint32_t>(video_receiver_info.plis_sent);
+  inbound_video->frames_received = video_receiver_info.frames_received;
+  inbound_video->frames_decoded = video_receiver_info.frames_decoded;
+  inbound_video->frames_dropped = video_receiver_info.frames_dropped;
+  inbound_video->key_frames_decoded = video_receiver_info.key_frames_decoded;
+  // Zero/negative dimensions and framerate mean "not yet known"; the
+  // corresponding stats are left undefined in that case.
+  if (video_receiver_info.frame_width > 0) {
+    inbound_video->frame_width =
+        static_cast<uint32_t>(video_receiver_info.frame_width);
+  }
+  if (video_receiver_info.frame_height > 0) {
+    inbound_video->frame_height =
+        static_cast<uint32_t>(video_receiver_info.frame_height);
+  }
+  if (video_receiver_info.framerate_decoded > 0) {
+    inbound_video->frames_per_second = video_receiver_info.framerate_decoded;
+  }
+  if (video_receiver_info.qp_sum.has_value()) {
+    inbound_video->qp_sum = *video_receiver_info.qp_sum;
+  }
+  if (video_receiver_info.timing_frame_info.has_value()) {
+    inbound_video->goog_timing_frame_info =
+        video_receiver_info.timing_frame_info->ToString();
+  }
+  inbound_video->total_decode_time =
+      video_receiver_info.total_decode_time.seconds<double>();
+  inbound_video->total_processing_delay =
+      video_receiver_info.total_processing_delay.seconds<double>();
+  inbound_video->total_assembly_time =
+      video_receiver_info.total_assembly_time.seconds<double>();
+  inbound_video->frames_assembled_from_multiple_packets =
+      video_receiver_info.frames_assembled_from_multiple_packets;
+  inbound_video->total_inter_frame_delay =
+      video_receiver_info.total_inter_frame_delay;
+  inbound_video->total_squared_inter_frame_delay =
+      video_receiver_info.total_squared_inter_frame_delay;
+  inbound_video->pause_count = video_receiver_info.pause_count;
+  inbound_video->total_pauses_duration =
+      static_cast<double>(video_receiver_info.total_pauses_duration_ms) /
+      rtc::kNumMillisecsPerSec;
+  inbound_video->freeze_count = video_receiver_info.freeze_count;
+  inbound_video->total_freezes_duration =
+      static_cast<double>(video_receiver_info.total_freezes_duration_ms) /
+      rtc::kNumMillisecsPerSec;
+  inbound_video->min_playout_delay =
+      static_cast<double>(video_receiver_info.min_playout_delay_ms) /
+      rtc::kNumMillisecsPerSec;
+  if (video_receiver_info.last_packet_received.has_value()) {
+    inbound_video->last_packet_received_timestamp =
+        video_receiver_info.last_packet_received->ms<double>();
+  }
+  if (video_receiver_info.estimated_playout_ntp_timestamp_ms.has_value()) {
+    // TODO(bugs.webrtc.org/10529): Fix time origin if needed.
+    inbound_video->estimated_playout_timestamp = static_cast<double>(
+        *video_receiver_info.estimated_playout_ntp_timestamp_ms);
+  }
+  // TODO(bugs.webrtc.org/10529): When info's `content_info` is optional
+  // support the "unspecified" value.
+  if (videocontenttypehelpers::IsScreenshare(video_receiver_info.content_type))
+    inbound_video->content_type = "screenshare";
+  if (video_receiver_info.decoder_implementation_name.has_value()) {
+    inbound_video->decoder_implementation =
+        *video_receiver_info.decoder_implementation_name;
+  }
+  if (video_receiver_info.power_efficient_decoder.has_value()) {
+    inbound_video->power_efficient_decoder =
+        *video_receiver_info.power_efficient_decoder;
+  }
+  // Map FID (RTX) and FEC-FR ssrc-groups onto the rtx/fec SSRC fields; the
+  // second SSRC in each two-element group is the repair stream.
+  for (const auto& ssrc_group : video_receiver_info.ssrc_groups) {
+    if (ssrc_group.semantics == cricket::kFidSsrcGroupSemantics &&
+        ssrc_group.ssrcs.size() == 2) {
+      inbound_video->rtx_ssrc = ssrc_group.ssrcs[1];
+    } else if (ssrc_group.semantics == cricket::kFecFrSsrcGroupSemantics &&
+               ssrc_group.ssrcs.size() == 2) {
+      // TODO(bugs.webrtc.org/15002): the ssrc-group might be >= 2 with
+      // multistream support.
+      inbound_video->fec_ssrc = ssrc_group.ssrcs[1];
+    }
+  }
+
+  return inbound_video;
+}
+
+// Provides the media independent counters and information (both audio and
+// video).
+void SetOutboundRTPStreamStatsFromMediaSenderInfo(
+    const cricket::MediaSenderInfo& media_sender_info,
+    RTCOutboundRtpStreamStats* outbound_stats) {
+  RTC_DCHECK(outbound_stats);
+  outbound_stats->ssrc = media_sender_info.ssrc();
+  outbound_stats->packets_sent =
+      static_cast<uint32_t>(media_sender_info.packets_sent);
+  outbound_stats->total_packet_send_delay =
+      media_sender_info.total_packet_send_delay.seconds<double>();
+  outbound_stats->retransmitted_packets_sent =
+      media_sender_info.retransmitted_packets_sent;
+  // Payload bytes only; header/padding bytes are reported separately below.
+  outbound_stats->bytes_sent =
+      static_cast<uint64_t>(media_sender_info.payload_bytes_sent);
+  outbound_stats->header_bytes_sent =
+      static_cast<uint64_t>(media_sender_info.header_and_padding_bytes_sent);
+  outbound_stats->retransmitted_bytes_sent =
+      media_sender_info.retransmitted_bytes_sent;
+  outbound_stats->nack_count = media_sender_info.nacks_received;
+  // `active` is optional; leave the stat undefined when the engine did not
+  // report it.
+  if (media_sender_info.active.has_value()) {
+    outbound_stats->active = *media_sender_info.active;
+  }
+}
+
+// Builds an audio RTCOutboundRtpStreamStats object from a voice sender's
+// info, registering the referenced codec stats in `report` as a side effect.
+std::unique_ptr<RTCOutboundRtpStreamStats>
+CreateOutboundRTPStreamStatsFromVoiceSenderInfo(
+    const std::string& transport_id,
+    const std::string& mid,
+    const cricket::VoiceMediaInfo& voice_media_info,
+    const cricket::VoiceSenderInfo& voice_sender_info,
+    Timestamp timestamp,
+    RTCStatsReport* report) {
+  auto outbound_audio = std::make_unique<RTCOutboundRtpStreamStats>(
+      RTCOutboundRtpStreamStatsIDFromSSRC(
+          transport_id, cricket::MEDIA_TYPE_AUDIO, voice_sender_info.ssrc()),
+      timestamp);
+  // Media-independent counters first, then the audio-specific fields.
+  SetOutboundRTPStreamStatsFromMediaSenderInfo(voice_sender_info,
+                                               outbound_audio.get());
+  outbound_audio->transport_id = transport_id;
+  outbound_audio->mid = mid;
+  outbound_audio->kind = "audio";
+  // A non-positive target bitrate means "not set"; leave the stat undefined.
+  if (voice_sender_info.target_bitrate.has_value() &&
+      *voice_sender_info.target_bitrate > 0) {
+    outbound_audio->target_bitrate = *voice_sender_info.target_bitrate;
+  }
+  if (voice_sender_info.codec_payload_type.has_value()) {
+    auto codec_param_it = voice_media_info.send_codecs.find(
+        *voice_sender_info.codec_payload_type);
+    RTC_DCHECK(codec_param_it != voice_media_info.send_codecs.end());
+    if (codec_param_it != voice_media_info.send_codecs.end()) {
+      outbound_audio->codec_id = GetCodecIdAndMaybeCreateCodecStats(
+          outbound_audio->timestamp(), kDirectionOutbound, transport_id,
+          codec_param_it->second, report);
+    }
+  }
+  // `fir_count` and `pli_count` are only valid for video and are
+  // purposefully left undefined for audio.
+  return outbound_audio;
+}
+
+// Creates an "outbound-rtp" stats object for one video sender (keyed by
+// transport id + SSRC). Metrics shared with audio are filled in by
+// SetOutboundRTPStreamStatsFromMediaSenderInfo(); this function then adds the
+// video-only members (encode counters, resolution, quality limitation, RTX
+// SSRC, etc.) and registers the send codec in `report` on demand.
+std::unique_ptr<RTCOutboundRtpStreamStats>
+CreateOutboundRTPStreamStatsFromVideoSenderInfo(
+    const std::string& transport_id,
+    const std::string& mid,
+    const cricket::VideoMediaInfo& video_media_info,
+    const cricket::VideoSenderInfo& video_sender_info,
+    Timestamp timestamp,
+    RTCStatsReport* report) {
+  auto outbound_video = std::make_unique<RTCOutboundRtpStreamStats>(
+      RTCOutboundRtpStreamStatsIDFromSSRC(
+          transport_id, cricket::MEDIA_TYPE_VIDEO, video_sender_info.ssrc()),
+      timestamp);
+  SetOutboundRTPStreamStatsFromMediaSenderInfo(video_sender_info,
+                                               outbound_video.get());
+  outbound_video->transport_id = transport_id;
+  outbound_video->mid = mid;
+  outbound_video->kind = "video";
+  if (video_sender_info.codec_payload_type.has_value()) {
+    auto codec_param_it = video_media_info.send_codecs.find(
+        *video_sender_info.codec_payload_type);
+    // A sender with a codec payload type should always have a matching entry
+    // in `send_codecs`; the guard below keeps release builds safe regardless.
+    RTC_DCHECK(codec_param_it != video_media_info.send_codecs.end());
+    if (codec_param_it != video_media_info.send_codecs.end()) {
+      outbound_video->codec_id = GetCodecIdAndMaybeCreateCodecStats(
+          outbound_video->timestamp(), kDirectionOutbound, transport_id,
+          codec_param_it->second, report);
+    }
+  }
+  outbound_video->fir_count =
+      static_cast<uint32_t>(video_sender_info.firs_received);
+  outbound_video->pli_count =
+      static_cast<uint32_t>(video_sender_info.plis_received);
+  if (video_sender_info.qp_sum.has_value())
+    outbound_video->qp_sum = *video_sender_info.qp_sum;
+  if (video_sender_info.target_bitrate.has_value() &&
+      *video_sender_info.target_bitrate > 0) {
+    outbound_video->target_bitrate = *video_sender_info.target_bitrate;
+  }
+  outbound_video->frames_encoded = video_sender_info.frames_encoded;
+  outbound_video->key_frames_encoded = video_sender_info.key_frames_encoded;
+  // Convert milliseconds to seconds, per the webrtc-stats spec.
+  outbound_video->total_encode_time =
+      static_cast<double>(video_sender_info.total_encode_time_ms) /
+      rtc::kNumMillisecsPerSec;
+  outbound_video->total_encoded_bytes_target =
+      video_sender_info.total_encoded_bytes_target;
+  // Zero width/height/framerate means "unknown"; leave those members unset.
+  if (video_sender_info.send_frame_width > 0) {
+    outbound_video->frame_width =
+        static_cast<uint32_t>(video_sender_info.send_frame_width);
+  }
+  if (video_sender_info.send_frame_height > 0) {
+    outbound_video->frame_height =
+        static_cast<uint32_t>(video_sender_info.send_frame_height);
+  }
+  if (video_sender_info.framerate_sent > 0) {
+    outbound_video->frames_per_second = video_sender_info.framerate_sent;
+  }
+  outbound_video->frames_sent = video_sender_info.frames_sent;
+  outbound_video->huge_frames_sent = video_sender_info.huge_frames_sent;
+  outbound_video->quality_limitation_reason =
+      QualityLimitationReasonToRTCQualityLimitationReason(
+          video_sender_info.quality_limitation_reason);
+  outbound_video->quality_limitation_durations =
+      QualityLimitationDurationToRTCQualityLimitationDuration(
+          video_sender_info.quality_limitation_durations_ms);
+  outbound_video->quality_limitation_resolution_changes =
+      video_sender_info.quality_limitation_resolution_changes;
+  // TODO(https://crbug.com/webrtc/10529): When info's `content_info` is
+  // optional, support the "unspecified" value.
+  if (videocontenttypehelpers::IsScreenshare(video_sender_info.content_type))
+    outbound_video->content_type = "screenshare";
+  if (video_sender_info.encoder_implementation_name.has_value()) {
+    outbound_video->encoder_implementation =
+        *video_sender_info.encoder_implementation_name;
+  }
+  if (video_sender_info.rid.has_value()) {
+    outbound_video->rid = *video_sender_info.rid;
+  }
+  if (video_sender_info.power_efficient_encoder.has_value()) {
+    outbound_video->power_efficient_encoder =
+        *video_sender_info.power_efficient_encoder;
+  }
+  if (video_sender_info.scalability_mode) {
+    outbound_video->scalability_mode = std::string(
+        ScalabilityModeToString(*video_sender_info.scalability_mode));
+  }
+  // The RTX SSRC is the secondary SSRC of the two-element "FID" group whose
+  // primary SSRC is this sender's SSRC.
+  for (const auto& ssrc_group : video_sender_info.ssrc_groups) {
+    if (ssrc_group.semantics == cricket::kFidSsrcGroupSemantics &&
+        ssrc_group.ssrcs.size() == 2 &&
+        video_sender_info.ssrc() == ssrc_group.ssrcs[0]) {
+      outbound_video->rtx_ssrc = ssrc_group.ssrcs[1];
+    }
+  }
+  return outbound_video;
+}
+
+// Creates a "remote-inbound-rtp" stats object from an RTCP Report Block and,
+// when the matching local "outbound-rtp" object is found in `outbound_rtps`,
+// cross-links the two (local_id/remote_id) and derives transport, codec and
+// jitter members from the local side.
+std::unique_ptr<RTCRemoteInboundRtpStreamStats>
+ProduceRemoteInboundRtpStreamStatsFromReportBlockData(
+    const std::string& transport_id,
+    const ReportBlockData& report_block,
+    cricket::MediaType media_type,
+    const std::map<std::string, RTCOutboundRtpStreamStats*>& outbound_rtps,
+    const RTCStatsReport& report) {
+  // RTCStats' timestamp generally refers to when the metric was sampled, but
+  // for "remote-[outbound/inbound]-rtp" it refers to the local time when the
+  // Report Block was received.
+  auto remote_inbound = std::make_unique<RTCRemoteInboundRtpStreamStats>(
+      RTCRemoteInboundRtpStreamStatsIdFromSourceSsrc(
+          media_type, report_block.source_ssrc()),
+      report_block.report_block_timestamp_utc());
+  remote_inbound->ssrc = report_block.source_ssrc();
+  remote_inbound->kind =
+      media_type == cricket::MEDIA_TYPE_AUDIO ? "audio" : "video";
+  remote_inbound->packets_lost = report_block.cumulative_lost();
+  remote_inbound->fraction_lost = report_block.fraction_lost();
+  // RTT is only defined once at least one measurement has been made.
+  if (report_block.num_rtts() > 0) {
+    remote_inbound->round_trip_time = report_block.last_rtt().seconds<double>();
+  }
+  remote_inbound->total_round_trip_time =
+      report_block.sum_rtts().seconds<double>();
+  remote_inbound->round_trip_time_measurements = report_block.num_rtts();
+
+  std::string local_id = RTCOutboundRtpStreamStatsIDFromSSRC(
+      transport_id, media_type, report_block.source_ssrc());
+  // Look up local stat from `outbound_rtps` where the pointers are non-const.
+  auto local_id_it = outbound_rtps.find(local_id);
+  if (local_id_it != outbound_rtps.end()) {
+    remote_inbound->local_id = local_id;
+    auto& outbound_rtp = *local_id_it->second;
+    outbound_rtp.remote_id = remote_inbound->id();
+    // The RTP/RTCP transport is obtained from the
+    // RTCOutboundRtpStreamStats's transport.
+    const auto* transport_from_id = report.Get(transport_id);
+    if (transport_from_id) {
+      const auto& transport = transport_from_id->cast_to<RTCTransportStats>();
+      // If RTP and RTCP are not multiplexed, there is a separate RTCP
+      // transport paired with the RTP transport, otherwise the same
+      // transport is used for RTCP and RTP.
+      remote_inbound->transport_id =
+          transport.rtcp_transport_stats_id.is_defined()
+              ? *transport.rtcp_transport_stats_id
+              : *outbound_rtp.transport_id;
+    }
+    // We're assuming the same codec is used on both ends. However if the
+    // codec is switched out on the fly we may have received a Report Block
+    // based on the previous codec and there is no way to tell which point in
+    // time the codec changed for the remote end.
+    const auto* codec_from_id = outbound_rtp.codec_id.is_defined()
+                                    ? report.Get(*outbound_rtp.codec_id)
+                                    : nullptr;
+    if (codec_from_id) {
+      remote_inbound->codec_id = *outbound_rtp.codec_id;
+      const auto& codec = codec_from_id->cast_to<RTCCodecStats>();
+      // Jitter is reported in RTP timestamp units; converting to seconds
+      // requires the codec's clock rate.
+      if (codec.clock_rate.is_defined()) {
+        remote_inbound->jitter =
+            report_block.jitter(*codec.clock_rate).seconds<double>();
+      }
+    }
+  }
+  return remote_inbound;
+}
+
+// Walks the certificate chain (leaf first, following `issuer` links) and adds
+// one RTCCertificateStats per certificate to `report`, linking each
+// certificate to its issuer via `issuer_certificate_id`. Stops as soon as a
+// certificate is already present in the report (shared chain suffix).
+void ProduceCertificateStatsFromSSLCertificateStats(
+    Timestamp timestamp,
+    const rtc::SSLCertificateStats& certificate_stats,
+    RTCStatsReport* report) {
+  // Tracks the previously-emitted (child) certificate so its issuer link can
+  // be filled in once the current certificate's id is known.
+  RTCCertificateStats* prev_certificate_stats = nullptr;
+  for (const rtc::SSLCertificateStats* s = &certificate_stats; s;
+       s = s->issuer.get()) {
+    std::string certificate_stats_id =
+        RTCCertificateIDFromFingerprint(s->fingerprint);
+    // It is possible for the same certificate to show up multiple times, e.g.
+    // if local and remote side use the same certificate in a loopback call.
+    // If the report already contains stats for this certificate, skip it.
+    if (report->Get(certificate_stats_id)) {
+      RTC_DCHECK_EQ(s, &certificate_stats);
+      break;
+    }
+    // NOTE(review): raw `new` is immediately wrapped in a unique_ptr below;
+    // could use std::make_unique, but a non-owning pointer is kept for the
+    // issuer link on the next iteration.
+    RTCCertificateStats* certificate_stats =
+        new RTCCertificateStats(certificate_stats_id, timestamp);
+    certificate_stats->fingerprint = s->fingerprint;
+    certificate_stats->fingerprint_algorithm = s->fingerprint_algorithm;
+    certificate_stats->base64_certificate = s->base64_certificate;
+    if (prev_certificate_stats)
+      prev_certificate_stats->issuer_certificate_id = certificate_stats->id();
+    report->AddStats(std::unique_ptr<RTCCertificateStats>(certificate_stats));
+    prev_certificate_stats = certificate_stats;
+  }
+}
+
+// Ensures a "local-candidate"/"remote-candidate" stats object for `candidate`
+// exists in `report` (creating it if absent) and returns its stats id. The id
+// is derived from the candidate's own id, so repeated calls for the same
+// candidate are idempotent.
+const std::string& ProduceIceCandidateStats(Timestamp timestamp,
+                                            const cricket::Candidate& candidate,
+                                            bool is_local,
+                                            const std::string& transport_id,
+                                            RTCStatsReport* report) {
+  std::string id = "I" + candidate.id();
+  const RTCStats* stats = report->Get(id);
+  if (!stats) {
+    std::unique_ptr<RTCIceCandidateStats> candidate_stats;
+    if (is_local) {
+      candidate_stats =
+          std::make_unique<RTCLocalIceCandidateStats>(std::move(id), timestamp);
+    } else {
+      candidate_stats = std::make_unique<RTCRemoteIceCandidateStats>(
+          std::move(id), timestamp);
+    }
+    candidate_stats->transport_id = transport_id;
+    // Network type, relay protocol, URL and VPN details are only known for
+    // local candidates.
+    if (is_local) {
+      candidate_stats->network_type =
+          NetworkTypeToStatsType(candidate.network_type());
+      const std::string& candidate_type = candidate.type();
+      const std::string& relay_protocol = candidate.relay_protocol();
+      const std::string& url = candidate.url();
+      if (candidate_type == cricket::RELAY_PORT_TYPE ||
+          (candidate_type == cricket::PRFLX_PORT_TYPE &&
+           !relay_protocol.empty())) {
+        RTC_DCHECK(relay_protocol.compare("udp") == 0 ||
+                   relay_protocol.compare("tcp") == 0 ||
+                   relay_protocol.compare("tls") == 0);
+        candidate_stats->relay_protocol = relay_protocol;
+        if (!url.empty()) {
+          candidate_stats->url = url;
+        }
+      } else if (candidate_type == cricket::STUN_PORT_TYPE) {
+        if (!url.empty()) {
+          candidate_stats->url = url;
+        }
+      }
+      if (candidate.network_type() == rtc::ADAPTER_TYPE_VPN) {
+        candidate_stats->vpn = true;
+        // For VPNs, report the adapter type of the underlying interface.
+        candidate_stats->network_adapter_type =
+            std::string(NetworkTypeToStatsNetworkAdapterType(
+                candidate.underlying_type_for_vpn()));
+      } else {
+        candidate_stats->vpn = false;
+        candidate_stats->network_adapter_type = std::string(
+            NetworkTypeToStatsNetworkAdapterType(candidate.network_type()));
+      }
+    } else {
+      // We don't expect to know the adapter type of remote candidates.
+      RTC_DCHECK_EQ(rtc::ADAPTER_TYPE_UNKNOWN, candidate.network_type());
+      RTC_DCHECK_EQ(0, candidate.relay_protocol().compare(""));
+      RTC_DCHECK_EQ(rtc::ADAPTER_TYPE_UNKNOWN,
+                    candidate.underlying_type_for_vpn());
+    }
+    // `ip` is the legacy name of `address`; both are populated.
+    candidate_stats->ip = candidate.address().ipaddr().ToString();
+    candidate_stats->address = candidate.address().ipaddr().ToString();
+    candidate_stats->port = static_cast<int32_t>(candidate.address().port());
+    candidate_stats->protocol = candidate.protocol();
+    candidate_stats->candidate_type =
+        CandidateTypeToRTCIceCandidateType(candidate.type());
+    candidate_stats->priority = static_cast<int32_t>(candidate.priority());
+    candidate_stats->foundation = candidate.foundation();
+    auto related_address = candidate.related_address();
+    // A zero port indicates there is no related address.
+    if (related_address.port() != 0) {
+      candidate_stats->related_address = related_address.ipaddr().ToString();
+      candidate_stats->related_port =
+          static_cast<int32_t>(related_address.port());
+    }
+    candidate_stats->username_fragment = candidate.username();
+    if (candidate.protocol() == "tcp") {
+      candidate_stats->tcp_type = candidate.tcptype();
+    }
+
+    stats = candidate_stats.get();
+    report->AddStats(std::move(candidate_stats));
+  }
+  // A pre-existing entry must have the expected local/remote type.
+  RTC_DCHECK_EQ(stats->type(), is_local ? RTCLocalIceCandidateStats::kType
+                                        : RTCRemoteIceCandidateStats::kType);
+  return stats->id();
+}
+
+// Copies the optional APM echo metrics into `stats`. Templated because both
+// audio source stats and audio track stats expose the same member names.
+template <typename StatsType>
+void SetAudioProcessingStats(StatsType* stats,
+                             const AudioProcessingStats& apm_stats) {
+  if (apm_stats.echo_return_loss.has_value()) {
+    stats->echo_return_loss = *apm_stats.echo_return_loss;
+  }
+  if (apm_stats.echo_return_loss_enhancement.has_value()) {
+    stats->echo_return_loss_enhancement =
+        *apm_stats.echo_return_loss_enhancement;
+  }
+}
+
+} // namespace
+
+// Produces the filtered report for getStats(sender)/getStats(receiver):
+// finds the outbound-rtp (sender) or inbound-rtp (receiver) stats matching
+// the selector by SSRC and returns a copy of `report` reduced to those
+// stats plus everything they reference. An empty report (with the original
+// timestamp) is returned when nothing matches or the selector is null.
+rtc::scoped_refptr<RTCStatsReport>
+RTCStatsCollector::CreateReportFilteredBySelector(
+    bool filter_by_sender_selector,
+    rtc::scoped_refptr<const RTCStatsReport> report,
+    rtc::scoped_refptr<RtpSenderInternal> sender_selector,
+    rtc::scoped_refptr<RtpReceiverInternal> receiver_selector) {
+  std::vector<std::string> rtpstream_ids;
+  if (filter_by_sender_selector) {
+    // Filter mode: RTCStatsCollector::RequestInfo::kSenderSelector
+    if (sender_selector) {
+      // Find outbound-rtp(s) of the sender using ssrc lookup.
+      auto encodings = sender_selector->GetParametersInternal().encodings;
+      for (const auto* outbound_rtp :
+           report->GetStatsOfType<RTCOutboundRtpStreamStats>()) {
+        RTC_DCHECK(outbound_rtp->ssrc.is_defined());
+        auto it = std::find_if(encodings.begin(), encodings.end(),
+                               [ssrc = *outbound_rtp->ssrc](
+                                   const RtpEncodingParameters& encoding) {
+                                 return encoding.ssrc == ssrc;
+                               });
+        if (it != encodings.end()) {
+          rtpstream_ids.push_back(outbound_rtp->id());
+        }
+      }
+    }
+  } else {
+    // Filter mode: RTCStatsCollector::RequestInfo::kReceiverSelector
+    if (receiver_selector) {
+      // Find the inbound-rtp of the receiver using ssrc lookup.
+      absl::optional<uint32_t> ssrc;
+      // The receiver's SSRC is only accessible on the worker thread.
+      worker_thread_->BlockingCall([&] { ssrc = receiver_selector->ssrc(); });
+      if (ssrc.has_value()) {
+        for (const auto* inbound_rtp :
+             report->GetStatsOfType<RTCInboundRtpStreamStats>()) {
+          RTC_DCHECK(inbound_rtp->ssrc.is_defined());
+          if (*inbound_rtp->ssrc == *ssrc) {
+            rtpstream_ids.push_back(inbound_rtp->id());
+          }
+        }
+      }
+    }
+  }
+  if (rtpstream_ids.empty())
+    return RTCStatsReport::Create(report->timestamp());
+  return TakeReferencedStats(report->Copy(), rtpstream_ids);
+}
+
+// Deep-copies the local/remote SSL certificate stats; null members stay null.
+RTCStatsCollector::CertificateStatsPair
+RTCStatsCollector::CertificateStatsPair::Copy() const {
+  CertificateStatsPair copy;
+  copy.local = local ? local->Copy() : nullptr;
+  copy.remote = remote ? remote->Copy() : nullptr;
+  return copy;
+}
+
+// Unfiltered request: deliver the full report.
+RTCStatsCollector::RequestInfo::RequestInfo(
+    rtc::scoped_refptr<RTCStatsCollectorCallback> callback)
+    : RequestInfo(FilterMode::kAll, std::move(callback), nullptr, nullptr) {}
+
+// Sender-filtered request: deliver only stats related to `selector`.
+RTCStatsCollector::RequestInfo::RequestInfo(
+    rtc::scoped_refptr<RtpSenderInternal> selector,
+    rtc::scoped_refptr<RTCStatsCollectorCallback> callback)
+    : RequestInfo(FilterMode::kSenderSelector,
+                  std::move(callback),
+                  std::move(selector),
+                  nullptr) {}
+
+// Receiver-filtered request: deliver only stats related to `selector`.
+RTCStatsCollector::RequestInfo::RequestInfo(
+    rtc::scoped_refptr<RtpReceiverInternal> selector,
+    rtc::scoped_refptr<RTCStatsCollectorCallback> callback)
+    : RequestInfo(FilterMode::kReceiverSelector,
+                  std::move(callback),
+                  nullptr,
+                  std::move(selector)) {}
+
+// Delegated-to constructor. A callback is mandatory and at most one of the
+// two selectors may be set (both null means FilterMode::kAll).
+RTCStatsCollector::RequestInfo::RequestInfo(
+    RTCStatsCollector::RequestInfo::FilterMode filter_mode,
+    rtc::scoped_refptr<RTCStatsCollectorCallback> callback,
+    rtc::scoped_refptr<RtpSenderInternal> sender_selector,
+    rtc::scoped_refptr<RtpReceiverInternal> receiver_selector)
+    : filter_mode_(filter_mode),
+      callback_(std::move(callback)),
+      sender_selector_(std::move(sender_selector)),
+      receiver_selector_(std::move(receiver_selector)) {
+  RTC_DCHECK(callback_);
+  RTC_DCHECK(!sender_selector_ || !receiver_selector_);
+}
+
+// Factory: collectors are reference-counted and must be created on the heap.
+rtc::scoped_refptr<RTCStatsCollector> RTCStatsCollector::Create(
+    PeerConnectionInternal* pc,
+    int64_t cache_lifetime_us) {
+  return rtc::make_ref_counted<RTCStatsCollector>(pc, cache_lifetime_us);
+}
+
+// `cache_lifetime_us` controls how long a produced report may be served from
+// `cached_report_` before a fresh gathering pass is started.
+RTCStatsCollector::RTCStatsCollector(PeerConnectionInternal* pc,
+                                     int64_t cache_lifetime_us)
+    : pc_(pc),
+      signaling_thread_(pc->signaling_thread()),
+      worker_thread_(pc->worker_thread()),
+      network_thread_(pc->network_thread()),
+      num_pending_partial_reports_(0),
+      partial_report_timestamp_us_(0),
+      // Initially signaled so that a first request never blocks on a
+      // non-existent network-thread pass.
+      network_report_event_(true /* manual_reset */,
+                            true /* initially_signaled */),
+      cache_timestamp_us_(0),
+      cache_lifetime_us_(cache_lifetime_us) {
+  RTC_DCHECK(pc_);
+  RTC_DCHECK(signaling_thread_);
+  RTC_DCHECK(worker_thread_);
+  RTC_DCHECK(network_thread_);
+  RTC_DCHECK_GE(cache_lifetime_us_, 0);
+}
+
+// Destruction must not happen while a gathering pass is still in flight.
+RTCStatsCollector::~RTCStatsCollector() {
+  RTC_DCHECK_EQ(num_pending_partial_reports_, 0);
+}
+
+// getStats() — full, unfiltered report.
+void RTCStatsCollector::GetStatsReport(
+    rtc::scoped_refptr<RTCStatsCollectorCallback> callback) {
+  GetStatsReportInternal(RequestInfo(std::move(callback)));
+}
+
+// getStats(sender) — report filtered to the sender's outbound streams.
+void RTCStatsCollector::GetStatsReport(
+    rtc::scoped_refptr<RtpSenderInternal> selector,
+    rtc::scoped_refptr<RTCStatsCollectorCallback> callback) {
+  GetStatsReportInternal(RequestInfo(std::move(selector), std::move(callback)));
+}
+
+// getStats(receiver) — report filtered to the receiver's inbound stream.
+void RTCStatsCollector::GetStatsReport(
+    rtc::scoped_refptr<RtpReceiverInternal> selector,
+    rtc::scoped_refptr<RTCStatsCollectorCallback> callback) {
+  GetStatsReportInternal(RequestInfo(std::move(selector), std::move(callback)));
+}
+
+// Queues `request` and either serves it from the cached report (if still
+// fresh) or kicks off a new two-part gathering pass: one partial result is
+// produced synchronously on the signaling thread and one asynchronously on
+// the network thread; they are merged in MergeNetworkReport_s().
+void RTCStatsCollector::GetStatsReportInternal(
+    RTCStatsCollector::RequestInfo request) {
+  RTC_DCHECK_RUN_ON(signaling_thread_);
+  requests_.push_back(std::move(request));
+
+  // "Now" using a monotonically increasing timer.
+  int64_t cache_now_us = rtc::TimeMicros();
+  if (cached_report_ &&
+      cache_now_us - cache_timestamp_us_ <= cache_lifetime_us_) {
+    // We have a fresh cached report to deliver. Deliver asynchronously, since
+    // the caller may not be expecting a synchronous callback, and it avoids
+    // reentrancy problems.
+    signaling_thread_->PostTask(
+        absl::bind_front(&RTCStatsCollector::DeliverCachedReport,
+                         rtc::scoped_refptr<RTCStatsCollector>(this),
+                         cached_report_, std::move(requests_)));
+  } else if (!num_pending_partial_reports_) {
+    // Only start gathering stats if we're not already gathering stats. In the
+    // case of already gathering stats, `callback_` will be invoked when there
+    // are no more pending partial reports.
+
+    // "Now" using a system clock, relative to the UNIX epoch (Jan 1, 1970,
+    // UTC), in microseconds. The system clock could be modified and is not
+    // necessarily monotonically increasing.
+    Timestamp timestamp = Timestamp::Micros(rtc::TimeUTCMicros());
+
+    // One partial report from the signaling thread, one from the network
+    // thread.
+    num_pending_partial_reports_ = 2;
+    partial_report_timestamp_us_ = cache_now_us;
+
+    // Prepare `transceiver_stats_infos_` and `call_stats_` for use in
+    // `ProducePartialResultsOnNetworkThread` and
+    // `ProducePartialResultsOnSignalingThread`.
+    PrepareTransceiverStatsInfosAndCallStats_s_w_n();
+    // Don't touch `network_report_` on the signaling thread until
+    // ProducePartialResultsOnNetworkThread() has signaled the
+    // `network_report_event_`.
+    network_report_event_.Reset();
+    rtc::scoped_refptr<RTCStatsCollector> collector(this);
+    network_thread_->PostTask([collector,
+                               sctp_transport_name = pc_->sctp_transport_name(),
+                               timestamp]() mutable {
+      collector->ProducePartialResultsOnNetworkThread(
+          timestamp, std::move(sctp_transport_name));
+    });
+    ProducePartialResultsOnSignalingThread(timestamp);
+  }
+}
+
+// Drops the cached report and the cached per-transport certificates so the
+// next getStats() call gathers everything from scratch.
+void RTCStatsCollector::ClearCachedStatsReport() {
+  RTC_DCHECK_RUN_ON(signaling_thread_);
+  cached_report_ = nullptr;
+  // `cached_certificates_by_transport_` is also read on the network thread.
+  MutexLock lock(&cached_certificates_mutex_);
+  cached_certificates_by_transport_.clear();
+}
+
+// Synchronously completes any in-flight gathering pass (used e.g. during
+// teardown so callbacks fire before the collector goes away).
+void RTCStatsCollector::WaitForPendingRequest() {
+  RTC_DCHECK_RUN_ON(signaling_thread_);
+  // If a request is pending, blocks until the `network_report_event_` is
+  // signaled and then delivers the result. Otherwise this is a NO-OP.
+  MergeNetworkReport_s();
+}
+
+// Produces the signaling-thread half of the report into `partial_report_`
+// and decrements the pending-partial-reports counter.
+void RTCStatsCollector::ProducePartialResultsOnSignalingThread(
+    Timestamp timestamp) {
+  RTC_DCHECK_RUN_ON(signaling_thread_);
+  rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+
+  partial_report_ = RTCStatsReport::Create(timestamp);
+
+  ProducePartialResultsOnSignalingThreadImpl(timestamp, partial_report_.get());
+
+  // ProducePartialResultsOnSignalingThread() is running synchronously on the
+  // signaling thread, so it is always the first partial result delivered on the
+  // signaling thread. The request is not complete until MergeNetworkReport_s()
+  // happens; we don't have to do anything here.
+  RTC_DCHECK_GT(num_pending_partial_reports_, 1);
+  --num_pending_partial_reports_;
+}
+
+// The stats categories that can be produced without leaving the signaling
+// thread: media sources, the peer-connection summary and audio playout.
+void RTCStatsCollector::ProducePartialResultsOnSignalingThreadImpl(
+    Timestamp timestamp,
+    RTCStatsReport* partial_report) {
+  RTC_DCHECK_RUN_ON(signaling_thread_);
+  rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+
+  ProduceMediaSourceStats_s(timestamp, partial_report);
+  ProducePeerConnectionStats_s(timestamp, partial_report);
+  ProduceAudioPlayoutStats_s(timestamp, partial_report);
+}
+
+// Produces the network-thread half of the report into `network_report_`,
+// then signals `network_report_event_` and posts MergeNetworkReport_s() back
+// to the signaling thread.
+void RTCStatsCollector::ProducePartialResultsOnNetworkThread(
+    Timestamp timestamp,
+    absl::optional<std::string> sctp_transport_name) {
+  TRACE_EVENT0("webrtc",
+               "RTCStatsCollector::ProducePartialResultsOnNetworkThread");
+  RTC_DCHECK_RUN_ON(network_thread_);
+  rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+
+  // Touching `network_report_` on this thread is safe by this method because
+  // `network_report_event_` is reset before this method is invoked.
+  network_report_ = RTCStatsReport::Create(timestamp);
+
+  ProduceDataChannelStats_n(timestamp, network_report_.get());
+
+  // Collect the set of transports to query: the SCTP transport (if any) plus
+  // every transport used by a transceiver.
+  std::set<std::string> transport_names;
+  if (sctp_transport_name) {
+    transport_names.emplace(std::move(*sctp_transport_name));
+  }
+
+  for (const auto& info : transceiver_stats_infos_) {
+    if (info.transport_name)
+      transport_names.insert(*info.transport_name);
+  }
+
+  std::map<std::string, cricket::TransportStats> transport_stats_by_name =
+      pc_->GetTransportStatsByNames(transport_names);
+  std::map<std::string, CertificateStatsPair> transport_cert_stats =
+      PrepareTransportCertificateStats_n(transport_stats_by_name);
+
+  ProducePartialResultsOnNetworkThreadImpl(timestamp, transport_stats_by_name,
+                                           transport_cert_stats,
+                                           network_report_.get());
+
+  // Signal that it is now safe to touch `network_report_` on the signaling
+  // thread, and post a task to merge it into the final results.
+  network_report_event_.Set();
+  rtc::scoped_refptr<RTCStatsCollector> collector(this);
+  signaling_thread_->PostTask(
+      [collector] { collector->MergeNetworkReport_s(); });
+}
+
+// The stats categories that require network-thread data: certificates, ICE
+// candidates/pairs, transports and RTP streams.
+void RTCStatsCollector::ProducePartialResultsOnNetworkThreadImpl(
+    Timestamp timestamp,
+    const std::map<std::string, cricket::TransportStats>&
+        transport_stats_by_name,
+    const std::map<std::string, CertificateStatsPair>& transport_cert_stats,
+    RTCStatsReport* partial_report) {
+  RTC_DCHECK_RUN_ON(network_thread_);
+  rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+
+  ProduceCertificateStats_n(timestamp, transport_cert_stats, partial_report);
+  ProduceIceCandidateAndPairStats_n(timestamp, transport_stats_by_name,
+                                    call_stats_, partial_report);
+  ProduceTransportStats_n(timestamp, transport_stats_by_name,
+                          transport_cert_stats, partial_report);
+  ProduceRTPStreamStats_n(timestamp, transceiver_stats_infos_, partial_report);
+}
+
+// Merges the network-thread partial report into the signaling-thread partial
+// report, caches the combined result and delivers it to all queued requests.
+// Safe to call early (from WaitForPendingRequest()); a second invocation
+// finds `network_report_` null and returns immediately.
+void RTCStatsCollector::MergeNetworkReport_s() {
+  RTC_DCHECK_RUN_ON(signaling_thread_);
+  // The `network_report_event_` must be signaled for it to be safe to touch
+  // `network_report_`. This is normally not blocking, but if
+  // WaitForPendingRequest() is called while a request is pending, we might have
+  // to wait until the network thread is done touching `network_report_`.
+  network_report_event_.Wait(rtc::Event::kForever);
+  if (!network_report_) {
+    // Normally, MergeNetworkReport_s() is executed because it is posted from
+    // the network thread. But if WaitForPendingRequest() is called while a
+    // request is pending, an early call to MergeNetworkReport_s() is made,
+    // merging the report and setting `network_report_` to null. If so, when the
+    // previously posted MergeNetworkReport_s() is later executed, the report is
+    // already null and nothing needs to be done here.
+    return;
+  }
+  RTC_DCHECK_GT(num_pending_partial_reports_, 0);
+  RTC_DCHECK(partial_report_);
+  partial_report_->TakeMembersFrom(network_report_);
+  network_report_ = nullptr;
+  --num_pending_partial_reports_;
+  // `network_report_` is currently the only partial report collected
+  // asynchronously, so `num_pending_partial_reports_` must now be 0 and we are
+  // ready to deliver the result.
+  RTC_DCHECK_EQ(num_pending_partial_reports_, 0);
+  cache_timestamp_us_ = partial_report_timestamp_us_;
+  cached_report_ = partial_report_;
+  partial_report_ = nullptr;
+  transceiver_stats_infos_.clear();
+  // Trace WebRTC Stats when getStats is called on Javascript.
+  // This allows access to WebRTC stats from trace logs. To enable them,
+  // select the "webrtc_stats" category when recording traces.
+  TRACE_EVENT_INSTANT1("webrtc_stats", "webrtc_stats", "report",
+                       cached_report_->ToJson());
+
+  // Deliver report and clear `requests_`.
+  std::vector<RequestInfo> requests;
+  requests.swap(requests_);
+  DeliverCachedReport(cached_report_, std::move(requests));
+}
+
+// Invokes each request's callback with either the full cached report or, for
+// sender/receiver-filtered requests, a report reduced by
+// CreateReportFilteredBySelector().
+void RTCStatsCollector::DeliverCachedReport(
+    rtc::scoped_refptr<const RTCStatsReport> cached_report,
+    std::vector<RTCStatsCollector::RequestInfo> requests) {
+  RTC_DCHECK_RUN_ON(signaling_thread_);
+  RTC_DCHECK(!requests.empty());
+  RTC_DCHECK(cached_report);
+
+  for (const RequestInfo& request : requests) {
+    if (request.filter_mode() == RequestInfo::FilterMode::kAll) {
+      request.callback()->OnStatsDelivered(cached_report);
+    } else {
+      bool filter_by_sender_selector;
+      rtc::scoped_refptr<RtpSenderInternal> sender_selector;
+      rtc::scoped_refptr<RtpReceiverInternal> receiver_selector;
+      if (request.filter_mode() == RequestInfo::FilterMode::kSenderSelector) {
+        filter_by_sender_selector = true;
+        sender_selector = request.sender_selector();
+      } else {
+        RTC_DCHECK(request.filter_mode() ==
+                   RequestInfo::FilterMode::kReceiverSelector);
+        filter_by_sender_selector = false;
+        receiver_selector = request.receiver_selector();
+      }
+      request.callback()->OnStatsDelivered(CreateReportFilteredBySelector(
+          filter_by_sender_selector, cached_report, sender_selector,
+          receiver_selector));
+    }
+  }
+}
+
+// Emits "certificate" stats for the local and remote certificate chains of
+// every transport in `transport_cert_stats`.
+void RTCStatsCollector::ProduceCertificateStats_n(
+    Timestamp timestamp,
+    const std::map<std::string, CertificateStatsPair>& transport_cert_stats,
+    RTCStatsReport* report) const {
+  RTC_DCHECK_RUN_ON(network_thread_);
+  rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+
+  for (const auto& transport_cert_stats_pair : transport_cert_stats) {
+    if (transport_cert_stats_pair.second.local) {
+      ProduceCertificateStatsFromSSLCertificateStats(
+          timestamp, *transport_cert_stats_pair.second.local.get(), report);
+    }
+    if (transport_cert_stats_pair.second.remote) {
+      ProduceCertificateStatsFromSSLCertificateStats(
+          timestamp, *transport_cert_stats_pair.second.remote.get(), report);
+    }
+  }
+}
+
+// Emits one "data-channel" stats object per data channel reported by the
+// peer connection.
+void RTCStatsCollector::ProduceDataChannelStats_n(
+    Timestamp timestamp,
+    RTCStatsReport* report) const {
+  RTC_DCHECK_RUN_ON(network_thread_);
+  rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+  std::vector<DataChannelStats> data_stats = pc_->GetDataChannelStats();
+  for (const auto& stats : data_stats) {
+    auto data_channel_stats = std::make_unique<RTCDataChannelStats>(
+        "D" + rtc::ToString(stats.internal_id), timestamp);
+    // NOTE(review): `stats` is const here, so these std::move calls degrade
+    // to copies; harmless but misleading.
+    data_channel_stats->label = std::move(stats.label);
+    data_channel_stats->protocol = std::move(stats.protocol);
+    if (stats.id >= 0) {
+      // Do not set this value before the DTLS handshake is finished
+      // and filter out the magic value -1.
+      data_channel_stats->data_channel_identifier = stats.id;
+    }
+    data_channel_stats->state = DataStateToRTCDataChannelState(stats.state);
+    data_channel_stats->messages_sent = stats.messages_sent;
+    data_channel_stats->bytes_sent = stats.bytes_sent;
+    data_channel_stats->messages_received = stats.messages_received;
+    data_channel_stats->bytes_received = stats.bytes_received;
+    report->AddStats(std::move(data_channel_stats));
+  }
+}
+
+// Emits "candidate-pair" stats for every ICE connection of every transport
+// channel, plus "local-candidate"/"remote-candidate" stats for the candidates
+// they reference. Bandwidth estimates from `call_stats` are attached to the
+// selected ("best") connection only.
+void RTCStatsCollector::ProduceIceCandidateAndPairStats_n(
+    Timestamp timestamp,
+    const std::map<std::string, cricket::TransportStats>&
+        transport_stats_by_name,
+    const Call::Stats& call_stats,
+    RTCStatsReport* report) const {
+  RTC_DCHECK_RUN_ON(network_thread_);
+  rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+
+  for (const auto& entry : transport_stats_by_name) {
+    const std::string& transport_name = entry.first;
+    const cricket::TransportStats& transport_stats = entry.second;
+    for (const auto& channel_stats : transport_stats.channel_stats) {
+      std::string transport_id = RTCTransportStatsIDFromTransportChannel(
+          transport_name, channel_stats.component);
+      for (const auto& info :
+           channel_stats.ice_transport_stats.connection_infos) {
+        auto candidate_pair_stats = std::make_unique<RTCIceCandidatePairStats>(
+            RTCIceCandidatePairStatsIDFromConnectionInfo(info), timestamp);
+
+        candidate_pair_stats->transport_id = transport_id;
+        // ProduceIceCandidateStats() creates the candidate stats on first use
+        // and returns their ids.
+        candidate_pair_stats->local_candidate_id = ProduceIceCandidateStats(
+            timestamp, info.local_candidate, true, transport_id, report);
+        candidate_pair_stats->remote_candidate_id = ProduceIceCandidateStats(
+            timestamp, info.remote_candidate, false, transport_id, report);
+        candidate_pair_stats->state =
+            IceCandidatePairStateToRTCStatsIceCandidatePairState(info.state);
+        candidate_pair_stats->priority = info.priority;
+        candidate_pair_stats->nominated = info.nominated;
+        // TODO(hbos): This writable is different than the spec. It goes to
+        // false after a certain amount of time without a response passes.
+        // https://crbug.com/633550
+        candidate_pair_stats->writable = info.writable;
+        // Note that sent_total_packets includes discarded packets but
+        // sent_total_bytes does not.
+        candidate_pair_stats->packets_sent = static_cast<uint64_t>(
+            info.sent_total_packets - info.sent_discarded_packets);
+        candidate_pair_stats->packets_discarded_on_send =
+            static_cast<uint64_t>(info.sent_discarded_packets);
+        candidate_pair_stats->packets_received =
+            static_cast<uint64_t>(info.packets_received);
+        candidate_pair_stats->bytes_sent =
+            static_cast<uint64_t>(info.sent_total_bytes);
+        candidate_pair_stats->bytes_discarded_on_send =
+            static_cast<uint64_t>(info.sent_discarded_bytes);
+        candidate_pair_stats->bytes_received =
+            static_cast<uint64_t>(info.recv_total_bytes);
+        // RTT members are converted from milliseconds to seconds.
+        candidate_pair_stats->total_round_trip_time =
+            static_cast<double>(info.total_round_trip_time_ms) /
+            rtc::kNumMillisecsPerSec;
+        if (info.current_round_trip_time_ms.has_value()) {
+          candidate_pair_stats->current_round_trip_time =
+              static_cast<double>(*info.current_round_trip_time_ms) /
+              rtc::kNumMillisecsPerSec;
+        }
+        if (info.best_connection) {
+          // The bandwidth estimations we have are for the selected candidate
+          // pair ("info.best_connection").
+          RTC_DCHECK_GE(call_stats.send_bandwidth_bps, 0);
+          RTC_DCHECK_GE(call_stats.recv_bandwidth_bps, 0);
+          if (call_stats.send_bandwidth_bps > 0) {
+            candidate_pair_stats->available_outgoing_bitrate =
+                static_cast<double>(call_stats.send_bandwidth_bps);
+          }
+          if (call_stats.recv_bandwidth_bps > 0) {
+            candidate_pair_stats->available_incoming_bitrate =
+                static_cast<double>(call_stats.recv_bandwidth_bps);
+          }
+        }
+        candidate_pair_stats->requests_received =
+            static_cast<uint64_t>(info.recv_ping_requests);
+        candidate_pair_stats->requests_sent =
+            static_cast<uint64_t>(info.sent_ping_requests_total);
+        candidate_pair_stats->responses_received =
+            static_cast<uint64_t>(info.recv_ping_responses);
+        candidate_pair_stats->responses_sent =
+            static_cast<uint64_t>(info.sent_ping_responses);
+        RTC_DCHECK_GE(info.sent_ping_requests_total,
+                      info.sent_ping_requests_before_first_response);
+        // Consent requests are the pings sent after the first response, per
+        // RFC 7675 consent freshness.
+        candidate_pair_stats->consent_requests_sent = static_cast<uint64_t>(
+            info.sent_ping_requests_total -
+            info.sent_ping_requests_before_first_response);
+
+        if (info.last_data_received.has_value()) {
+          candidate_pair_stats->last_packet_received_timestamp =
+              static_cast<double>(info.last_data_received->ms());
+        }
+        if (info.last_data_sent) {
+          candidate_pair_stats->last_packet_sent_timestamp =
+              static_cast<double>(info.last_data_sent->ms());
+        }
+
+        report->AddStats(std::move(candidate_pair_stats));
+      }
+
+      // Produce local candidate stats. If a transport exists these will already
+      // have been produced.
+      for (const auto& candidate_stats :
+           channel_stats.ice_transport_stats.candidate_stats_list) {
+        const auto& candidate = candidate_stats.candidate();
+        ProduceIceCandidateStats(timestamp, candidate, true, transport_id,
+                                 report);
+      }
+    }
+  }
+}
+
+// Produces one "media-source" stats object per sender that currently has a
+// track attached: an RTCAudioSourceStats for audio tracks and an
+// RTCVideoSourceStats for video tracks. These describe the media handed to
+// WebRTC before encoding. Runs on the signaling thread over the
+// pre-gathered `transceiver_stats_infos_`.
+void RTCStatsCollector::ProduceMediaSourceStats_s(
+    Timestamp timestamp,
+    RTCStatsReport* report) const {
+  RTC_DCHECK_RUN_ON(signaling_thread_);
+  rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+
+  for (const RtpTransceiverStatsInfo& transceiver_stats_info :
+       transceiver_stats_infos_) {
+    const auto& track_media_info_map =
+        transceiver_stats_info.track_media_info_map;
+    for (const auto& sender : transceiver_stats_info.transceiver->senders()) {
+      const auto& sender_internal = sender->internal();
+      const auto& track = sender_internal->track();
+      // A sender without an attached track has no media source to report.
+      if (!track)
+        continue;
+      // TODO(https://crbug.com/webrtc/10771): The same track could be attached
+      // to multiple senders which should result in multiple senders referencing
+      // the same media-source stats. When all media source related metrics are
+      // moved to the track's source (e.g. input frame rate is moved from
+      // cricket::VideoSenderInfo to VideoTrackSourceInterface::Stats and audio
+      // levels are moved to the corresponding audio track/source object), don't
+      // create separate media source stats objects on a per-attachment basis.
+      std::unique_ptr<RTCMediaSourceStats> media_source_stats;
+      if (track->kind() == MediaStreamTrackInterface::kAudioKind) {
+        AudioTrackInterface* audio_track =
+            static_cast<AudioTrackInterface*>(track.get());
+        auto audio_source_stats = std::make_unique<RTCAudioSourceStats>(
+            RTCMediaSourceStatsIDFromKindAndAttachment(
+                cricket::MEDIA_TYPE_AUDIO, sender_internal->AttachmentId()),
+            timestamp);
+        // TODO(https://crbug.com/webrtc/10771): We shouldn't need to have an
+        // SSRC assigned (there shouldn't need to exist a send-stream, created
+        // by an O/A exchange) in order to read audio media-source stats.
+        // TODO(https://crbug.com/webrtc/8694): SSRC 0 shouldn't be a magic
+        // value indicating no SSRC.
+        if (sender_internal->ssrc() != 0) {
+          auto* voice_sender_info =
+              track_media_info_map.GetVoiceSenderInfoBySsrc(
+                  sender_internal->ssrc());
+          if (voice_sender_info) {
+            audio_source_stats->audio_level = DoubleAudioLevelFromIntAudioLevel(
+                voice_sender_info->audio_level);
+            audio_source_stats->total_audio_energy =
+                voice_sender_info->total_input_energy;
+            audio_source_stats->total_samples_duration =
+                voice_sender_info->total_input_duration;
+            SetAudioProcessingStats(audio_source_stats.get(),
+                                    voice_sender_info->apm_statistics);
+          }
+        }
+        // Audio processor may be attached to either the track or the send
+        // stream, so look in both places.
+        auto audio_processor(audio_track->GetAudioProcessor());
+        if (audio_processor.get()) {
+          // The `has_remote_tracks` argument is obsolete; makes no difference
+          // if it's set to true or false.
+          AudioProcessorInterface::AudioProcessorStatistics ap_stats =
+              audio_processor->GetStats(/*has_remote_tracks=*/false);
+          SetAudioProcessingStats(audio_source_stats.get(),
+                                  ap_stats.apm_statistics);
+        }
+        media_source_stats = std::move(audio_source_stats);
+      } else {
+        RTC_DCHECK_EQ(MediaStreamTrackInterface::kVideoKind, track->kind());
+        auto video_source_stats = std::make_unique<RTCVideoSourceStats>(
+            RTCMediaSourceStatsIDFromKindAndAttachment(
+                cricket::MEDIA_TYPE_VIDEO, sender_internal->AttachmentId()),
+            timestamp);
+        auto* video_track = static_cast<VideoTrackInterface*>(track.get());
+        auto* video_source = video_track->GetSource();
+        VideoTrackSourceInterface::Stats source_stats;
+        // Width/height are only reported when the source can provide them.
+        if (video_source && video_source->GetStats(&source_stats)) {
+          video_source_stats->width = source_stats.input_width;
+          video_source_stats->height = source_stats.input_height;
+        }
+        // TODO(https://crbug.com/webrtc/10771): We shouldn't need to have an
+        // SSRC assigned (there shouldn't need to exist a send-stream, created
+        // by an O/A exchange) in order to get framesPerSecond.
+        // TODO(https://crbug.com/webrtc/8694): SSRC 0 shouldn't be a magic
+        // value indicating no SSRC.
+        if (sender_internal->ssrc() != 0) {
+          auto* video_sender_info =
+              track_media_info_map.GetVideoSenderInfoBySsrc(
+                  sender_internal->ssrc());
+          if (video_sender_info) {
+            video_source_stats->frames_per_second =
+                video_sender_info->framerate_input;
+            video_source_stats->frames = video_sender_info->frames;
+          }
+        }
+        media_source_stats = std::move(video_source_stats);
+      }
+      // Fields common to both the audio and the video flavor.
+      media_source_stats->track_identifier = track->id();
+      media_source_stats->kind = track->kind();
+      report->AddStats(std::move(media_source_stats));
+    }
+  }
+}
+
+// Emits the single "peer-connection" stats object carrying the lifetime
+// data channel counters maintained by OnSctpDataChannelStateChanged().
+void RTCStatsCollector::ProducePeerConnectionStats_s(
+    Timestamp timestamp,
+    RTCStatsReport* report) const {
+  RTC_DCHECK_RUN_ON(signaling_thread_);
+  rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+
+  // There is exactly one peer connection, so a constant ID is used.
+  auto pc_stats = std::make_unique<RTCPeerConnectionStats>("P", timestamp);
+  pc_stats->data_channels_opened = internal_record_.data_channels_opened;
+  pc_stats->data_channels_closed = internal_record_.data_channels_closed;
+  report->AddStats(std::move(pc_stats));
+}
+
+// Emits the audio playout ("media-playout") stats object, provided audio
+// device stats were gathered earlier on the worker thread.
+void RTCStatsCollector::ProduceAudioPlayoutStats_s(
+    Timestamp timestamp,
+    RTCStatsReport* report) const {
+  RTC_DCHECK_RUN_ON(signaling_thread_);
+  rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+
+  if (!audio_device_stats_) {
+    // No audio device stats were captured; nothing to report.
+    return;
+  }
+  report->AddStats(CreateAudioPlayoutStats(*audio_device_stats_, timestamp));
+}
+
+// Dispatches each transceiver's stats to the audio- or video-specific
+// RTP stream stats producer, based on the transceiver's media type.
+void RTCStatsCollector::ProduceRTPStreamStats_n(
+    Timestamp timestamp,
+    const std::vector<RtpTransceiverStatsInfo>& transceiver_stats_infos,
+    RTCStatsReport* report) const {
+  RTC_DCHECK_RUN_ON(network_thread_);
+  rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+
+  for (const RtpTransceiverStatsInfo& transceiver_stats :
+       transceiver_stats_infos) {
+    switch (transceiver_stats.media_type) {
+      case cricket::MEDIA_TYPE_AUDIO:
+        ProduceAudioRTPStreamStats_n(timestamp, transceiver_stats, report);
+        break;
+      case cricket::MEDIA_TYPE_VIDEO:
+        ProduceVideoRTPStreamStats_n(timestamp, transceiver_stats, report);
+        break;
+      default:
+        // Only audio and video transceivers are expected here.
+        RTC_DCHECK_NOTREACHED();
+        break;
+    }
+  }
+}
+
+// Produces the audio "inbound-rtp", "remote-outbound-rtp", "outbound-rtp"
+// and "remote-inbound-rtp" stats objects for one audio transceiver. Order
+// matters: outbound stats must be in the report before the remote-inbound
+// stats that reference them.
+void RTCStatsCollector::ProduceAudioRTPStreamStats_n(
+    Timestamp timestamp,
+    const RtpTransceiverStatsInfo& stats,
+    RTCStatsReport* report) const {
+  RTC_DCHECK_RUN_ON(network_thread_);
+  rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+
+  // A mid and transport name are required to build the stats IDs that tie
+  // streams to their transport; without them nothing can be reported.
+  if (!stats.mid || !stats.transport_name) {
+    return;
+  }
+  RTC_DCHECK(stats.track_media_info_map.voice_media_info().has_value());
+  std::string mid = *stats.mid;
+  std::string transport_id = RTCTransportStatsIDFromTransportChannel(
+      *stats.transport_name, cricket::ICE_CANDIDATE_COMPONENT_RTP);
+  // Inbound and remote-outbound.
+  // The remote-outbound stats are based on RTCP sender reports sent from the
+  // remote endpoint providing metrics about the remote outbound streams.
+  for (const cricket::VoiceReceiverInfo& voice_receiver_info :
+       stats.track_media_info_map.voice_media_info()->receivers) {
+    if (!voice_receiver_info.connected())
+      continue;
+    // Inbound.
+    auto inbound_audio = CreateInboundAudioStreamStats(
+        *stats.track_media_info_map.voice_media_info(), voice_receiver_info,
+        transport_id, mid, timestamp, report);
+    // TODO(hta): This lookup should look for the sender, not the track.
+    rtc::scoped_refptr<AudioTrackInterface> audio_track =
+        stats.track_media_info_map.GetAudioTrack(voice_receiver_info);
+    if (audio_track) {
+      inbound_audio->track_identifier = audio_track->id();
+    }
+    // Reference the singleton audio playout stats object when this
+    // transceiver is currently receiving audio.
+    if (audio_device_stats_ && stats.media_type == cricket::MEDIA_TYPE_AUDIO &&
+        stats.current_direction &&
+        (*stats.current_direction == RtpTransceiverDirection::kSendRecv ||
+         *stats.current_direction == RtpTransceiverDirection::kRecvOnly)) {
+      inbound_audio->playout_id = kAudioPlayoutSingletonId;
+    }
+    auto* inbound_audio_ptr = report->TryAddStats(std::move(inbound_audio));
+    if (!inbound_audio_ptr) {
+      RTC_LOG(LS_ERROR)
+          << "Unable to add audio 'inbound-rtp' to report, ID is not unique.";
+      continue;
+    }
+    // Remote-outbound.
+    auto remote_outbound_audio = CreateRemoteOutboundAudioStreamStats(
+        voice_receiver_info, mid, *inbound_audio_ptr, transport_id);
+    // Add stats.
+    if (remote_outbound_audio) {
+      // When the remote outbound stats are available, the remote ID for the
+      // local inbound stats is set.
+      auto* remote_outbound_audio_ptr =
+          report->TryAddStats(std::move(remote_outbound_audio));
+      if (remote_outbound_audio_ptr) {
+        inbound_audio_ptr->remote_id = remote_outbound_audio_ptr->id();
+      } else {
+        RTC_LOG(LS_ERROR) << "Unable to add audio 'remote-outbound-rtp' to "
+                          << "report, ID is not unique.";
+      }
+    }
+  }
+  // Outbound. The id->stats map is kept so that remote-inbound stats below
+  // can reference the outbound stats they describe.
+  std::map<std::string, RTCOutboundRtpStreamStats*> audio_outbound_rtps;
+  for (const cricket::VoiceSenderInfo& voice_sender_info :
+       stats.track_media_info_map.voice_media_info()->senders) {
+    if (!voice_sender_info.connected())
+      continue;
+    auto outbound_audio = CreateOutboundRTPStreamStatsFromVoiceSenderInfo(
+        transport_id, mid, *stats.track_media_info_map.voice_media_info(),
+        voice_sender_info, timestamp, report);
+    rtc::scoped_refptr<AudioTrackInterface> audio_track =
+        stats.track_media_info_map.GetAudioTrack(voice_sender_info);
+    if (audio_track) {
+      int attachment_id =
+          stats.track_media_info_map.GetAttachmentIdByTrack(audio_track.get())
+              .value();
+      outbound_audio->media_source_id =
+          RTCMediaSourceStatsIDFromKindAndAttachment(cricket::MEDIA_TYPE_AUDIO,
+                                                     attachment_id);
+    }
+    auto audio_outbound_pair =
+        std::make_pair(outbound_audio->id(), outbound_audio.get());
+    if (report->TryAddStats(std::move(outbound_audio))) {
+      audio_outbound_rtps.insert(std::move(audio_outbound_pair));
+    } else {
+      RTC_LOG(LS_ERROR)
+          << "Unable to add audio 'outbound-rtp' to report, ID is not unique.";
+    }
+  }
+  // Remote-inbound.
+  // These are Report Block-based, information sent from the remote endpoint,
+  // providing metrics about our Outbound streams. We take advantage of the fact
+  // that RTCOutboundRtpStreamStats, RTCCodecStats and RTCTransport have already
+  // been added to the report.
+  for (const cricket::VoiceSenderInfo& voice_sender_info :
+       stats.track_media_info_map.voice_media_info()->senders) {
+    for (const auto& report_block_data : voice_sender_info.report_block_datas) {
+      report->AddStats(ProduceRemoteInboundRtpStreamStatsFromReportBlockData(
+          transport_id, report_block_data, cricket::MEDIA_TYPE_AUDIO,
+          audio_outbound_rtps, *report));
+    }
+  }
+}
+
+// Produces the video "inbound-rtp", "outbound-rtp" and "remote-inbound-rtp"
+// stats objects for one video transceiver. Unlike the audio variant there is
+// no remote-outbound production here. Order matters: outbound stats must be
+// in the report before the remote-inbound stats that reference them.
+void RTCStatsCollector::ProduceVideoRTPStreamStats_n(
+    Timestamp timestamp,
+    const RtpTransceiverStatsInfo& stats,
+    RTCStatsReport* report) const {
+  RTC_DCHECK_RUN_ON(network_thread_);
+  rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+
+  // A mid and transport name are required to build the stats IDs that tie
+  // streams to their transport; without them nothing can be reported.
+  if (!stats.mid || !stats.transport_name) {
+    return;
+  }
+  RTC_DCHECK(stats.track_media_info_map.video_media_info().has_value());
+  std::string mid = *stats.mid;
+  std::string transport_id = RTCTransportStatsIDFromTransportChannel(
+      *stats.transport_name, cricket::ICE_CANDIDATE_COMPONENT_RTP);
+  // Inbound
+  for (const cricket::VideoReceiverInfo& video_receiver_info :
+       stats.track_media_info_map.video_media_info()->receivers) {
+    if (!video_receiver_info.connected())
+      continue;
+    auto inbound_video = CreateInboundRTPStreamStatsFromVideoReceiverInfo(
+        transport_id, mid, *stats.track_media_info_map.video_media_info(),
+        video_receiver_info, timestamp, report);
+    rtc::scoped_refptr<VideoTrackInterface> video_track =
+        stats.track_media_info_map.GetVideoTrack(video_receiver_info);
+    if (video_track) {
+      inbound_video->track_identifier = video_track->id();
+    }
+    if (!report->TryAddStats(std::move(inbound_video))) {
+      RTC_LOG(LS_ERROR)
+          << "Unable to add video 'inbound-rtp' to report, ID is not unique.";
+    }
+  }
+  // Outbound. The id->stats map is kept so that remote-inbound stats below
+  // can reference the outbound stats they describe.
+  std::map<std::string, RTCOutboundRtpStreamStats*> video_outbound_rtps;
+  for (const cricket::VideoSenderInfo& video_sender_info :
+       stats.track_media_info_map.video_media_info()->senders) {
+    if (!video_sender_info.connected())
+      continue;
+    auto outbound_video = CreateOutboundRTPStreamStatsFromVideoSenderInfo(
+        transport_id, mid, *stats.track_media_info_map.video_media_info(),
+        video_sender_info, timestamp, report);
+    rtc::scoped_refptr<VideoTrackInterface> video_track =
+        stats.track_media_info_map.GetVideoTrack(video_sender_info);
+    if (video_track) {
+      int attachment_id =
+          stats.track_media_info_map.GetAttachmentIdByTrack(video_track.get())
+              .value();
+      outbound_video->media_source_id =
+          RTCMediaSourceStatsIDFromKindAndAttachment(cricket::MEDIA_TYPE_VIDEO,
+                                                     attachment_id);
+    }
+    auto video_outbound_pair =
+        std::make_pair(outbound_video->id(), outbound_video.get());
+    if (report->TryAddStats(std::move(outbound_video))) {
+      video_outbound_rtps.insert(std::move(video_outbound_pair));
+    } else {
+      RTC_LOG(LS_ERROR)
+          << "Unable to add video 'outbound-rtp' to report, ID is not unique.";
+    }
+  }
+  // Remote-inbound
+  // These are Report Block-based, information sent from the remote endpoint,
+  // providing metrics about our Outbound streams. We take advantage of the fact
+  // that RTCOutboundRtpStreamStats, RTCCodecStats and RTCTransport have already
+  // been added to the report.
+  for (const cricket::VideoSenderInfo& video_sender_info :
+       stats.track_media_info_map.video_media_info()->senders) {
+    for (const auto& report_block_data : video_sender_info.report_block_datas) {
+      report->AddStats(ProduceRemoteInboundRtpStreamStatsFromReportBlockData(
+          transport_id, report_block_data, cricket::MEDIA_TYPE_VIDEO,
+          video_outbound_rtps, *report));
+    }
+  }
+}
+
+// Produces one RTCTransportStats object per transport channel (RTP and,
+// when present, RTCP). Each object carries ICE counters, DTLS/TLS crypto
+// parameters, and references to the selected candidate pair, the RTCP
+// transport and the local/remote certificates.
+void RTCStatsCollector::ProduceTransportStats_n(
+    Timestamp timestamp,
+    const std::map<std::string, cricket::TransportStats>&
+        transport_stats_by_name,
+    const std::map<std::string, CertificateStatsPair>& transport_cert_stats,
+    RTCStatsReport* report) const {
+  RTC_DCHECK_RUN_ON(network_thread_);
+  rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+
+  for (const auto& entry : transport_stats_by_name) {
+    const std::string& transport_name = entry.first;
+    const cricket::TransportStats& transport_stats = entry.second;
+
+    // Get reference to RTCP channel, if it exists.
+    std::string rtcp_transport_stats_id;
+    for (const cricket::TransportChannelStats& channel_stats :
+         transport_stats.channel_stats) {
+      if (channel_stats.component == cricket::ICE_CANDIDATE_COMPONENT_RTCP) {
+        rtcp_transport_stats_id = RTCTransportStatsIDFromTransportChannel(
+            transport_name, channel_stats.component);
+        break;
+      }
+    }
+
+    // Get reference to local and remote certificates of this transport, if
+    // they exist.
+    const auto certificate_stats_it =
+        transport_cert_stats.find(transport_name);
+    std::string local_certificate_id, remote_certificate_id;
+    RTC_DCHECK(certificate_stats_it != transport_cert_stats.cend());
+    if (certificate_stats_it != transport_cert_stats.cend()) {
+      if (certificate_stats_it->second.local) {
+        local_certificate_id = RTCCertificateIDFromFingerprint(
+            certificate_stats_it->second.local->fingerprint);
+      }
+      if (certificate_stats_it->second.remote) {
+        remote_certificate_id = RTCCertificateIDFromFingerprint(
+            certificate_stats_it->second.remote->fingerprint);
+      }
+    }
+
+    // There is one transport stats for each channel.
+    for (const cricket::TransportChannelStats& channel_stats :
+         transport_stats.channel_stats) {
+      // Named `rtc_transport_stats` to avoid shadowing the
+      // cricket::TransportStats `transport_stats` loop variable above.
+      auto rtc_transport_stats = std::make_unique<RTCTransportStats>(
+          RTCTransportStatsIDFromTransportChannel(transport_name,
+                                                  channel_stats.component),
+          timestamp);
+      rtc_transport_stats->packets_sent =
+          channel_stats.ice_transport_stats.packets_sent;
+      rtc_transport_stats->packets_received =
+          channel_stats.ice_transport_stats.packets_received;
+      rtc_transport_stats->bytes_sent =
+          channel_stats.ice_transport_stats.bytes_sent;
+      rtc_transport_stats->bytes_received =
+          channel_stats.ice_transport_stats.bytes_received;
+      rtc_transport_stats->dtls_state =
+          DtlsTransportStateToRTCDtlsTransportState(channel_stats.dtls_state);
+      rtc_transport_stats->selected_candidate_pair_changes =
+          channel_stats.ice_transport_stats.selected_candidate_pair_changes;
+      rtc_transport_stats->ice_role =
+          IceRoleToRTCIceRole(channel_stats.ice_transport_stats.ice_role);
+      rtc_transport_stats->ice_local_username_fragment =
+          channel_stats.ice_transport_stats.ice_local_username_fragment;
+      rtc_transport_stats->ice_state =
+          IceTransportStateToRTCIceTransportState(
+              channel_stats.ice_transport_stats.ice_state);
+      // Reference the currently selected candidate pair, if any.
+      for (const cricket::ConnectionInfo& info :
+           channel_stats.ice_transport_stats.connection_infos) {
+        if (info.best_connection) {
+          rtc_transport_stats->selected_candidate_pair_id =
+              RTCIceCandidatePairStatsIDFromConnectionInfo(info);
+        }
+      }
+      // Non-RTCP channels reference the RTCP channel's stats, when one
+      // exists.
+      if (channel_stats.component != cricket::ICE_CANDIDATE_COMPONENT_RTCP &&
+          !rtcp_transport_stats_id.empty()) {
+        rtc_transport_stats->rtcp_transport_stats_id = rtcp_transport_stats_id;
+      }
+      if (!local_certificate_id.empty())
+        rtc_transport_stats->local_certificate_id = local_certificate_id;
+      if (!remote_certificate_id.empty())
+        rtc_transport_stats->remote_certificate_id = remote_certificate_id;
+      // Crypto information. The TLS version is formatted as four upper-case
+      // hex digits.
+      if (channel_stats.ssl_version_bytes) {
+        char bytes[5];
+        snprintf(bytes, sizeof(bytes), "%04X", channel_stats.ssl_version_bytes);
+        rtc_transport_stats->tls_version = bytes;
+      }
+
+      if (channel_stats.dtls_role) {
+        rtc_transport_stats->dtls_role =
+            *channel_stats.dtls_role == rtc::SSL_CLIENT ? "client" : "server";
+      } else {
+        rtc_transport_stats->dtls_role = "unknown";
+      }
+
+      // Only report cipher suites that are negotiated and have a known name.
+      if (channel_stats.ssl_cipher_suite != rtc::kTlsNullWithNullNull &&
+          rtc::SSLStreamAdapter::SslCipherSuiteToName(
+              channel_stats.ssl_cipher_suite)
+              .length()) {
+        rtc_transport_stats->dtls_cipher =
+            rtc::SSLStreamAdapter::SslCipherSuiteToName(
+                channel_stats.ssl_cipher_suite);
+      }
+      if (channel_stats.srtp_crypto_suite != rtc::kSrtpInvalidCryptoSuite &&
+          rtc::SrtpCryptoSuiteToName(channel_stats.srtp_crypto_suite)
+              .length()) {
+        rtc_transport_stats->srtp_cipher =
+            rtc::SrtpCryptoSuiteToName(channel_stats.srtp_crypto_suite);
+      }
+      report->AddStats(std::move(rtc_transport_stats));
+    }
+  }
+}
+
+// Returns a map from transport name to local/remote certificate stats.
+// Results are served from `cached_certificates_by_transport_` when the
+// cache is non-empty, because rtc::SSLCertChain::GetStats() calls are
+// expensive; on a miss, certificates are fetched from the PeerConnection
+// and the result is copied into the cache.
+std::map<std::string, RTCStatsCollector::CertificateStatsPair>
+RTCStatsCollector::PrepareTransportCertificateStats_n(
+    const std::map<std::string, cricket::TransportStats>&
+        transport_stats_by_name) {
+  RTC_DCHECK_RUN_ON(network_thread_);
+  rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+
+  std::map<std::string, CertificateStatsPair> transport_cert_stats;
+  {
+    MutexLock lock(&cached_certificates_mutex_);
+    // Copy the certificate info from the cache, avoiding expensive
+    // rtc::SSLCertChain::GetStats() calls.
+    for (const auto& pair : cached_certificates_by_transport_) {
+      transport_cert_stats.insert(
+          std::make_pair(pair.first, pair.second.Copy()));
+    }
+  }
+  // An empty copy means the cache held nothing; collect fresh stats.
+  if (transport_cert_stats.empty()) {
+    // Collect certificate info.
+    for (const auto& entry : transport_stats_by_name) {
+      const std::string& transport_name = entry.first;
+
+      CertificateStatsPair certificate_stats_pair;
+      rtc::scoped_refptr<rtc::RTCCertificate> local_certificate;
+      if (pc_->GetLocalCertificate(transport_name, &local_certificate)) {
+        certificate_stats_pair.local =
+            local_certificate->GetSSLCertificateChain().GetStats();
+      }
+
+      auto remote_cert_chain = pc_->GetRemoteSSLCertChain(transport_name);
+      if (remote_cert_chain) {
+        certificate_stats_pair.remote = remote_cert_chain->GetStats();
+      }
+
+      transport_cert_stats.insert(
+          std::make_pair(transport_name, std::move(certificate_stats_pair)));
+    }
+    // Copy the result into the certificate cache for future reference.
+    MutexLock lock(&cached_certificates_mutex_);
+    for (const auto& pair : transport_cert_stats) {
+      cached_certificates_by_transport_.insert(
+          std::make_pair(pair.first, pair.second.Copy()));
+    }
+  }
+  return transport_cert_stats;
+}
+
+// Fills `transceiver_stats_infos_`, `call_stats_` and `audio_device_stats_`
+// for a stats collection run. The `_s_w_n` suffix reflects the thread hops:
+// the method starts on the signaling thread, then performs a BlockingCall
+// to the network thread (to enumerate channels) and one to the worker
+// thread (to fetch media channel stats and call stats).
+void RTCStatsCollector::PrepareTransceiverStatsInfosAndCallStats_s_w_n() {
+  RTC_DCHECK_RUN_ON(signaling_thread_);
+
+  transceiver_stats_infos_.clear();
+  // These are used to invoke GetStats for all the media channels together in
+  // one worker thread hop.
+  std::map<cricket::VoiceMediaSendChannelInterface*,
+           cricket::VoiceMediaSendInfo>
+      voice_send_stats;
+  std::map<cricket::VideoMediaSendChannelInterface*,
+           cricket::VideoMediaSendInfo>
+      video_send_stats;
+  std::map<cricket::VoiceMediaReceiveChannelInterface*,
+           cricket::VoiceMediaReceiveInfo>
+      voice_receive_stats;
+  std::map<cricket::VideoMediaReceiveChannelInterface*,
+           cricket::VideoMediaReceiveInfo>
+      video_receive_stats;
+
+  auto transceivers = pc_->GetTransceiversInternal();
+
+  // TODO(tommi): See if we can avoid synchronously blocking the signaling
+  // thread while we do this (or avoid the BlockingCall at all).
+  network_thread_->BlockingCall([&] {
+    rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+
+    for (const auto& transceiver_proxy : transceivers) {
+      RtpTransceiver* transceiver = transceiver_proxy->internal();
+      cricket::MediaType media_type = transceiver->media_type();
+
+      // Prepare stats entry. The TrackMediaInfoMap will be filled in after the
+      // stats have been fetched on the worker thread.
+      transceiver_stats_infos_.emplace_back();
+      RtpTransceiverStatsInfo& stats = transceiver_stats_infos_.back();
+      stats.transceiver = transceiver;
+      stats.media_type = media_type;
+
+      cricket::ChannelInterface* channel = transceiver->channel();
+      if (!channel) {
+        // The remaining fields require a BaseChannel.
+        continue;
+      }
+
+      stats.mid = channel->mid();
+      stats.transport_name = std::string(channel->transport_name());
+
+      // Register this transceiver's send/receive channels so their stats are
+      // fetched in the single worker-thread hop below.
+      if (media_type == cricket::MEDIA_TYPE_AUDIO) {
+        auto voice_send_channel = channel->voice_media_send_channel();
+        RTC_DCHECK(voice_send_stats.find(voice_send_channel) ==
+                   voice_send_stats.end());
+        voice_send_stats.insert(
+            std::make_pair(voice_send_channel, cricket::VoiceMediaSendInfo()));
+
+        auto voice_receive_channel = channel->voice_media_receive_channel();
+        RTC_DCHECK(voice_receive_stats.find(voice_receive_channel) ==
+                   voice_receive_stats.end());
+        voice_receive_stats.insert(std::make_pair(
+            voice_receive_channel, cricket::VoiceMediaReceiveInfo()));
+      } else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
+        auto video_send_channel = channel->video_media_send_channel();
+        RTC_DCHECK(video_send_stats.find(video_send_channel) ==
+                   video_send_stats.end());
+        video_send_stats.insert(
+            std::make_pair(video_send_channel, cricket::VideoMediaSendInfo()));
+        auto video_receive_channel = channel->video_media_receive_channel();
+        RTC_DCHECK(video_receive_stats.find(video_receive_channel) ==
+                   video_receive_stats.end());
+        video_receive_stats.insert(std::make_pair(
+            video_receive_channel, cricket::VideoMediaReceiveInfo()));
+      } else {
+        RTC_DCHECK_NOTREACHED();
+      }
+    }
+  });
+
+  // We jump to the worker thread and call GetStats() on each media channel as
+  // well as GetCallStats(). At the same time we construct the
+  // TrackMediaInfoMaps, which also needs info from the worker thread. This
+  // minimizes the number of thread jumps.
+  worker_thread_->BlockingCall([&] {
+    rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+
+    for (auto& pair : voice_send_stats) {
+      if (!pair.first->GetStats(&pair.second)) {
+        RTC_LOG(LS_WARNING) << "Failed to get voice send stats.";
+      }
+    }
+    for (auto& pair : voice_receive_stats) {
+      if (!pair.first->GetStats(&pair.second,
+                                /*get_and_clear_legacy_stats=*/false)) {
+        RTC_LOG(LS_WARNING) << "Failed to get voice receive stats.";
+      }
+    }
+    for (auto& pair : video_send_stats) {
+      if (!pair.first->GetStats(&pair.second)) {
+        RTC_LOG(LS_WARNING) << "Failed to get video send stats.";
+      }
+    }
+    for (auto& pair : video_receive_stats) {
+      if (!pair.first->GetStats(&pair.second)) {
+        RTC_LOG(LS_WARNING) << "Failed to get video receive stats.";
+      }
+    }
+
+    // Create the TrackMediaInfoMap for each transceiver stats object.
+    for (auto& stats : transceiver_stats_infos_) {
+      auto transceiver = stats.transceiver;
+      absl::optional<cricket::VoiceMediaInfo> voice_media_info;
+      absl::optional<cricket::VideoMediaInfo> video_media_info;
+      auto channel = transceiver->channel();
+      if (channel) {
+        cricket::MediaType media_type = transceiver->media_type();
+        if (media_type == cricket::MEDIA_TYPE_AUDIO) {
+          auto voice_send_channel = channel->voice_media_send_channel();
+          auto voice_receive_channel = channel->voice_media_receive_channel();
+          voice_media_info = cricket::VoiceMediaInfo(
+              std::move(voice_send_stats[voice_send_channel]),
+              std::move(voice_receive_stats[voice_receive_channel]));
+        } else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
+          auto video_send_channel = channel->video_media_send_channel();
+          auto video_receive_channel = channel->video_media_receive_channel();
+          video_media_info = cricket::VideoMediaInfo(
+              std::move(video_send_stats[video_send_channel]),
+              std::move(video_receive_stats[video_receive_channel]));
+        }
+      }
+      std::vector<rtc::scoped_refptr<RtpSenderInternal>> senders;
+      for (const auto& sender : transceiver->senders()) {
+        senders.push_back(
+            rtc::scoped_refptr<RtpSenderInternal>(sender->internal()));
+      }
+      std::vector<rtc::scoped_refptr<RtpReceiverInternal>> receivers;
+      for (const auto& receiver : transceiver->receivers()) {
+        receivers.push_back(
+            rtc::scoped_refptr<RtpReceiverInternal>(receiver->internal()));
+      }
+      stats.track_media_info_map.Initialize(std::move(voice_media_info),
+                                            std::move(video_media_info),
+                                            senders, receivers);
+    }
+
+    call_stats_ = pc_->GetCallStats();
+    audio_device_stats_ = pc_->GetAudioDeviceStats();
+  });
+
+  // Read back on the signaling thread, after the worker thread is done.
+  for (auto& stats : transceiver_stats_infos_) {
+    stats.current_direction = stats.transceiver->current_direction();
+  }
+}
+
+// Maintains the lifetime data channel counters that are reported by
+// ProducePeerConnectionStats_s(). A channel only counts as closed if it
+// was previously counted as opened.
+void RTCStatsCollector::OnSctpDataChannelStateChanged(
+    int channel_id,
+    DataChannelInterface::DataState state) {
+  RTC_DCHECK_RUN_ON(signaling_thread_);
+  switch (state) {
+    case DataChannelInterface::DataState::kOpen: {
+      // Each channel id is expected to transition to open at most once.
+      const bool inserted =
+          internal_record_.opened_data_channels.insert(channel_id).second;
+      RTC_DCHECK(inserted);
+      ++internal_record_.data_channels_opened;
+      break;
+    }
+    case DataChannelInterface::DataState::kClosed:
+      // Only channels that have been fully opened (and have increased the
+      // `data_channels_opened_` counter) increase the closed counter.
+      if (internal_record_.opened_data_channels.erase(channel_id)) {
+        ++internal_record_.data_channels_closed;
+      }
+      break;
+    default:
+      // Intermediate states (connecting/closing) are not counted.
+      break;
+  }
+}
+
+// Test-only wrapper exposing CandidateTypeToRTCIceCandidateType() (defined
+// earlier in this file) to unit tests.
+const char* CandidateTypeToRTCIceCandidateTypeForTesting(
+    const std::string& type) {
+  return CandidateTypeToRTCIceCandidateType(type);
+}
+
+// Test-only wrapper exposing DataStateToRTCDataChannelState() (defined
+// earlier in this file) to unit tests.
+const char* DataStateToRTCDataChannelStateForTesting(
+    DataChannelInterface::DataState state) {
+  return DataStateToRTCDataChannelState(state);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/rtc_stats_collector.h b/third_party/libwebrtc/pc/rtc_stats_collector.h
new file mode 100644
index 0000000000..e94d23944c
--- /dev/null
+++ b/third_party/libwebrtc/pc/rtc_stats_collector.h
@@ -0,0 +1,332 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_RTC_STATS_COLLECTOR_H_
+#define PC_RTC_STATS_COLLECTOR_H_
+
+#include <stdint.h>
+
+#include <cstdint>
+#include <map>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/data_channel_interface.h"
+#include "api/media_types.h"
+#include "api/scoped_refptr.h"
+#include "api/stats/rtc_stats_collector_callback.h"
+#include "api/stats/rtc_stats_report.h"
+#include "api/stats/rtcstats_objects.h"
+#include "call/call.h"
+#include "media/base/media_channel.h"
+#include "modules/audio_device/include/audio_device.h"
+#include "pc/data_channel_utils.h"
+#include "pc/peer_connection_internal.h"
+#include "pc/rtp_receiver.h"
+#include "pc/rtp_sender.h"
+#include "pc/rtp_transceiver.h"
+#include "pc/sctp_data_channel.h"
+#include "pc/track_media_info_map.h"
+#include "pc/transport_stats.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/containers/flat_set.h"
+#include "rtc_base/event.h"
+#include "rtc_base/ref_count.h"
+#include "rtc_base/ssl_certificate.h"
+#include "rtc_base/ssl_identity.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/time_utils.h"
+
+namespace webrtc {
+
+class RtpSenderInternal;
+class RtpReceiverInternal;
+
+// All public methods of the collector are to be called on the signaling thread.
+// Stats are gathered on the signaling, worker and network threads
+// asynchronously. The callback is invoked on the signaling thread. Resulting
+// reports are cached for `cache_lifetime_` ms.
+class RTCStatsCollector : public rtc::RefCountInterface {
+ public:
+  static rtc::scoped_refptr<RTCStatsCollector> Create(
+      PeerConnectionInternal* pc,
+      int64_t cache_lifetime_us = 50 * rtc::kNumMicrosecsPerMillisec);
+
+  // Gets a recent stats report. If there is a report cached that is still fresh
+  // it is returned, otherwise new stats are gathered and returned. A report is
+  // considered fresh for `cache_lifetime_` ms. const RTCStatsReports are safe
+  // to use across multiple threads and may be destructed on any thread.
+  // If the optional selector argument is used, stats are filtered according to
+  // stats selection algorithm before delivery.
+  // https://w3c.github.io/webrtc-pc/#dfn-stats-selection-algorithm
+  void GetStatsReport(rtc::scoped_refptr<RTCStatsCollectorCallback> callback);
+  // If `selector` is null the selection algorithm is still applied (interpreted
+  // as: no RTP streams are sent by selector). The result is empty.
+  void GetStatsReport(rtc::scoped_refptr<RtpSenderInternal> selector,
+                      rtc::scoped_refptr<RTCStatsCollectorCallback> callback);
+  // If `selector` is null the selection algorithm is still applied (interpreted
+  // as: no RTP streams are received by selector). The result is empty.
+  void GetStatsReport(rtc::scoped_refptr<RtpReceiverInternal> selector,
+                      rtc::scoped_refptr<RTCStatsCollectorCallback> callback);
+  // Clears the cache's reference to the most recent stats report. Subsequently
+  // calling `GetStatsReport` guarantees fresh stats. This method must be called
+  // any time the PeerConnection visibly changes as a result of an API call as
+  // per
+  // https://w3c.github.io/webrtc-stats/#guidelines-for-getstats-results-caching-throttling
+  // and it must be called any time negotiation happens.
+  void ClearCachedStatsReport();
+
+  // If there is a `GetStatsReport` request in-flight, waits until it has been
+  // completed. Must be called on the signaling thread.
+  void WaitForPendingRequest();
+
+  // Called by the PeerConnection instance when data channel states change.
+  void OnSctpDataChannelStateChanged(int channel_id,
+                                     DataChannelInterface::DataState state);
+
+ protected:
+  RTCStatsCollector(PeerConnectionInternal* pc, int64_t cache_lifetime_us);
+  ~RTCStatsCollector();
+
+  // Local and remote DTLS certificate stats for one transport, as produced by
+  // rtc::SSLCertChain::GetStats().
+  struct CertificateStatsPair {
+    std::unique_ptr<rtc::SSLCertificateStats> local;
+    std::unique_ptr<rtc::SSLCertificateStats> remote;
+
+    CertificateStatsPair Copy() const;
+  };
+
+  // Stats gathering on a particular thread. Virtual for the sake of testing.
+  virtual void ProducePartialResultsOnSignalingThreadImpl(
+      Timestamp timestamp,
+      RTCStatsReport* partial_report);
+  virtual void ProducePartialResultsOnNetworkThreadImpl(
+      Timestamp timestamp,
+      const std::map<std::string, cricket::TransportStats>&
+          transport_stats_by_name,
+      const std::map<std::string, CertificateStatsPair>& transport_cert_stats,
+      RTCStatsReport* partial_report);
+
+ private:
+  // Bundles one pending GetStatsReport() request: its callback plus the
+  // optional sender/receiver selector that filters the resulting report.
+  class RequestInfo {
+   public:
+    enum class FilterMode { kAll, kSenderSelector, kReceiverSelector };
+
+    // Constructs with FilterMode::kAll.
+    explicit RequestInfo(
+        rtc::scoped_refptr<RTCStatsCollectorCallback> callback);
+    // Constructs with FilterMode::kSenderSelector. The selection algorithm is
+    // applied even if `selector` is null, resulting in an empty report.
+    RequestInfo(rtc::scoped_refptr<RtpSenderInternal> selector,
+                rtc::scoped_refptr<RTCStatsCollectorCallback> callback);
+    // Constructs with FilterMode::kReceiverSelector. The selection algorithm is
+    // applied even if `selector` is null, resulting in an empty report.
+    RequestInfo(rtc::scoped_refptr<RtpReceiverInternal> selector,
+                rtc::scoped_refptr<RTCStatsCollectorCallback> callback);
+
+    FilterMode filter_mode() const { return filter_mode_; }
+    rtc::scoped_refptr<RTCStatsCollectorCallback> callback() const {
+      return callback_;
+    }
+    rtc::scoped_refptr<RtpSenderInternal> sender_selector() const {
+      RTC_DCHECK(filter_mode_ == FilterMode::kSenderSelector);
+      return sender_selector_;
+    }
+    rtc::scoped_refptr<RtpReceiverInternal> receiver_selector() const {
+      RTC_DCHECK(filter_mode_ == FilterMode::kReceiverSelector);
+      return receiver_selector_;
+    }
+
+   private:
+    RequestInfo(FilterMode filter_mode,
+                rtc::scoped_refptr<RTCStatsCollectorCallback> callback,
+                rtc::scoped_refptr<RtpSenderInternal> sender_selector,
+                rtc::scoped_refptr<RtpReceiverInternal> receiver_selector);
+
+    FilterMode filter_mode_;
+    rtc::scoped_refptr<RTCStatsCollectorCallback> callback_;
+    rtc::scoped_refptr<RtpSenderInternal> sender_selector_;
+    rtc::scoped_refptr<RtpReceiverInternal> receiver_selector_;
+  };
+
+  void GetStatsReportInternal(RequestInfo request);
+
+  // Structure for tracking stats about each RtpTransceiver managed by the
+  // PeerConnection. This can either be a Plan B style or Unified Plan style
+  // transceiver (i.e., can have 0 or many senders and receivers).
+  // Some fields are copied from the RtpTransceiver/BaseChannel object so that
+  // they can be accessed safely on threads other than the signaling thread.
+  // If a BaseChannel is not available (e.g., if signaling has not started),
+  // then `mid` and `transport_name` will be null.
+  struct RtpTransceiverStatsInfo {
+    rtc::scoped_refptr<RtpTransceiver> transceiver;
+    cricket::MediaType media_type;
+    absl::optional<std::string> mid;
+    absl::optional<std::string> transport_name;
+    TrackMediaInfoMap track_media_info_map;
+    absl::optional<RtpTransceiverDirection> current_direction;
+  };
+
+  void DeliverCachedReport(
+      rtc::scoped_refptr<const RTCStatsReport> cached_report,
+      std::vector<RequestInfo> requests);
+
+  // Produces `RTCCertificateStats`.
+  void ProduceCertificateStats_n(
+      Timestamp timestamp,
+      const std::map<std::string, CertificateStatsPair>& transport_cert_stats,
+      RTCStatsReport* report) const;
+  // Produces `RTCDataChannelStats`.
+  void ProduceDataChannelStats_n(Timestamp timestamp,
+                                 RTCStatsReport* report) const;
+  // Produces `RTCIceCandidatePairStats` and `RTCIceCandidateStats`.
+  void ProduceIceCandidateAndPairStats_n(
+      Timestamp timestamp,
+      const std::map<std::string, cricket::TransportStats>&
+          transport_stats_by_name,
+      const Call::Stats& call_stats,
+      RTCStatsReport* report) const;
+  // Produces RTCMediaSourceStats, including RTCAudioSourceStats and
+  // RTCVideoSourceStats.
+  void ProduceMediaSourceStats_s(Timestamp timestamp,
+                                 RTCStatsReport* report) const;
+  // Produces `RTCPeerConnectionStats`.
+  void ProducePeerConnectionStats_s(Timestamp timestamp,
+                                    RTCStatsReport* report) const;
+  // Produces `RTCAudioPlayoutStats`.
+  void ProduceAudioPlayoutStats_s(Timestamp timestamp,
+                                  RTCStatsReport* report) const;
+  // Produces `RTCInboundRtpStreamStats`, `RTCOutboundRtpStreamStats`,
+  // `RTCRemoteInboundRtpStreamStats`, `RTCRemoteOutboundRtpStreamStats` and any
+  // referenced `RTCCodecStats`. This has to be invoked after transport stats
+  // have been created because some metrics are calculated through lookup of
+  // other metrics.
+  void ProduceRTPStreamStats_n(
+      Timestamp timestamp,
+      const std::vector<RtpTransceiverStatsInfo>& transceiver_stats_infos,
+      RTCStatsReport* report) const;
+  void ProduceAudioRTPStreamStats_n(Timestamp timestamp,
+                                    const RtpTransceiverStatsInfo& stats,
+                                    RTCStatsReport* report) const;
+  void ProduceVideoRTPStreamStats_n(Timestamp timestamp,
+                                    const RtpTransceiverStatsInfo& stats,
+                                    RTCStatsReport* report) const;
+  // Produces `RTCTransportStats`.
+  void ProduceTransportStats_n(
+      Timestamp timestamp,
+      const std::map<std::string, cricket::TransportStats>&
+          transport_stats_by_name,
+      const std::map<std::string, CertificateStatsPair>& transport_cert_stats,
+      RTCStatsReport* report) const;
+
+  // Helper function to stats-producing functions.
+  std::map<std::string, CertificateStatsPair>
+  PrepareTransportCertificateStats_n(
+      const std::map<std::string, cricket::TransportStats>&
+          transport_stats_by_name);
+  // The results are stored in `transceiver_stats_infos_` and `call_stats_`.
+  void PrepareTransceiverStatsInfosAndCallStats_s_w_n();
+
+  // Stats gathering on a particular thread.
+  void ProducePartialResultsOnSignalingThread(Timestamp timestamp);
+  void ProducePartialResultsOnNetworkThread(
+      Timestamp timestamp,
+      absl::optional<std::string> sctp_transport_name);
+  // Merges `network_report_` into `partial_report_` and completes the request.
+  // This is a NO-OP if `network_report_` is null.
+  void MergeNetworkReport_s();
+
+  rtc::scoped_refptr<RTCStatsReport> CreateReportFilteredBySelector(
+      bool filter_by_sender_selector,
+      rtc::scoped_refptr<const RTCStatsReport> report,
+      rtc::scoped_refptr<RtpSenderInternal> sender_selector,
+      rtc::scoped_refptr<RtpReceiverInternal> receiver_selector);
+
+  PeerConnectionInternal* const pc_;
+  rtc::Thread* const signaling_thread_;
+  rtc::Thread* const worker_thread_;
+  rtc::Thread* const network_thread_;
+
+  int num_pending_partial_reports_;
+  int64_t partial_report_timestamp_us_;
+  // Reports that are produced on the signaling thread or the network thread are
+  // merged into this report. It is only touched on the signaling thread. Once
+  // all partial reports are merged this is the result of a request.
+  rtc::scoped_refptr<RTCStatsReport> partial_report_;
+  std::vector<RequestInfo> requests_;
+  // Holds the result of ProducePartialResultsOnNetworkThread(). It is merged
+  // into `partial_report_` on the signaling thread and then nulled by
+  // MergeNetworkReport_s(). Thread-safety is ensured by using
+  // `network_report_event_`.
+  rtc::scoped_refptr<RTCStatsReport> network_report_;
+  // If set, it is safe to touch the `network_report_` on the signaling thread.
+  // This is reset before async-invoking ProducePartialResultsOnNetworkThread()
+  // and set when ProducePartialResultsOnNetworkThread() is complete, after it
+  // has updated the value of `network_report_`.
+  rtc::Event network_report_event_;
+
+  // Cleared and set in `PrepareTransceiverStatsInfosAndCallStats_s_w_n`,
+  // starting out on the signaling thread, then network. Later read on the
+  // network and signaling threads as part of collecting stats and finally
+  // reset when the work is done. Initially this variable was added and not
+  // passed around as arguments to avoid copies. This is thread safe due to
+  // how operations are sequenced and we don't start the stats collection
+  // sequence if one is in progress. As a future improvement though, we could
+  // now get rid of the variable and keep the data scoped within a stats
+  // collection sequence.
+  std::vector<RtpTransceiverStatsInfo> transceiver_stats_infos_;
+  // This cache avoids having to call rtc::SSLCertChain::GetStats(), which can
+  // be relatively expensive. ClearCachedStatsReport() needs to be called on
+  // negotiation to ensure the cache is not obsolete.
+  Mutex cached_certificates_mutex_;
+  std::map<std::string, CertificateStatsPair> cached_certificates_by_transport_
+      RTC_GUARDED_BY(cached_certificates_mutex_);
+
+  Call::Stats call_stats_;
+
+  absl::optional<AudioDeviceModule::Stats> audio_device_stats_;
+
+  // A timestamp, in microseconds, that is based on a timer that is
+  // monotonically increasing. That is, even if the system clock is modified the
+  // difference between the timer and this timestamp is how fresh the cached
+  // report is.
+  int64_t cache_timestamp_us_;
+  int64_t cache_lifetime_us_;
+  rtc::scoped_refptr<const RTCStatsReport> cached_report_;
+
+  // Data recorded and maintained by the stats collector during its lifetime.
+  // Some stats are produced from this record instead of other components.
+  struct InternalRecord {
+    InternalRecord() : data_channels_opened(0), data_channels_closed(0) {}
+
+    // The opened count goes up when a channel is fully opened and the closed
+    // count goes up if a previously opened channel has fully closed. The opened
+    // count does not go down when a channel closes, meaning (opened - closed)
+    // is the number of channels currently opened. A channel that is closed
+    // before reaching the open state does not affect these counters.
+    uint32_t data_channels_opened;
+    uint32_t data_channels_closed;
+    // Identifies channels that have been opened, whose internal id is stored in
+    // the set until they have been fully closed.
+    webrtc::flat_set<int> opened_data_channels;
+  };
+  InternalRecord internal_record_;
+};
+
+const char* CandidateTypeToRTCIceCandidateTypeForTesting(
+ const std::string& type);
+const char* DataStateToRTCDataChannelStateForTesting(
+ DataChannelInterface::DataState state);
+
+} // namespace webrtc
+
+#endif // PC_RTC_STATS_COLLECTOR_H_
diff --git a/third_party/libwebrtc/pc/rtc_stats_collector_unittest.cc b/third_party/libwebrtc/pc/rtc_stats_collector_unittest.cc
new file mode 100644
index 0000000000..37821ac829
--- /dev/null
+++ b/third_party/libwebrtc/pc/rtc_stats_collector_unittest.cc
@@ -0,0 +1,3841 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/rtc_stats_collector.h"
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <algorithm>
+#include <initializer_list>
+#include <memory>
+#include <ostream>
+#include <string>
+#include <type_traits>
+#include <utility>
+#include <vector>
+
+#include "absl/strings/str_replace.h"
+#include "api/candidate.h"
+#include "api/dtls_transport_interface.h"
+#include "api/media_stream_interface.h"
+#include "api/media_stream_track.h"
+#include "api/rtp_parameters.h"
+#include "api/rtp_transceiver_direction.h"
+#include "api/stats/rtc_stats.h"
+#include "api/stats/rtc_stats_report.h"
+#include "api/stats/rtcstats_objects.h"
+#include "api/units/time_delta.h"
+#include "api/units/timestamp.h"
+#include "api/video/recordable_encoded_frame.h"
+#include "api/video/video_content_type.h"
+#include "api/video/video_frame.h"
+#include "api/video/video_sink_interface.h"
+#include "api/video/video_source_interface.h"
+#include "api/video/video_timing.h"
+#include "api/video_codecs/scalability_mode.h"
+#include "common_video/include/quality_limitation_reason.h"
+#include "media/base/media_channel.h"
+#include "modules/audio_device/include/audio_device.h"
+#include "modules/audio_processing/include/audio_processing_statistics.h"
+#include "modules/rtp_rtcp/include/report_block_data.h"
+#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "p2p/base/connection_info.h"
+#include "p2p/base/ice_transport_internal.h"
+#include "p2p/base/p2p_constants.h"
+#include "p2p/base/port.h"
+#include "pc/media_stream.h"
+#include "pc/stream_collection.h"
+#include "pc/test/fake_data_channel_controller.h"
+#include "pc/test/fake_peer_connection_for_stats.h"
+#include "pc/test/mock_data_channel.h"
+#include "pc/test/mock_rtp_receiver_internal.h"
+#include "pc/test/mock_rtp_sender_internal.h"
+#include "pc/test/rtc_stats_obtainer.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/fake_clock.h"
+#include "rtc_base/fake_ssl_identity.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/network_constants.h"
+#include "rtc_base/ref_counted_object.h"
+#include "rtc_base/rtc_certificate.h"
+#include "rtc_base/socket_address.h"
+#include "rtc_base/ssl_fingerprint.h"
+#include "rtc_base/ssl_identity.h"
+#include "rtc_base/ssl_stream_adapter.h"
+#include "rtc_base/string_encode.h"
+#include "rtc_base/strings/json.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/time_utils.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+using ::testing::_;
+using ::testing::AtLeast;
+using ::testing::Invoke;
+using ::testing::Return;
+
+namespace webrtc {
+
+// These are used by gtest code, such as if `EXPECT_EQ` fails.
+// Each overload serializes the stats object as JSON so that failing
+// EXPECT/ASSERT comparisons print a readable dump of the whole object.
+void PrintTo(const RTCCertificateStats& stats, ::std::ostream* os) {
+  *os << stats.ToJson();
+}
+
+void PrintTo(const RTCCodecStats& stats, ::std::ostream* os) {
+  *os << stats.ToJson();
+}
+
+void PrintTo(const RTCDataChannelStats& stats, ::std::ostream* os) {
+  *os << stats.ToJson();
+}
+
+void PrintTo(const RTCIceCandidatePairStats& stats, ::std::ostream* os) {
+  *os << stats.ToJson();
+}
+
+void PrintTo(const RTCLocalIceCandidateStats& stats, ::std::ostream* os) {
+  *os << stats.ToJson();
+}
+
+void PrintTo(const RTCRemoteIceCandidateStats& stats, ::std::ostream* os) {
+  *os << stats.ToJson();
+}
+
+void PrintTo(const RTCPeerConnectionStats& stats, ::std::ostream* os) {
+  *os << stats.ToJson();
+}
+
+void PrintTo(const RTCInboundRtpStreamStats& stats, ::std::ostream* os) {
+  *os << stats.ToJson();
+}
+
+void PrintTo(const RTCOutboundRtpStreamStats& stats, ::std::ostream* os) {
+  *os << stats.ToJson();
+}
+
+void PrintTo(const RTCRemoteInboundRtpStreamStats& stats, ::std::ostream* os) {
+  *os << stats.ToJson();
+}
+
+void PrintTo(const RTCAudioSourceStats& stats, ::std::ostream* os) {
+  *os << stats.ToJson();
+}
+
+void PrintTo(const RTCVideoSourceStats& stats, ::std::ostream* os) {
+  *os << stats.ToJson();
+}
+
+void PrintTo(const RTCTransportStats& stats, ::std::ostream* os) {
+  *os << stats.ToJson();
+}
+
+namespace {
+
+const int64_t kGetStatsReportTimeoutMs = 1000;
+
+// Fake data used by `SetupExampleStatsVoiceGraph()` to fill in remote outbound
+// stats.
+constexpr int64_t kRemoteOutboundStatsTimestampMs = 123;
+constexpr int64_t kRemoteOutboundStatsRemoteTimestampMs = 456;
+constexpr uint32_t kRemoteOutboundStatsPacketsSent = 7u;
+constexpr uint64_t kRemoteOutboundStatsBytesSent = 8u;
+constexpr uint64_t kRemoteOutboundStatsReportsCount = 9u;
+
+struct CertificateInfo {
+ rtc::scoped_refptr<rtc::RTCCertificate> certificate;
+ std::vector<std::string> ders;
+ std::vector<std::string> pems;
+ std::vector<std::string> fingerprints;
+};
+
+// Return the ID for an object of the given type in a report.
+// The object must be present and be unique.
+template <typename T>
+std::string IdForType(const RTCStatsReport* report) {
+  auto stats_of_my_type = report->RTCStatsReport::GetStatsOfType<T>();
+  // We cannot use ASSERT here, since we're within a function.
+  EXPECT_EQ(1U, stats_of_my_type.size())
+      << "Unexpected number of stats of this type";
+  if (stats_of_my_type.size() == 1) {
+    return stats_of_my_type[0]->id();
+  } else {
+    // Return something that is not going to be a valid stats ID.
+    return "Type not found";
+  }
+}
+
+// Builds a fake certificate chain from the given DER blobs (leaf first) and
+// returns it along with the PEM strings and sha-1 fingerprints that the stats
+// collector is expected to report for it. `ders` must be non-empty.
+std::unique_ptr<CertificateInfo> CreateFakeCertificateAndInfoFromDers(
+    const std::vector<std::string>& ders) {
+  RTC_CHECK(!ders.empty());
+  std::unique_ptr<CertificateInfo> info(new CertificateInfo());
+  info->ders = ders;
+  for (const std::string& der : ders) {
+    info->pems.push_back(rtc::SSLIdentity::DerToPem(
+        "CERTIFICATE", reinterpret_cast<const unsigned char*>(der.c_str()),
+        der.length()));
+  }
+  info->certificate =
+      rtc::RTCCertificate::Create(std::unique_ptr<rtc::FakeSSLIdentity>(
+          new rtc::FakeSSLIdentity(info->pems)));
+  // Strip header/footer and newline characters of PEM strings.
+  for (size_t i = 0; i < info->pems.size(); ++i) {
+    absl::StrReplaceAll({{"-----BEGIN CERTIFICATE-----", ""},
+                         {"-----END CERTIFICATE-----", ""},
+                         {"\n", ""}},
+                        &info->pems[i]);
+  }
+  // Fingerprints for the whole certificate chain, starting with leaf
+  // certificate.
+  const rtc::SSLCertChain& chain = info->certificate->GetSSLCertificateChain();
+  std::unique_ptr<rtc::SSLFingerprint> fp;
+  for (size_t i = 0; i < chain.GetSize(); i++) {
+    fp = rtc::SSLFingerprint::Create("sha-1", chain.Get(i));
+    EXPECT_TRUE(fp);
+    info->fingerprints.push_back(fp->GetRfc4572Fingerprint());
+  }
+  EXPECT_EQ(info->ders.size(), info->fingerprints.size());
+  return info;
+}
+
+// Builds an ICE candidate with the given address/type fields filled in and
+// fixed foundation/username values, for use as fixture data in stats tests.
+std::unique_ptr<cricket::Candidate> CreateFakeCandidate(
+    const std::string& hostname,
+    int port,
+    const std::string& protocol,
+    const rtc::AdapterType adapter_type,
+    const std::string& candidate_type,
+    uint32_t priority,
+    const rtc::AdapterType underlying_type_for_vpn =
+        rtc::ADAPTER_TYPE_UNKNOWN) {
+  std::unique_ptr<cricket::Candidate> candidate(new cricket::Candidate());
+  candidate->set_address(rtc::SocketAddress(hostname, port));
+  candidate->set_protocol(protocol);
+  candidate->set_network_type(adapter_type);
+  candidate->set_underlying_type_for_vpn(underlying_type_for_vpn);
+  candidate->set_type(candidate_type);
+  candidate->set_priority(priority);
+  // Defaults for testing.
+  candidate->set_foundation("foundationIsAString");
+  candidate->set_username("iceusernamefragment");
+  return candidate;
+}
+
+// Audio processor stub that reports fixed echo-return-loss statistics,
+// regardless of `has_recv_streams`, so tests can assert on known values.
+class FakeAudioProcessor : public AudioProcessorInterface {
+ public:
+  FakeAudioProcessor() {}
+  ~FakeAudioProcessor() {}
+
+ private:
+  AudioProcessorInterface::AudioProcessorStatistics GetStats(
+      bool has_recv_streams) override {
+    AudioProcessorStatistics stats;
+    stats.apm_statistics.echo_return_loss = 2.0;
+    stats.apm_statistics.echo_return_loss_enhancement = 3.0;
+    return stats;
+  }
+};
+
+// Minimal audio track whose only interesting behavior is the optional
+// FakeAudioProcessor it exposes via GetAudioProcessor(); all source/sink
+// methods are no-ops.
+class FakeAudioTrackForStats : public MediaStreamTrack<AudioTrackInterface> {
+ public:
+  static rtc::scoped_refptr<FakeAudioTrackForStats> Create(
+      const std::string& id,
+      MediaStreamTrackInterface::TrackState state,
+      bool create_fake_audio_processor) {
+    auto audio_track_stats = rtc::make_ref_counted<FakeAudioTrackForStats>(id);
+    audio_track_stats->set_state(state);
+    if (create_fake_audio_processor) {
+      audio_track_stats->processor_ =
+          rtc::make_ref_counted<FakeAudioProcessor>();
+    }
+    return audio_track_stats;
+  }
+
+  explicit FakeAudioTrackForStats(const std::string& id)
+      : MediaStreamTrack<AudioTrackInterface>(id) {}
+
+  std::string kind() const override {
+    return MediaStreamTrackInterface::kAudioKind;
+  }
+  webrtc::AudioSourceInterface* GetSource() const override { return nullptr; }
+  void AddSink(webrtc::AudioTrackSinkInterface* sink) override {}
+  void RemoveSink(webrtc::AudioTrackSinkInterface* sink) override {}
+  bool GetSignalLevel(int* level) override { return false; }
+  rtc::scoped_refptr<AudioProcessorInterface> GetAudioProcessor() override {
+    return processor_;
+  }
+
+ private:
+  // Null unless Create() was asked for a fake audio processor.
+  rtc::scoped_refptr<FakeAudioProcessor> processor_;
+};
+
+// Video source stub that reports a fixed input resolution via GetStats();
+// every other interface method is a no-op or a constant.
+class FakeVideoTrackSourceForStats : public VideoTrackSourceInterface {
+ public:
+  static rtc::scoped_refptr<FakeVideoTrackSourceForStats> Create(
+      int input_width,
+      int input_height) {
+    return rtc::make_ref_counted<FakeVideoTrackSourceForStats>(input_width,
+                                                               input_height);
+  }
+
+  FakeVideoTrackSourceForStats(int input_width, int input_height)
+      : input_width_(input_width), input_height_(input_height) {}
+  ~FakeVideoTrackSourceForStats() override {}
+
+  // VideoTrackSourceInterface
+  bool is_screencast() const override { return false; }
+  absl::optional<bool> needs_denoising() const override { return false; }
+  bool GetStats(VideoTrackSourceInterface::Stats* stats) override {
+    stats->input_width = input_width_;
+    stats->input_height = input_height_;
+    return true;
+  }
+  // MediaSourceInterface (part of VideoTrackSourceInterface)
+  MediaSourceInterface::SourceState state() const override {
+    return MediaSourceInterface::SourceState::kLive;
+  }
+  bool remote() const override { return false; }
+  // NotifierInterface (part of MediaSourceInterface)
+  void RegisterObserver(ObserverInterface* observer) override {}
+  void UnregisterObserver(ObserverInterface* observer) override {}
+  // rtc::VideoSourceInterface<VideoFrame> (part of VideoTrackSourceInterface)
+  void AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink,
+                       const rtc::VideoSinkWants& wants) override {}
+  void RemoveSink(rtc::VideoSinkInterface<VideoFrame>* sink) override {}
+  bool SupportsEncodedOutput() const override { return false; }
+  void GenerateKeyFrame() override {}
+  void AddEncodedSink(
+      rtc::VideoSinkInterface<RecordableEncodedFrame>* sink) override {}
+  void RemoveEncodedSink(
+      rtc::VideoSinkInterface<RecordableEncodedFrame>* sink) override {}
+
+ private:
+  int input_width_;
+  int input_height_;
+};
+
+// Minimal video track that forwards GetSource() to the (possibly null)
+// source passed at construction; sink management is a no-op.
+class FakeVideoTrackForStats : public MediaStreamTrack<VideoTrackInterface> {
+ public:
+  static rtc::scoped_refptr<FakeVideoTrackForStats> Create(
+      const std::string& id,
+      MediaStreamTrackInterface::TrackState state,
+      rtc::scoped_refptr<VideoTrackSourceInterface> source) {
+    auto video_track =
+        rtc::make_ref_counted<FakeVideoTrackForStats>(id, std::move(source));
+    video_track->set_state(state);
+    return video_track;
+  }
+
+  FakeVideoTrackForStats(const std::string& id,
+                         rtc::scoped_refptr<VideoTrackSourceInterface> source)
+      : MediaStreamTrack<VideoTrackInterface>(id), source_(source) {}
+
+  std::string kind() const override {
+    return MediaStreamTrackInterface::kVideoKind;
+  }
+
+  void AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink,
+                       const rtc::VideoSinkWants& wants) override {}
+  void RemoveSink(rtc::VideoSinkInterface<VideoFrame>* sink) override {}
+
+  VideoTrackSourceInterface* GetSource() const override {
+    return source_.get();
+  }
+
+ private:
+  rtc::scoped_refptr<VideoTrackSourceInterface> source_;
+};
+
+// Builds a fake audio or video track with the given id and state.
+// `create_fake_audio_processor` is only meaningful for audio tracks; video
+// tracks are created without a source.
+rtc::scoped_refptr<MediaStreamTrackInterface> CreateFakeTrack(
+    cricket::MediaType media_type,
+    const std::string& track_id,
+    MediaStreamTrackInterface::TrackState track_state,
+    bool create_fake_audio_processor = false) {
+  if (media_type == cricket::MEDIA_TYPE_AUDIO) {
+    return FakeAudioTrackForStats::Create(track_id, track_state,
+                                          create_fake_audio_processor);
+  }
+  // Anything that is not audio must be video.
+  RTC_DCHECK_EQ(media_type, cricket::MEDIA_TYPE_VIDEO);
+  return FakeVideoTrackForStats::Create(track_id, track_state, nullptr);
+}
+
+// Builds a gmock RTP sender whose accessors return the given track, ssrc,
+// media type, attachment id and stream ids, and whose GetParameters() reports
+// a single encoding carrying `ssrc`. `track` may be null; if set, its kind
+// must match `media_type`.
+rtc::scoped_refptr<MockRtpSenderInternal> CreateMockSender(
+    cricket::MediaType media_type,
+    rtc::scoped_refptr<MediaStreamTrackInterface> track,
+    uint32_t ssrc,
+    int attachment_id,
+    std::vector<std::string> local_stream_ids) {
+  RTC_DCHECK(!track ||
+             (track->kind() == MediaStreamTrackInterface::kAudioKind &&
+              media_type == cricket::MEDIA_TYPE_AUDIO) ||
+             (track->kind() == MediaStreamTrackInterface::kVideoKind &&
+              media_type == cricket::MEDIA_TYPE_VIDEO));
+  auto sender = rtc::make_ref_counted<MockRtpSenderInternal>();
+  EXPECT_CALL(*sender, track()).WillRepeatedly(Return(track));
+  EXPECT_CALL(*sender, ssrc()).WillRepeatedly(Return(ssrc));
+  EXPECT_CALL(*sender, media_type()).WillRepeatedly(Return(media_type));
+  // GetParameters() delegates to GetParametersInternal() like the real sender.
+  EXPECT_CALL(*sender, GetParameters())
+      .WillRepeatedly(
+          Invoke([s = sender.get()]() { return s->GetParametersInternal(); }));
+  EXPECT_CALL(*sender, GetParametersInternal()).WillRepeatedly(Invoke([ssrc]() {
+    RtpParameters params;
+    params.encodings.push_back(RtpEncodingParameters());
+    params.encodings[0].ssrc = ssrc;
+    return params;
+  }));
+  EXPECT_CALL(*sender, AttachmentId()).WillRepeatedly(Return(attachment_id));
+  EXPECT_CALL(*sender, stream_ids()).WillRepeatedly(Return(local_stream_ids));
+  EXPECT_CALL(*sender, SetTransceiverAsStopped());
+  return sender;
+}
+
+// Builds a gmock RTP receiver for `track` (must be non-null; media type is
+// derived from the track's kind) with the given ssrc and attachment id.
+// streams() defaults to empty; callers can override that expectation.
+rtc::scoped_refptr<MockRtpReceiverInternal> CreateMockReceiver(
+    const rtc::scoped_refptr<MediaStreamTrackInterface>& track,
+    uint32_t ssrc,
+    int attachment_id) {
+  auto receiver = rtc::make_ref_counted<MockRtpReceiverInternal>();
+  EXPECT_CALL(*receiver, track()).WillRepeatedly(Return(track));
+  EXPECT_CALL(*receiver, ssrc()).WillRepeatedly(Invoke([ssrc]() {
+    return ssrc;
+  }));
+  EXPECT_CALL(*receiver, streams())
+      .WillRepeatedly(
+          Return(std::vector<rtc::scoped_refptr<MediaStreamInterface>>({})));
+
+  EXPECT_CALL(*receiver, media_type())
+      .WillRepeatedly(
+          Return(track->kind() == MediaStreamTrackInterface::kAudioKind
+                     ? cricket::MEDIA_TYPE_AUDIO
+                     : cricket::MEDIA_TYPE_VIDEO));
+  // Like the mock sender, report a single encoding carrying `ssrc`.
+  EXPECT_CALL(*receiver, GetParameters()).WillRepeatedly(Invoke([ssrc]() {
+    RtpParameters params;
+    params.encodings.push_back(RtpEncodingParameters());
+    params.encodings[0].ssrc = ssrc;
+    return params;
+  }));
+  EXPECT_CALL(*receiver, AttachmentId()).WillRepeatedly(Return(attachment_id));
+  EXPECT_CALL(*receiver, Stop()).WillRepeatedly(Return());
+  return receiver;
+}
+
+class RTCStatsCollectorWrapper {
+ public:
+ explicit RTCStatsCollectorWrapper(
+ rtc::scoped_refptr<FakePeerConnectionForStats> pc)
+ : pc_(pc),
+ stats_collector_(
+ RTCStatsCollector::Create(pc.get(),
+ 50 * rtc::kNumMicrosecsPerMillisec)) {}
+
+ rtc::scoped_refptr<RTCStatsCollector> stats_collector() {
+ return stats_collector_;
+ }
+
+ rtc::scoped_refptr<const RTCStatsReport> GetStatsReport() {
+ rtc::scoped_refptr<RTCStatsObtainer> callback = RTCStatsObtainer::Create();
+ stats_collector_->GetStatsReport(callback);
+ return WaitForReport(callback);
+ }
+
+ rtc::scoped_refptr<const RTCStatsReport> GetStatsReportWithSenderSelector(
+ rtc::scoped_refptr<RtpSenderInternal> selector) {
+ rtc::scoped_refptr<RTCStatsObtainer> callback = RTCStatsObtainer::Create();
+ stats_collector_->GetStatsReport(selector, callback);
+ return WaitForReport(callback);
+ }
+
+ rtc::scoped_refptr<const RTCStatsReport> GetStatsReportWithReceiverSelector(
+ rtc::scoped_refptr<RtpReceiverInternal> selector) {
+ rtc::scoped_refptr<RTCStatsObtainer> callback = RTCStatsObtainer::Create();
+ stats_collector_->GetStatsReport(selector, callback);
+ return WaitForReport(callback);
+ }
+
+ rtc::scoped_refptr<const RTCStatsReport> GetFreshStatsReport() {
+ stats_collector_->ClearCachedStatsReport();
+ return GetStatsReport();
+ }
+
+  // Creates a fake local track of `media_type`, optionally wraps it in a new
+  // local MediaStream ("LocalStreamId"), attaches it to a mock RTP sender
+  // with the given `ssrc` and `attachment_id`, and registers the sender on
+  // the fake PeerConnection. Returns the mock sender.
+  rtc::scoped_refptr<MockRtpSenderInternal> SetupLocalTrackAndSender(
+      cricket::MediaType media_type,
+      const std::string& track_id,
+      uint32_t ssrc,
+      bool add_stream,
+      int attachment_id) {
+    rtc::scoped_refptr<MediaStream> local_stream;
+    if (add_stream) {
+      local_stream = MediaStream::Create("LocalStreamId");
+      pc_->mutable_local_streams()->AddStream(local_stream);
+    }
+
+    rtc::scoped_refptr<MediaStreamTrackInterface> track;
+    if (media_type == cricket::MEDIA_TYPE_AUDIO) {
+      track = CreateFakeTrack(media_type, track_id,
+                              MediaStreamTrackInterface::kLive);
+      if (add_stream) {
+        // AddTrack() is overloaded per track kind, hence the downcast.
+        local_stream->AddTrack(rtc::scoped_refptr<AudioTrackInterface>(
+            static_cast<AudioTrackInterface*>(track.get())));
+      }
+    } else {
+      track = CreateFakeTrack(media_type, track_id,
+                              MediaStreamTrackInterface::kLive);
+      if (add_stream) {
+        local_stream->AddTrack(rtc::scoped_refptr<VideoTrackInterface>(
+            static_cast<VideoTrackInterface*>(track.get())));
+      }
+    }
+
+    rtc::scoped_refptr<MockRtpSenderInternal> sender =
+        CreateMockSender(media_type, track, ssrc, attachment_id, {});
+    EXPECT_CALL(*sender, Stop());
+    EXPECT_CALL(*sender, SetMediaChannel(_));
+    pc_->AddSender(sender);
+    return sender;
+  }
+
+  // Creates a fake remote track of `media_type` inside a new remote
+  // MediaStream (`stream_id`), attaches it to a mock RTP receiver with the
+  // given `ssrc` (attachment ID fixed at 62), and registers the receiver on
+  // the fake PeerConnection. Returns the mock receiver.
+  rtc::scoped_refptr<MockRtpReceiverInternal> SetupRemoteTrackAndReceiver(
+      cricket::MediaType media_type,
+      const std::string& track_id,
+      const std::string& stream_id,
+      uint32_t ssrc) {
+    rtc::scoped_refptr<MediaStream> remote_stream =
+        MediaStream::Create(stream_id);
+    pc_->mutable_remote_streams()->AddStream(remote_stream);
+
+    rtc::scoped_refptr<MediaStreamTrackInterface> track;
+    if (media_type == cricket::MEDIA_TYPE_AUDIO) {
+      track = CreateFakeTrack(media_type, track_id,
+                              MediaStreamTrackInterface::kLive);
+      remote_stream->AddTrack(rtc::scoped_refptr<AudioTrackInterface>(
+          static_cast<AudioTrackInterface*>(track.get())));
+    } else {
+      track = CreateFakeTrack(media_type, track_id,
+                              MediaStreamTrackInterface::kLive);
+      remote_stream->AddTrack(rtc::scoped_refptr<VideoTrackInterface>(
+          static_cast<VideoTrackInterface*>(track.get())));
+    }
+
+    rtc::scoped_refptr<MockRtpReceiverInternal> receiver =
+        CreateMockReceiver(track, ssrc, 62);
+    EXPECT_CALL(*receiver, streams())
+        .WillRepeatedly(
+            Return(std::vector<rtc::scoped_refptr<MediaStreamInterface>>(
+                {remote_stream})));
+    EXPECT_CALL(*receiver, SetMediaChannel(_)).WillRepeatedly(Return());
+    pc_->AddReceiver(receiver);
+    return receiver;
+  }
+
+  // Attaches tracks to peer connections by configuring RTP senders and RTP
+  // receivers according to the tracks' pairings with
+  // `[Voice/Video][Sender/Receiver]Info` and their SSRCs. Local tracks can be
+  // associated with multiple `[Voice/Video]SenderInfo`s, remote tracks can
+  // only be associated with one `[Voice/Video]ReceiverInfo`.
+  // Senders get assigned attachment ID "ssrc + 10".
+  // Finally, one voice channel ("audio") and one video channel ("video") are
+  // added on the shared transport "transport", carrying the accumulated
+  // media infos.
+  void CreateMockRtpSendersReceiversAndChannels(
+      std::initializer_list<
+          std::pair<MediaStreamTrackInterface*, cricket::VoiceSenderInfo>>
+          local_audio_track_info_pairs,
+      std::initializer_list<
+          std::pair<MediaStreamTrackInterface*, cricket::VoiceReceiverInfo>>
+          remote_audio_track_info_pairs,
+      std::initializer_list<
+          std::pair<MediaStreamTrackInterface*, cricket::VideoSenderInfo>>
+          local_video_track_info_pairs,
+      std::initializer_list<
+          std::pair<MediaStreamTrackInterface*, cricket::VideoReceiverInfo>>
+          remote_video_track_info_pairs,
+      std::vector<std::string> local_stream_ids,
+      std::vector<rtc::scoped_refptr<MediaStreamInterface>> remote_streams) {
+    cricket::VoiceMediaInfo voice_media_info;
+    cricket::VideoMediaInfo video_media_info;
+
+    // Local audio tracks and voice sender infos
+    for (auto& pair : local_audio_track_info_pairs) {
+      MediaStreamTrackInterface* local_audio_track = pair.first;
+      const cricket::VoiceSenderInfo& voice_sender_info = pair.second;
+      RTC_DCHECK_EQ(local_audio_track->kind(),
+                    MediaStreamTrackInterface::kAudioKind);
+
+      voice_media_info.senders.push_back(voice_sender_info);
+      rtc::scoped_refptr<MockRtpSenderInternal> rtp_sender = CreateMockSender(
+          cricket::MEDIA_TYPE_AUDIO,
+          rtc::scoped_refptr<MediaStreamTrackInterface>(local_audio_track),
+          voice_sender_info.local_stats[0].ssrc,
+          voice_sender_info.local_stats[0].ssrc + 10, local_stream_ids);
+      EXPECT_CALL(*rtp_sender, SetMediaChannel(_)).WillRepeatedly(Return());
+      EXPECT_CALL(*rtp_sender, Stop());
+      pc_->AddSender(rtp_sender);
+    }
+
+    // Remote audio tracks and voice receiver infos
+    for (auto& pair : remote_audio_track_info_pairs) {
+      MediaStreamTrackInterface* remote_audio_track = pair.first;
+      const cricket::VoiceReceiverInfo& voice_receiver_info = pair.second;
+      RTC_DCHECK_EQ(remote_audio_track->kind(),
+                    MediaStreamTrackInterface::kAudioKind);
+
+      voice_media_info.receivers.push_back(voice_receiver_info);
+      rtc::scoped_refptr<MockRtpReceiverInternal> rtp_receiver =
+          CreateMockReceiver(
+              rtc::scoped_refptr<MediaStreamTrackInterface>(remote_audio_track),
+              voice_receiver_info.local_stats[0].ssrc,
+              voice_receiver_info.local_stats[0].ssrc + 10);
+      EXPECT_CALL(*rtp_receiver, streams())
+          .WillRepeatedly(Return(remote_streams));
+      EXPECT_CALL(*rtp_receiver, SetMediaChannel(_)).WillRepeatedly(Return());
+      pc_->AddReceiver(rtp_receiver);
+    }
+
+    // Local video tracks and video sender infos
+    for (auto& pair : local_video_track_info_pairs) {
+      MediaStreamTrackInterface* local_video_track = pair.first;
+      const cricket::VideoSenderInfo& video_sender_info = pair.second;
+      RTC_DCHECK_EQ(local_video_track->kind(),
+                    MediaStreamTrackInterface::kVideoKind);
+
+      video_media_info.senders.push_back(video_sender_info);
+      video_media_info.aggregated_senders.push_back(video_sender_info);
+      rtc::scoped_refptr<MockRtpSenderInternal> rtp_sender = CreateMockSender(
+          cricket::MEDIA_TYPE_VIDEO,
+          rtc::scoped_refptr<MediaStreamTrackInterface>(local_video_track),
+          video_sender_info.local_stats[0].ssrc,
+          video_sender_info.local_stats[0].ssrc + 10, local_stream_ids);
+      EXPECT_CALL(*rtp_sender, SetMediaChannel(_)).WillRepeatedly(Return());
+      EXPECT_CALL(*rtp_sender, Stop());
+      pc_->AddSender(rtp_sender);
+    }
+
+    // Remote video tracks and video receiver infos
+    for (auto& pair : remote_video_track_info_pairs) {
+      MediaStreamTrackInterface* remote_video_track = pair.first;
+      const cricket::VideoReceiverInfo& video_receiver_info = pair.second;
+      RTC_DCHECK_EQ(remote_video_track->kind(),
+                    MediaStreamTrackInterface::kVideoKind);
+
+      video_media_info.receivers.push_back(video_receiver_info);
+      rtc::scoped_refptr<MockRtpReceiverInternal> rtp_receiver =
+          CreateMockReceiver(
+              rtc::scoped_refptr<MediaStreamTrackInterface>(remote_video_track),
+              video_receiver_info.local_stats[0].ssrc,
+              video_receiver_info.local_stats[0].ssrc + 10);
+      EXPECT_CALL(*rtp_receiver, streams())
+          .WillRepeatedly(Return(remote_streams));
+      EXPECT_CALL(*rtp_receiver, SetMediaChannel(_)).WillRepeatedly(Return());
+      pc_->AddReceiver(rtp_receiver);
+    }
+
+    pc_->AddVoiceChannel("audio", "transport", voice_media_info);
+    pc_->AddVideoChannel("video", "transport", video_media_info);
+  }
+
+ private:
+  // Blocks until `callback` has received a report (or the timeout fires),
+  // then sanity-checks that no locally generated stat has a timestamp in the
+  // future. Remote-inbound/outbound stats are skipped because their
+  // timestamps originate from the remote endpoint's clock.
+  rtc::scoped_refptr<const RTCStatsReport> WaitForReport(
+      rtc::scoped_refptr<RTCStatsObtainer> callback) {
+    EXPECT_TRUE_WAIT(callback->report() != nullptr, kGetStatsReportTimeoutMs);
+    int64_t after = rtc::TimeUTCMicros();
+    for (const RTCStats& stats : *callback->report()) {
+      if (stats.type() == RTCRemoteInboundRtpStreamStats::kType ||
+          stats.type() == RTCRemoteOutboundRtpStreamStats::kType) {
+        // Ignore remote timestamps.
+        continue;
+      }
+      EXPECT_LE(stats.timestamp().us(), after);
+    }
+    return callback->report();
+  }
+
+ rtc::scoped_refptr<FakePeerConnectionForStats> pc_;
+ rtc::scoped_refptr<RTCStatsCollector> stats_collector_;
+};
+
+// Test fixture wiring a FakePeerConnectionForStats to an
+// RTCStatsCollectorWrapper, plus helpers for building the "example stats
+// graphs" used by the selection-algorithm tests below.
+class RTCStatsCollectorTest : public ::testing::Test {
+ public:
+  RTCStatsCollectorTest()
+      : pc_(rtc::make_ref_counted<FakePeerConnectionForStats>()),
+        stats_(new RTCStatsCollectorWrapper(pc_)),
+        data_channel_controller_(
+            new FakeDataChannelController(pc_->network_thread())) {}
+
+  // Asserts that `report` contains one RTCCertificateStats per fingerprint
+  // in `certinfo`, chained via issuer_certificate_id (last cert has none).
+  void ExpectReportContainsCertificateInfo(
+      const rtc::scoped_refptr<const RTCStatsReport>& report,
+      const CertificateInfo& certinfo) {
+    for (size_t i = 0; i < certinfo.fingerprints.size(); ++i) {
+      RTCCertificateStats expected_certificate_stats(
+          "CF" + certinfo.fingerprints[i], report->timestamp());
+      expected_certificate_stats.fingerprint = certinfo.fingerprints[i];
+      expected_certificate_stats.fingerprint_algorithm = "sha-1";
+      expected_certificate_stats.base64_certificate = certinfo.pems[i];
+      if (i + 1 < certinfo.fingerprints.size()) {
+        expected_certificate_stats.issuer_certificate_id =
+            "CF" + certinfo.fingerprints[i + 1];
+      }
+      ASSERT_TRUE(report->Get(expected_certificate_stats.id()));
+      EXPECT_EQ(expected_certificate_stats,
+                report->Get(expected_certificate_stats.id())
+                    ->cast_to<RTCCertificateStats>());
+    }
+  }
+
+  // Returns the certificate stats whose fingerprint matches, or nullptr.
+  const RTCCertificateStats* GetCertificateStatsFromFingerprint(
+      const rtc::scoped_refptr<const RTCStatsReport>& report,
+      const std::string& fingerprint) {
+    auto certificates = report->GetStatsOfType<RTCCertificateStats>();
+    for (const auto* certificate : certificates) {
+      if (*certificate->fingerprint == fingerprint) {
+        return certificate;
+      }
+    }
+    return nullptr;
+  }
+
+  // Bundle of IDs and objects making up one example stats graph; the string
+  // members are the stat IDs expected to appear in `full_report`.
+  struct ExampleStatsGraph {
+    rtc::scoped_refptr<RtpSenderInternal> sender;
+    rtc::scoped_refptr<RtpReceiverInternal> receiver;
+
+    rtc::scoped_refptr<const RTCStatsReport> full_report;
+    std::string send_codec_id;
+    std::string recv_codec_id;
+    std::string outbound_rtp_id;
+    std::string inbound_rtp_id;
+    std::string remote_outbound_rtp_id;
+    std::string transport_id;
+    std::string peer_connection_id;
+    std::string media_source_id;
+  };
+
+  // Sets up the example stats graph (see ASCII art below) for a video only
+  // call. The graph is used for testing the stats selection algorithm (see
+  // https://w3c.github.io/webrtc-pc/#dfn-stats-selection-algorithm).
+  // These tests test the integration of the stats traversal algorithm inside
+  // of RTCStatsCollector. See rtc_stats_traversal_unittest.cc for more stats
+  // traversal tests.
+  ExampleStatsGraph SetupExampleStatsGraphForSelectorTests() {
+    ExampleStatsGraph graph;
+
+    // codec (send)
+    graph.send_codec_id = "COTTransportName1_1";
+    cricket::VideoMediaInfo video_media_info;
+    RtpCodecParameters send_codec;
+    send_codec.payload_type = 1;
+    send_codec.clock_rate = 0;
+    video_media_info.send_codecs.insert(
+        std::make_pair(send_codec.payload_type, send_codec));
+    // codec (recv)
+    graph.recv_codec_id = "CITTransportName1_2";
+    RtpCodecParameters recv_codec;
+    recv_codec.payload_type = 2;
+    recv_codec.clock_rate = 0;
+    video_media_info.receive_codecs.insert(
+        std::make_pair(recv_codec.payload_type, recv_codec));
+    // outbound-rtp
+    graph.outbound_rtp_id = "OTTransportName1V3";
+    video_media_info.senders.push_back(cricket::VideoSenderInfo());
+    video_media_info.senders[0].local_stats.push_back(
+        cricket::SsrcSenderInfo());
+    video_media_info.senders[0].local_stats[0].ssrc = 3;
+    video_media_info.senders[0].codec_payload_type = send_codec.payload_type;
+    video_media_info.aggregated_senders.push_back(video_media_info.senders[0]);
+    // inbound-rtp
+    graph.inbound_rtp_id = "ITTransportName1V4";
+    video_media_info.receivers.push_back(cricket::VideoReceiverInfo());
+    video_media_info.receivers[0].local_stats.push_back(
+        cricket::SsrcReceiverInfo());
+    video_media_info.receivers[0].local_stats[0].ssrc = 4;
+    video_media_info.receivers[0].codec_payload_type = recv_codec.payload_type;
+    // transport
+    graph.transport_id = "TTransportName1";
+    pc_->AddVideoChannel("VideoMid", "TransportName", video_media_info);
+    // outbound-rtp's sender
+    graph.sender = stats_->SetupLocalTrackAndSender(
+        cricket::MEDIA_TYPE_VIDEO, "LocalVideoTrackID", 3, false, 50);
+    // inbound-rtp's receiver
+    graph.receiver = stats_->SetupRemoteTrackAndReceiver(
+        cricket::MEDIA_TYPE_VIDEO, "RemoteVideoTrackID", "RemoteStreamId", 4);
+    // peer-connection
+    graph.peer_connection_id = "P";
+    // media-source (kind: video)
+    graph.media_source_id = "SV" + rtc::ToString(graph.sender->AttachmentId());
+
+    // Expected stats graph:
+    //
+    //  media-source        peer-connection
+    //    ^
+    //    |
+    //    +--------- outbound-rtp   inbound-rtp
+    //                |        |     |       |
+    //                v        v     v       v
+    //       codec (send)     transport     codec (recv)
+
+    // Verify the stats graph is set up correctly.
+    graph.full_report = stats_->GetStatsReport();
+    EXPECT_EQ(graph.full_report->size(), 7u);
+    EXPECT_TRUE(graph.full_report->Get(graph.send_codec_id));
+    EXPECT_TRUE(graph.full_report->Get(graph.recv_codec_id));
+    EXPECT_TRUE(graph.full_report->Get(graph.outbound_rtp_id));
+    EXPECT_TRUE(graph.full_report->Get(graph.inbound_rtp_id));
+    EXPECT_TRUE(graph.full_report->Get(graph.transport_id));
+    EXPECT_TRUE(graph.full_report->Get(graph.peer_connection_id));
+    EXPECT_TRUE(graph.full_report->Get(graph.media_source_id));
+    const auto& outbound_rtp = graph.full_report->Get(graph.outbound_rtp_id)
+                                   ->cast_to<RTCOutboundRtpStreamStats>();
+    EXPECT_EQ(*outbound_rtp.media_source_id, graph.media_source_id);
+    EXPECT_EQ(*outbound_rtp.codec_id, graph.send_codec_id);
+    EXPECT_EQ(*outbound_rtp.transport_id, graph.transport_id);
+    EXPECT_TRUE(graph.full_report->Get(graph.inbound_rtp_id));
+    // We can't use an ASSERT in a function returning non-void, so just return.
+    if (!graph.full_report->Get(graph.inbound_rtp_id)) {
+      return graph;
+    }
+    const auto& inbound_rtp = graph.full_report->Get(graph.inbound_rtp_id)
+                                  ->cast_to<RTCInboundRtpStreamStats>();
+    EXPECT_EQ(*inbound_rtp.codec_id, graph.recv_codec_id);
+    EXPECT_EQ(*inbound_rtp.transport_id, graph.transport_id);
+
+    return graph;
+  }
+
+  // Sets up an example stats graph (see ASCII art below) for an audio only
+  // call and checks that the expected stats are generated.
+  ExampleStatsGraph SetupExampleStatsVoiceGraph(
+      bool add_remote_outbound_stats) {
+    constexpr uint32_t kLocalSsrc = 3;
+    constexpr uint32_t kRemoteSsrc = 4;
+    ExampleStatsGraph graph;
+
+    // codec (send)
+    graph.send_codec_id = "COTTransportName1_1";
+    cricket::VoiceMediaInfo media_info;
+    RtpCodecParameters send_codec;
+    send_codec.payload_type = 1;
+    send_codec.clock_rate = 0;
+    media_info.send_codecs.insert(
+        std::make_pair(send_codec.payload_type, send_codec));
+    // codec (recv)
+    graph.recv_codec_id = "CITTransportName1_2";
+    RtpCodecParameters recv_codec;
+    recv_codec.payload_type = 2;
+    recv_codec.clock_rate = 0;
+    media_info.receive_codecs.insert(
+        std::make_pair(recv_codec.payload_type, recv_codec));
+    // outbound-rtp
+    graph.outbound_rtp_id = "OTTransportName1A3";
+    media_info.senders.push_back(cricket::VoiceSenderInfo());
+    media_info.senders[0].local_stats.push_back(cricket::SsrcSenderInfo());
+    media_info.senders[0].local_stats[0].ssrc = kLocalSsrc;
+    media_info.senders[0].codec_payload_type = send_codec.payload_type;
+    // inbound-rtp
+    graph.inbound_rtp_id = "ITTransportName1A4";
+    media_info.receivers.push_back(cricket::VoiceReceiverInfo());
+    media_info.receivers[0].local_stats.push_back(cricket::SsrcReceiverInfo());
+    media_info.receivers[0].local_stats[0].ssrc = kRemoteSsrc;
+    media_info.receivers[0].codec_payload_type = recv_codec.payload_type;
+    // remote-outbound-rtp
+    if (add_remote_outbound_stats) {
+      graph.remote_outbound_rtp_id = "ROA4";
+      media_info.receivers[0].last_sender_report_timestamp_ms =
+          kRemoteOutboundStatsTimestampMs;
+      media_info.receivers[0].last_sender_report_remote_timestamp_ms =
+          kRemoteOutboundStatsRemoteTimestampMs;
+      media_info.receivers[0].sender_reports_packets_sent =
+          kRemoteOutboundStatsPacketsSent;
+      media_info.receivers[0].sender_reports_bytes_sent =
+          kRemoteOutboundStatsBytesSent;
+      media_info.receivers[0].sender_reports_reports_count =
+          kRemoteOutboundStatsReportsCount;
+    }
+    // transport
+    graph.transport_id = "TTransportName1";
+    pc_->AddVoiceChannel("VoiceMid", "TransportName", media_info);
+    // outbound-rtp's sender
+    graph.sender = stats_->SetupLocalTrackAndSender(
+        cricket::MEDIA_TYPE_AUDIO, "LocalAudioTrackID", kLocalSsrc, false, 50);
+    // inbound-rtp's receiver
+    graph.receiver = stats_->SetupRemoteTrackAndReceiver(
+        cricket::MEDIA_TYPE_AUDIO, "RemoteAudioTrackID", "RemoteStreamId",
+        kRemoteSsrc);
+    // peer-connection
+    graph.peer_connection_id = "P";
+    // media-source (kind: audio)
+    graph.media_source_id = "SA" + rtc::ToString(graph.sender->AttachmentId());
+
+    // Expected stats graph:
+    //
+    //  media-source        peer-connection
+    //    ^
+    //    |
+    //    +--------- outbound-rtp   inbound-rtp
+    //                |        |     |       |
+    //                v        v     v       v
+    //       codec (send)     transport     codec (recv)
+
+    // Verify the stats graph is set up correctly.
+    graph.full_report = stats_->GetStatsReport();
+    EXPECT_EQ(graph.full_report->size(), add_remote_outbound_stats ? 8u : 7u);
+    EXPECT_TRUE(graph.full_report->Get(graph.send_codec_id));
+    EXPECT_TRUE(graph.full_report->Get(graph.recv_codec_id));
+    EXPECT_TRUE(graph.full_report->Get(graph.outbound_rtp_id));
+    EXPECT_TRUE(graph.full_report->Get(graph.inbound_rtp_id));
+    EXPECT_TRUE(graph.full_report->Get(graph.transport_id));
+    EXPECT_TRUE(graph.full_report->Get(graph.peer_connection_id));
+    EXPECT_TRUE(graph.full_report->Get(graph.media_source_id));
+    // `graph.remote_outbound_rtp_id` is omitted on purpose so that expectations
+    // can be added by the caller depending on what value it sets for the
+    // `add_remote_outbound_stats` argument.
+    const auto& outbound_rtp = graph.full_report->Get(graph.outbound_rtp_id)
+                                   ->cast_to<RTCOutboundRtpStreamStats>();
+    EXPECT_EQ(*outbound_rtp.media_source_id, graph.media_source_id);
+    EXPECT_EQ(*outbound_rtp.codec_id, graph.send_codec_id);
+    EXPECT_EQ(*outbound_rtp.transport_id, graph.transport_id);
+    EXPECT_TRUE(graph.full_report->Get(graph.inbound_rtp_id));
+    // We can't use ASSERT in a function with a return value.
+    if (!graph.full_report->Get(graph.inbound_rtp_id)) {
+      return graph;
+    }
+    const auto& inbound_rtp = graph.full_report->Get(graph.inbound_rtp_id)
+                                  ->cast_to<RTCInboundRtpStreamStats>();
+    EXPECT_EQ(*inbound_rtp.codec_id, graph.recv_codec_id);
+    EXPECT_EQ(*inbound_rtp.transport_id, graph.transport_id);
+
+    return graph;
+  }
+
+ protected:
+  rtc::ScopedFakeClock fake_clock_;
+  rtc::AutoThread main_thread_;
+  rtc::scoped_refptr<FakePeerConnectionForStats> pc_;
+  std::unique_ptr<RTCStatsCollectorWrapper> stats_;
+  std::unique_ptr<FakeDataChannelController> data_channel_controller_;
+};
+
+// A single GetStatsReport() callback eventually receives a non-null report.
+TEST_F(RTCStatsCollectorTest, SingleCallback) {
+  rtc::scoped_refptr<const RTCStatsReport> result;
+  stats_->stats_collector()->GetStatsReport(RTCStatsObtainer::Create(&result));
+  EXPECT_TRUE_WAIT(result != nullptr, kGetStatsReportTimeoutMs);
+}
+
+// Multiple callbacks issued back-to-back all resolve to the very same report
+// object (the cache serves them a shared instance).
+TEST_F(RTCStatsCollectorTest, MultipleCallbacks) {
+  rtc::scoped_refptr<const RTCStatsReport> a, b, c;
+  stats_->stats_collector()->GetStatsReport(RTCStatsObtainer::Create(&a));
+  stats_->stats_collector()->GetStatsReport(RTCStatsObtainer::Create(&b));
+  stats_->stats_collector()->GetStatsReport(RTCStatsObtainer::Create(&c));
+  EXPECT_TRUE_WAIT(a != nullptr, kGetStatsReportTimeoutMs);
+  EXPECT_TRUE_WAIT(b != nullptr, kGetStatsReportTimeoutMs);
+  EXPECT_TRUE_WAIT(c != nullptr, kGetStatsReportTimeoutMs);
+
+  EXPECT_EQ(a.get(), b.get());
+  EXPECT_EQ(b.get(), c.get());
+}
+
+// The report cache is invalidated both by an explicit clear and by letting
+// more than the cache lifetime (50 ms, see wrapper ctor) elapse.
+TEST_F(RTCStatsCollectorTest, CachedStatsReports) {
+  // Caching should ensure `a` and `b` are the same report.
+  rtc::scoped_refptr<const RTCStatsReport> a = stats_->GetStatsReport();
+  rtc::scoped_refptr<const RTCStatsReport> b = stats_->GetStatsReport();
+  EXPECT_EQ(a.get(), b.get());
+  // Invalidate cache by clearing it.
+  stats_->stats_collector()->ClearCachedStatsReport();
+  rtc::scoped_refptr<const RTCStatsReport> c = stats_->GetStatsReport();
+  EXPECT_NE(b.get(), c.get());
+  // Invalidate cache by advancing time.
+  fake_clock_.AdvanceTime(TimeDelta::Millis(51));
+  rtc::scoped_refptr<const RTCStatsReport> d = stats_->GetStatsReport();
+  EXPECT_TRUE(d);
+  EXPECT_NE(c.get(), d.get());
+}
+
+// A callback issued after cache expiry gets a fresher report than callbacks
+// issued before it.
+TEST_F(RTCStatsCollectorTest, MultipleCallbacksWithInvalidatedCacheInBetween) {
+  rtc::scoped_refptr<const RTCStatsReport> a, b, c;
+  stats_->stats_collector()->GetStatsReport(RTCStatsObtainer::Create(&a));
+  stats_->stats_collector()->GetStatsReport(RTCStatsObtainer::Create(&b));
+  // Cache is invalidated after 50 ms.
+  fake_clock_.AdvanceTime(TimeDelta::Millis(51));
+  stats_->stats_collector()->GetStatsReport(RTCStatsObtainer::Create(&c));
+  EXPECT_TRUE_WAIT(a != nullptr, kGetStatsReportTimeoutMs);
+  EXPECT_TRUE_WAIT(b != nullptr, kGetStatsReportTimeoutMs);
+  EXPECT_TRUE_WAIT(c != nullptr, kGetStatsReportTimeoutMs);
+  EXPECT_EQ(a.get(), b.get());
+  // The act of doing `AdvanceTime` processes all messages. If this was not the
+  // case we might not require `c` to be fresher than `b`.
+  EXPECT_NE(c.get(), b.get());
+}
+
+// RTCStatsReport::ToJson() output must parse as JSON with one entry per stat.
+TEST_F(RTCStatsCollectorTest, ToJsonProducesParseableJson) {
+  ExampleStatsGraph graph = SetupExampleStatsGraphForSelectorTests();
+  rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport();
+  std::string json_format = report->ToJson();
+
+  Json::CharReaderBuilder builder;
+  Json::Value json_value;
+  std::unique_ptr<Json::CharReader> reader(builder.newCharReader());
+  ASSERT_TRUE(reader->parse(json_format.c_str(),
+                            json_format.c_str() + json_format.size(),
+                            &json_value, nullptr));
+
+  // A very brief sanity check on the result.
+  EXPECT_EQ(report->size(), json_value.size());
+}
+
+// Single local and single remote certificates both surface as
+// RTCCertificateStats in the report.
+TEST_F(RTCStatsCollectorTest, CollectRTCCertificateStatsSingle) {
+  const char kTransportName[] = "transport";
+
+  pc_->AddVoiceChannel("audio", kTransportName);
+
+  std::unique_ptr<CertificateInfo> local_certinfo =
+      CreateFakeCertificateAndInfoFromDers(
+          std::vector<std::string>({"(local) single certificate"}));
+  pc_->SetLocalCertificate(kTransportName, local_certinfo->certificate);
+
+  std::unique_ptr<CertificateInfo> remote_certinfo =
+      CreateFakeCertificateAndInfoFromDers(
+          std::vector<std::string>({"(remote) single certificate"}));
+  pc_->SetRemoteCertChain(
+      kTransportName,
+      remote_certinfo->certificate->GetSSLCertificateChain().Clone());
+
+  rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport();
+
+  ExpectReportContainsCertificateInfo(report, *local_certinfo);
+  ExpectReportContainsCertificateInfo(report, *remote_certinfo);
+}
+
+// These SSRC collisions are legal: SSRCs only need to be unique within a
+// BUNDLE group (i.e. per transport), not across transports.
+TEST_F(RTCStatsCollectorTest, ValidSsrcCollisionDoesNotCrash) {
+  // BUNDLE audio/video inbound/outbound. Unique SSRCs needed within the BUNDLE.
+  cricket::VoiceMediaInfo mid1_info;
+  mid1_info.receivers.emplace_back();
+  mid1_info.receivers[0].add_ssrc(1);
+  mid1_info.senders.emplace_back();
+  mid1_info.senders[0].add_ssrc(2);
+  pc_->AddVoiceChannel("Mid1", "Transport1", mid1_info);
+  cricket::VideoMediaInfo mid2_info;
+  mid2_info.receivers.emplace_back();
+  mid2_info.receivers[0].add_ssrc(3);
+  mid2_info.senders.emplace_back();
+  mid2_info.senders[0].add_ssrc(4);
+  pc_->AddVideoChannel("Mid2", "Transport1", mid2_info);
+  // Now create a second BUNDLE group with SSRCs colliding with the first group
+  // (but again no collisions within the group).
+  cricket::VoiceMediaInfo mid3_info;
+  mid3_info.receivers.emplace_back();
+  mid3_info.receivers[0].add_ssrc(1);
+  mid3_info.senders.emplace_back();
+  mid3_info.senders[0].add_ssrc(2);
+  pc_->AddVoiceChannel("Mid3", "Transport2", mid3_info);
+  cricket::VideoMediaInfo mid4_info;
+  mid4_info.receivers.emplace_back();
+  mid4_info.receivers[0].add_ssrc(3);
+  mid4_info.senders.emplace_back();
+  mid4_info.senders[0].add_ssrc(4);
+  pc_->AddVideoChannel("Mid4", "Transport2", mid4_info);
+
+  // This should not crash (https://crbug.com/1361612).
+  rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport();
+  auto inbound_rtps = report->GetStatsOfType<RTCInboundRtpStreamStats>();
+  auto outbound_rtps = report->GetStatsOfType<RTCOutboundRtpStreamStats>();
+  EXPECT_EQ(inbound_rtps.size(), 4u);
+  EXPECT_EQ(outbound_rtps.size(), 4u);
+}
+
+// These SSRC collisions are illegal, so it is not clear if this setup can
+// happen even when talking to a malicious endpoint, but simulate illegal SSRC
+// collisions just to make sure we don't crash in even the most extreme cases.
+TEST_F(RTCStatsCollectorTest, InvalidSsrcCollisionDoesNotCrash) {
+  // One SSRC to rule them all.
+  cricket::VoiceMediaInfo mid1_info;
+  mid1_info.receivers.emplace_back();
+  mid1_info.receivers[0].add_ssrc(1);
+  mid1_info.senders.emplace_back();
+  mid1_info.senders[0].add_ssrc(1);
+  pc_->AddVoiceChannel("Mid1", "BundledTransport", mid1_info);
+  cricket::VideoMediaInfo mid2_info;
+  mid2_info.receivers.emplace_back();
+  mid2_info.receivers[0].add_ssrc(1);
+  mid2_info.senders.emplace_back();
+  mid2_info.senders[0].add_ssrc(1);
+  pc_->AddVideoChannel("Mid2", "BundledTransport", mid2_info);
+  cricket::VoiceMediaInfo mid3_info;
+  mid3_info.receivers.emplace_back();
+  mid3_info.receivers[0].add_ssrc(1);
+  mid3_info.senders.emplace_back();
+  mid3_info.senders[0].add_ssrc(1);
+  pc_->AddVoiceChannel("Mid3", "BundledTransport", mid3_info);
+  cricket::VideoMediaInfo mid4_info;
+  mid4_info.receivers.emplace_back();
+  mid4_info.receivers[0].add_ssrc(1);
+  mid4_info.senders.emplace_back();
+  mid4_info.senders[0].add_ssrc(1);
+  pc_->AddVideoChannel("Mid4", "BundledTransport", mid4_info);
+
+  // This should not crash (https://crbug.com/1361612).
+  stats_->GetStatsReport();
+  // Because this setup is illegal, there is no "right answer" to how the report
+  // should look. We only care about not crashing.
+}
+
+// Codecs appear as RTCCodecStats only while at least one RTP stream
+// references their payload type; once all streams are gone the codecs
+// disappear from the report even though `send_codecs`/`receive_codecs`
+// still list them.
+TEST_F(RTCStatsCollectorTest, CollectRTCCodecStatsOnlyIfReferenced) {
+  // Audio
+  cricket::VoiceMediaInfo voice_media_info;
+
+  RtpCodecParameters inbound_audio_codec;
+  inbound_audio_codec.payload_type = 1;
+  inbound_audio_codec.kind = cricket::MEDIA_TYPE_AUDIO;
+  inbound_audio_codec.name = "opus";
+  inbound_audio_codec.clock_rate = 1337;
+  inbound_audio_codec.num_channels = 1;
+  inbound_audio_codec.parameters = {{"minptime", "10"}, {"useinbandfec", "1"}};
+  voice_media_info.receive_codecs.insert(
+      std::make_pair(inbound_audio_codec.payload_type, inbound_audio_codec));
+
+  RtpCodecParameters outbound_audio_codec;
+  outbound_audio_codec.payload_type = 2;
+  outbound_audio_codec.kind = cricket::MEDIA_TYPE_AUDIO;
+  outbound_audio_codec.name = "isac";
+  outbound_audio_codec.clock_rate = 1338;
+  outbound_audio_codec.num_channels = 2;
+  voice_media_info.send_codecs.insert(
+      std::make_pair(outbound_audio_codec.payload_type, outbound_audio_codec));
+
+  // Video
+  cricket::VideoMediaInfo video_media_info;
+
+  RtpCodecParameters inbound_video_codec;
+  inbound_video_codec.payload_type = 3;
+  inbound_video_codec.kind = cricket::MEDIA_TYPE_VIDEO;
+  inbound_video_codec.name = "H264";
+  inbound_video_codec.clock_rate = 1339;
+  inbound_video_codec.parameters = {{"level-asymmetry-allowed", "1"},
+                                    {"packetization-mode", "1"},
+                                    {"profile-level-id", "42001f"}};
+  video_media_info.receive_codecs.insert(
+      std::make_pair(inbound_video_codec.payload_type, inbound_video_codec));
+
+  RtpCodecParameters outbound_video_codec;
+  outbound_video_codec.payload_type = 4;
+  outbound_video_codec.kind = cricket::MEDIA_TYPE_VIDEO;
+  outbound_video_codec.name = "VP8";
+  outbound_video_codec.clock_rate = 1340;
+  video_media_info.send_codecs.insert(
+      std::make_pair(outbound_video_codec.payload_type, outbound_video_codec));
+
+  // Ensure the above codecs are referenced.
+  cricket::VoiceReceiverInfo inbound_audio_info;
+  inbound_audio_info.add_ssrc(10);
+  inbound_audio_info.codec_payload_type = 1;
+  voice_media_info.receivers.push_back(inbound_audio_info);
+
+  cricket::VoiceSenderInfo outbound_audio_info;
+  outbound_audio_info.add_ssrc(20);
+  outbound_audio_info.codec_payload_type = 2;
+  voice_media_info.senders.push_back(outbound_audio_info);
+
+  cricket::VideoReceiverInfo inbound_video_info;
+  inbound_video_info.add_ssrc(30);
+  inbound_video_info.codec_payload_type = 3;
+  video_media_info.receivers.push_back(inbound_video_info);
+
+  cricket::VideoSenderInfo outbound_video_info;
+  outbound_video_info.add_ssrc(40);
+  outbound_video_info.codec_payload_type = 4;
+  video_media_info.senders.push_back(outbound_video_info);
+
+  auto audio_channels =
+      pc_->AddVoiceChannel("AudioMid", "TransportName", voice_media_info);
+  auto video_channels =
+      pc_->AddVideoChannel("VideoMid", "TransportName", video_media_info);
+
+  rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport();
+
+  // Codec stat IDs embed direction, transport and fmtp line.
+  RTCCodecStats expected_inbound_audio_codec(
+      "CITTransportName1_1_minptime=10;useinbandfec=1", report->timestamp());
+  expected_inbound_audio_codec.payload_type = 1;
+  expected_inbound_audio_codec.mime_type = "audio/opus";
+  expected_inbound_audio_codec.clock_rate = 1337;
+  expected_inbound_audio_codec.channels = 1;
+  expected_inbound_audio_codec.sdp_fmtp_line = "minptime=10;useinbandfec=1";
+  expected_inbound_audio_codec.transport_id = "TTransportName1";
+
+  RTCCodecStats expected_outbound_audio_codec("COTTransportName1_2",
+                                              report->timestamp());
+  expected_outbound_audio_codec.payload_type = 2;
+  expected_outbound_audio_codec.mime_type = "audio/isac";
+  expected_outbound_audio_codec.clock_rate = 1338;
+  expected_outbound_audio_codec.channels = 2;
+  expected_outbound_audio_codec.transport_id = "TTransportName1";
+
+  RTCCodecStats expected_inbound_video_codec(
+      "CITTransportName1_3_level-asymmetry-allowed=1;"
+      "packetization-mode=1;profile-level-id=42001f",
+      report->timestamp());
+  expected_inbound_video_codec.payload_type = 3;
+  expected_inbound_video_codec.mime_type = "video/H264";
+  expected_inbound_video_codec.clock_rate = 1339;
+  expected_inbound_video_codec.sdp_fmtp_line =
+      "level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=42001f";
+  expected_inbound_video_codec.transport_id = "TTransportName1";
+
+  RTCCodecStats expected_outbound_video_codec("COTTransportName1_4",
+                                              report->timestamp());
+  expected_outbound_video_codec.payload_type = 4;
+  expected_outbound_video_codec.mime_type = "video/VP8";
+  expected_outbound_video_codec.clock_rate = 1340;
+  expected_outbound_video_codec.transport_id = "TTransportName1";
+
+  ASSERT_TRUE(report->Get(expected_inbound_audio_codec.id()));
+  EXPECT_EQ(
+      expected_inbound_audio_codec,
+      report->Get(expected_inbound_audio_codec.id())->cast_to<RTCCodecStats>());
+
+  ASSERT_TRUE(report->Get(expected_outbound_audio_codec.id()));
+  EXPECT_EQ(expected_outbound_audio_codec,
+            report->Get(expected_outbound_audio_codec.id())
+                ->cast_to<RTCCodecStats>());
+
+  ASSERT_TRUE(report->Get(expected_inbound_video_codec.id()));
+  EXPECT_EQ(
+      expected_inbound_video_codec,
+      report->Get(expected_inbound_video_codec.id())->cast_to<RTCCodecStats>());
+
+  ASSERT_TRUE(report->Get(expected_outbound_video_codec.id()));
+  EXPECT_EQ(expected_outbound_video_codec,
+            report->Get(expected_outbound_video_codec.id())
+                ->cast_to<RTCCodecStats>());
+
+  // Now remove all the RTP streams such that there are no live codecId
+  // references to the codecs, this should result in none of the RTCCodecStats
+  // being exposed, despite `send_codecs` and `receive_codecs` still being set.
+  voice_media_info.senders.clear();
+  voice_media_info.receivers.clear();
+  audio_channels.first->SetStats(voice_media_info);
+  audio_channels.second->SetStats(voice_media_info);
+  video_media_info.senders.clear();
+  video_media_info.receivers.clear();
+  video_channels.first->SetStats(video_media_info);
+  video_channels.second->SetStats(video_media_info);
+  stats_->stats_collector()->ClearCachedStatsReport();
+  report = stats_->GetStatsReport();
+  EXPECT_FALSE(report->Get(expected_inbound_audio_codec.id()));
+  EXPECT_FALSE(report->Get(expected_outbound_audio_codec.id()));
+  EXPECT_FALSE(report->Get(expected_inbound_video_codec.id()));
+  EXPECT_FALSE(report->Get(expected_outbound_video_codec.id()));
+}
+
+// Codec stats are deduplicated per transport: identical codecs referenced by
+// several m= sections on one transport yield a single RTCCodecStats each,
+// while the same codecs on a second transport count as new objects.
+TEST_F(RTCStatsCollectorTest, CodecStatsAreCollectedPerTransport) {
+  // PT=10
+  RtpCodecParameters outbound_codec_pt10;
+  outbound_codec_pt10.payload_type = 10;
+  outbound_codec_pt10.kind = cricket::MEDIA_TYPE_VIDEO;
+  outbound_codec_pt10.name = "VP8";
+  outbound_codec_pt10.clock_rate = 9000;
+
+  // PT=11
+  RtpCodecParameters outbound_codec_pt11;
+  outbound_codec_pt11.payload_type = 11;
+  outbound_codec_pt11.kind = cricket::MEDIA_TYPE_VIDEO;
+  outbound_codec_pt11.name = "VP8";
+  outbound_codec_pt11.clock_rate = 9000;
+
+  // Insert codecs into `send_codecs` and ensure the PTs are referenced by RTP
+  // streams.
+  cricket::VideoMediaInfo info_pt10;
+  info_pt10.send_codecs.insert(
+      std::make_pair(outbound_codec_pt10.payload_type, outbound_codec_pt10));
+  info_pt10.senders.emplace_back();
+  info_pt10.senders[0].add_ssrc(42);
+  info_pt10.senders[0].codec_payload_type = outbound_codec_pt10.payload_type;
+
+  cricket::VideoMediaInfo info_pt11;
+  info_pt11.send_codecs.insert(
+      std::make_pair(outbound_codec_pt11.payload_type, outbound_codec_pt11));
+  info_pt11.senders.emplace_back();
+  info_pt11.senders[0].add_ssrc(43);
+  info_pt11.senders[0].codec_payload_type = outbound_codec_pt11.payload_type;
+
+  cricket::VideoMediaInfo info_pt10_pt11;
+  info_pt10_pt11.send_codecs.insert(
+      std::make_pair(outbound_codec_pt10.payload_type, outbound_codec_pt10));
+  info_pt10_pt11.send_codecs.insert(
+      std::make_pair(outbound_codec_pt11.payload_type, outbound_codec_pt11));
+  info_pt10_pt11.senders.emplace_back();
+  info_pt10_pt11.senders[0].add_ssrc(44);
+  info_pt10_pt11.senders[0].codec_payload_type =
+      outbound_codec_pt10.payload_type;
+  info_pt10_pt11.senders.emplace_back();
+  info_pt10_pt11.senders[1].add_ssrc(45);
+  info_pt10_pt11.senders[1].codec_payload_type =
+      outbound_codec_pt11.payload_type;
+
+  // First two mids contain subsets, the third one contains all PTs.
+  pc_->AddVideoChannel("Mid1", "FirstTransport", info_pt10);
+  pc_->AddVideoChannel("Mid2", "FirstTransport", info_pt11);
+  pc_->AddVideoChannel("Mid3", "FirstTransport", info_pt10_pt11);
+
+  // There should be no duplicate codecs because all codec references are on the
+  // same transport.
+  rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport();
+  auto codec_stats = report->GetStatsOfType<RTCCodecStats>();
+  EXPECT_EQ(codec_stats.size(), 2u);
+
+  // If a second transport is added with the same PT information, this does
+  // count as different codec objects.
+  pc_->AddVideoChannel("Mid4", "SecondTransport", info_pt10_pt11);
+  stats_->stats_collector()->ClearCachedStatsReport();
+  report = stats_->GetStatsReport();
+  codec_stats = report->GetStatsOfType<RTCCodecStats>();
+  EXPECT_EQ(codec_stats.size(), 4u);
+}
+
+// Verifies codec-stats identity when payload types collide: the same PT with
+// different FMTP parameters produces two RTCCodecStats objects; identical
+// PT+FMTP pairs repeated across additional m= sections are deduplicated; and
+// a new PT with an identical FMTP line still counts as a new codec.
+TEST_F(RTCStatsCollectorTest, SamePayloadTypeButDifferentFmtpLines) {
+ // PT=111, useinbandfec=0
+ RtpCodecParameters inbound_codec_pt111_nofec;
+ inbound_codec_pt111_nofec.payload_type = 111;
+ inbound_codec_pt111_nofec.kind = cricket::MEDIA_TYPE_AUDIO;
+ inbound_codec_pt111_nofec.name = "opus";
+ inbound_codec_pt111_nofec.clock_rate = 48000;
+ inbound_codec_pt111_nofec.parameters.insert(
+ std::make_pair("useinbandfec", "0"));
+
+ // PT=111, useinbandfec=1
+ RtpCodecParameters inbound_codec_pt111_fec;
+ inbound_codec_pt111_fec.payload_type = 111;
+ inbound_codec_pt111_fec.kind = cricket::MEDIA_TYPE_AUDIO;
+ inbound_codec_pt111_fec.name = "opus";
+ inbound_codec_pt111_fec.clock_rate = 48000;
+ inbound_codec_pt111_fec.parameters.insert(
+ std::make_pair("useinbandfec", "1"));
+
+ // NOTE(review): opus (MEDIA_TYPE_AUDIO) codecs are inserted into
+ // cricket::VideoMediaInfo and added via AddVideoChannel(). Presumably only
+ // the codec maps matter for this test — confirm this mismatch is deliberate.
+ cricket::VideoMediaInfo info_nofec;
+ info_nofec.receive_codecs.insert(std::make_pair(
+ inbound_codec_pt111_nofec.payload_type, inbound_codec_pt111_nofec));
+ info_nofec.receivers.emplace_back();
+ info_nofec.receivers[0].add_ssrc(123);
+ info_nofec.receivers[0].codec_payload_type =
+ inbound_codec_pt111_nofec.payload_type;
+ cricket::VideoMediaInfo info_fec;
+ info_fec.receive_codecs.insert(std::make_pair(
+ inbound_codec_pt111_fec.payload_type, inbound_codec_pt111_fec));
+ info_fec.receivers.emplace_back();
+ info_fec.receivers[0].add_ssrc(321);
+ info_fec.receivers[0].codec_payload_type =
+ inbound_codec_pt111_fec.payload_type;
+
+ // First two mids contain subsets, the third one contains all PTs.
+ pc_->AddVideoChannel("Mid1", "BundledTransport", info_nofec);
+ pc_->AddVideoChannel("Mid2", "BundledTransport", info_fec);
+
+ // Despite having the same PT we should see two codec stats because their FMTP
+ // lines are different.
+ rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport();
+ auto codec_stats = report->GetStatsOfType<RTCCodecStats>();
+ EXPECT_EQ(codec_stats.size(), 2u);
+
+ // Ensure SSRC uniqueness before the next AddVideoChannel() call. SSRCs need
+ // to be unique on different m= sections when using BUNDLE.
+ info_nofec.receivers[0].local_stats[0].ssrc = 12;
+ info_fec.receivers[0].local_stats[0].ssrc = 21;
+ // Adding more m= sections that does have the same FMTP lines does not result
+ // in duplicates.
+ pc_->AddVideoChannel("Mid3", "BundledTransport", info_nofec);
+ pc_->AddVideoChannel("Mid4", "BundledTransport", info_fec);
+ stats_->stats_collector()->ClearCachedStatsReport();
+ report = stats_->GetStatsReport();
+ codec_stats = report->GetStatsOfType<RTCCodecStats>();
+ EXPECT_EQ(codec_stats.size(), 2u);
+
+ // Same FMTP line but a different PT does count as a new codec.
+ // PT=112, useinbandfec=1
+ RtpCodecParameters inbound_codec_pt112_fec;
+ inbound_codec_pt112_fec.payload_type = 112;
+ inbound_codec_pt112_fec.kind = cricket::MEDIA_TYPE_AUDIO;
+ inbound_codec_pt112_fec.name = "opus";
+ inbound_codec_pt112_fec.clock_rate = 48000;
+ inbound_codec_pt112_fec.parameters.insert(
+ std::make_pair("useinbandfec", "1"));
+ cricket::VideoMediaInfo info_fec_pt112;
+ info_fec_pt112.receive_codecs.insert(std::make_pair(
+ inbound_codec_pt112_fec.payload_type, inbound_codec_pt112_fec));
+ info_fec_pt112.receivers.emplace_back();
+ info_fec_pt112.receivers[0].add_ssrc(112);
+ info_fec_pt112.receivers[0].codec_payload_type =
+ inbound_codec_pt112_fec.payload_type;
+ pc_->AddVideoChannel("Mid5", "BundledTransport", info_fec_pt112);
+ stats_->stats_collector()->ClearCachedStatsReport();
+ report = stats_->GetStatsReport();
+ codec_stats = report->GetStatsOfType<RTCCodecStats>();
+ EXPECT_EQ(codec_stats.size(), 3u);
+}
+
+// Verifies that when two transports each have a local certificate and a
+// remote certificate chain, the stats report contains certificate stats for
+// all four certificates.
+TEST_F(RTCStatsCollectorTest, CollectRTCCertificateStatsMultiple) {
+ const char kAudioTransport[] = "audio";
+ const char kVideoTransport[] = "video";
+
+ pc_->AddVoiceChannel("audio", kAudioTransport);
+
+ // Attach distinct local/remote fake certificates to the audio transport.
+ std::unique_ptr<CertificateInfo> audio_local_certinfo =
+ CreateFakeCertificateAndInfoFromDers(
+ std::vector<std::string>({"(local) audio"}));
+ pc_->SetLocalCertificate(kAudioTransport, audio_local_certinfo->certificate);
+ std::unique_ptr<CertificateInfo> audio_remote_certinfo =
+ CreateFakeCertificateAndInfoFromDers(
+ std::vector<std::string>({"(remote) audio"}));
+ pc_->SetRemoteCertChain(
+ kAudioTransport,
+ audio_remote_certinfo->certificate->GetSSLCertificateChain().Clone());
+
+ // Same setup for a second, independent video transport.
+ pc_->AddVideoChannel("video", kVideoTransport);
+ std::unique_ptr<CertificateInfo> video_local_certinfo =
+ CreateFakeCertificateAndInfoFromDers(
+ std::vector<std::string>({"(local) video"}));
+ pc_->SetLocalCertificate(kVideoTransport, video_local_certinfo->certificate);
+ std::unique_ptr<CertificateInfo> video_remote_certinfo =
+ CreateFakeCertificateAndInfoFromDers(
+ std::vector<std::string>({"(remote) video"}));
+ pc_->SetRemoteCertChain(
+ kVideoTransport,
+ video_remote_certinfo->certificate->GetSSLCertificateChain().Clone());
+
+ rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport();
+ ExpectReportContainsCertificateInfo(report, *audio_local_certinfo);
+ ExpectReportContainsCertificateInfo(report, *audio_remote_certinfo);
+ ExpectReportContainsCertificateInfo(report, *video_local_certinfo);
+ ExpectReportContainsCertificateInfo(report, *video_remote_certinfo);
+}
+
+// Verifies that every certificate in a multi-certificate chain (four local,
+// four remote) is represented in the stats report, not just the leaf.
+TEST_F(RTCStatsCollectorTest, CollectRTCCertificateStatsChain) {
+ const char kTransportName[] = "transport";
+
+ pc_->AddVoiceChannel("audio", kTransportName);
+
+ std::unique_ptr<CertificateInfo> local_certinfo =
+ CreateFakeCertificateAndInfoFromDers(
+ {"(local) this", "(local) is", "(local) a", "(local) chain"});
+ pc_->SetLocalCertificate(kTransportName, local_certinfo->certificate);
+
+ std::unique_ptr<CertificateInfo> remote_certinfo =
+ CreateFakeCertificateAndInfoFromDers({"(remote) this", "(remote) is",
+ "(remote) another",
+ "(remote) chain"});
+ pc_->SetRemoteCertChain(
+ kTransportName,
+ remote_certinfo->certificate->GetSSLCertificateChain().Clone());
+
+ rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport();
+ // ExpectReportContainsCertificateInfo checks all fingerprints in the chain.
+ ExpectReportContainsCertificateInfo(report, *local_certinfo);
+ ExpectReportContainsCertificateInfo(report, *remote_certinfo);
+}
+
+// Verifies the certificate-stats caching behavior: after the certificates on
+// a transport are replaced, a fresh report still contains the *old*
+// certificate stats (with up-to-date timestamps) until the cached stats
+// report is explicitly cleared, at which point only the new certificates
+// appear.
+TEST_F(RTCStatsCollectorTest, CertificateStatsCache) {
+ const char kTransportName[] = "transport";
+ rtc::ScopedFakeClock fake_clock;
+
+ pc_->AddVoiceChannel("audio", kTransportName);
+
+ // Set local and remote cerificates.
+ std::unique_ptr<CertificateInfo> initial_local_certinfo =
+ CreateFakeCertificateAndInfoFromDers({"LocalCertA", "LocalCertB"});
+ pc_->SetLocalCertificate(kTransportName, initial_local_certinfo->certificate);
+ std::unique_ptr<CertificateInfo> initial_remote_certinfo =
+ CreateFakeCertificateAndInfoFromDers({"RemoteCertA", "RemoteCertB"});
+ pc_->SetRemoteCertChain(
+ kTransportName,
+ initial_remote_certinfo->certificate->GetSSLCertificateChain().Clone());
+ ASSERT_EQ(initial_local_certinfo->fingerprints.size(), 2u);
+ ASSERT_EQ(initial_remote_certinfo->fingerprints.size(), 2u);
+
+ // The first report must contain all four certificates, timestamped "now".
+ rtc::scoped_refptr<const RTCStatsReport> first_report =
+ stats_->GetStatsReport();
+ const auto* first_local_cert0 = GetCertificateStatsFromFingerprint(
+ first_report, initial_local_certinfo->fingerprints[0]);
+ const auto* first_local_cert1 = GetCertificateStatsFromFingerprint(
+ first_report, initial_local_certinfo->fingerprints[1]);
+ const auto* first_remote_cert0 = GetCertificateStatsFromFingerprint(
+ first_report, initial_remote_certinfo->fingerprints[0]);
+ const auto* first_remote_cert1 = GetCertificateStatsFromFingerprint(
+ first_report, initial_remote_certinfo->fingerprints[1]);
+ ASSERT_TRUE(first_local_cert0);
+ ASSERT_TRUE(first_local_cert1);
+ ASSERT_TRUE(first_remote_cert0);
+ ASSERT_TRUE(first_remote_cert1);
+ EXPECT_EQ(first_local_cert0->timestamp().us(), rtc::TimeMicros());
+ EXPECT_EQ(first_local_cert1->timestamp().us(), rtc::TimeMicros());
+ EXPECT_EQ(first_remote_cert0->timestamp().us(), rtc::TimeMicros());
+ EXPECT_EQ(first_remote_cert1->timestamp().us(), rtc::TimeMicros());
+
+ // Replace all certificates.
+ std::unique_ptr<CertificateInfo> updated_local_certinfo =
+ CreateFakeCertificateAndInfoFromDers(
+ {"UpdatedLocalCertA", "UpdatedLocalCertB"});
+ pc_->SetLocalCertificate(kTransportName, updated_local_certinfo->certificate);
+ std::unique_ptr<CertificateInfo> updated_remote_certinfo =
+ CreateFakeCertificateAndInfoFromDers(
+ {"UpdatedRemoteCertA", "UpdatedRemoteCertB"});
+ pc_->SetRemoteCertChain(
+ kTransportName,
+ updated_remote_certinfo->certificate->GetSSLCertificateChain().Clone());
+ // This test assumes fingerprints are different for the old and new
+ // certificates.
+ EXPECT_NE(initial_local_certinfo->fingerprints,
+ updated_local_certinfo->fingerprints);
+ EXPECT_NE(initial_remote_certinfo->fingerprints,
+ updated_remote_certinfo->fingerprints);
+
+ // Advance time to ensure a fresh stats report, but don't clear the
+ // certificate stats cache.
+ fake_clock.AdvanceTime(TimeDelta::Seconds(1));
+ rtc::scoped_refptr<const RTCStatsReport> second_report =
+ stats_->GetStatsReport();
+ // We expect to see the same certificates as before due to not clearing the
+ // certificate cache.
+ const auto* second_local_cert0 =
+ second_report->GetAs<RTCCertificateStats>(first_local_cert0->id());
+ const auto* second_local_cert1 =
+ second_report->GetAs<RTCCertificateStats>(first_local_cert1->id());
+ const auto* second_remote_cert0 =
+ second_report->GetAs<RTCCertificateStats>(first_remote_cert0->id());
+ const auto* second_remote_cert1 =
+ second_report->GetAs<RTCCertificateStats>(first_remote_cert1->id());
+ ASSERT_TRUE(second_local_cert0);
+ ASSERT_TRUE(second_local_cert1);
+ ASSERT_TRUE(second_remote_cert0);
+ ASSERT_TRUE(second_remote_cert1);
+ // The information in the certificate stats are obsolete.
+ EXPECT_EQ(*second_local_cert0->fingerprint,
+ initial_local_certinfo->fingerprints[0]);
+ EXPECT_EQ(*second_local_cert1->fingerprint,
+ initial_local_certinfo->fingerprints[1]);
+ EXPECT_EQ(*second_remote_cert0->fingerprint,
+ initial_remote_certinfo->fingerprints[0]);
+ EXPECT_EQ(*second_remote_cert1->fingerprint,
+ initial_remote_certinfo->fingerprints[1]);
+ // But timestamps are up-to-date, because this is a fresh stats report.
+ EXPECT_EQ(second_local_cert0->timestamp().us(), rtc::TimeMicros());
+ EXPECT_EQ(second_local_cert1->timestamp().us(), rtc::TimeMicros());
+ EXPECT_EQ(second_remote_cert0->timestamp().us(), rtc::TimeMicros());
+ EXPECT_EQ(second_remote_cert1->timestamp().us(), rtc::TimeMicros());
+ // The updated certificates are not part of the report yet.
+ EXPECT_FALSE(GetCertificateStatsFromFingerprint(
+ second_report, updated_local_certinfo->fingerprints[0]));
+ EXPECT_FALSE(GetCertificateStatsFromFingerprint(
+ second_report, updated_local_certinfo->fingerprints[1]));
+ EXPECT_FALSE(GetCertificateStatsFromFingerprint(
+ second_report, updated_remote_certinfo->fingerprints[0]));
+ EXPECT_FALSE(GetCertificateStatsFromFingerprint(
+ second_report, updated_remote_certinfo->fingerprints[1]));
+
+ // Clear the cache, including the cached certificates.
+ stats_->stats_collector()->ClearCachedStatsReport();
+ rtc::scoped_refptr<const RTCStatsReport> third_report =
+ stats_->GetStatsReport();
+ // Now the old certificates stats should be deleted.
+ EXPECT_FALSE(third_report->Get(first_local_cert0->id()));
+ EXPECT_FALSE(third_report->Get(first_local_cert1->id()));
+ EXPECT_FALSE(third_report->Get(first_remote_cert0->id()));
+ EXPECT_FALSE(third_report->Get(first_remote_cert1->id()));
+ // And updated certificates exist.
+ EXPECT_TRUE(GetCertificateStatsFromFingerprint(
+ third_report, updated_local_certinfo->fingerprints[0]));
+ EXPECT_TRUE(GetCertificateStatsFromFingerprint(
+ third_report, updated_local_certinfo->fingerprints[1]));
+ EXPECT_TRUE(GetCertificateStatsFromFingerprint(
+ third_report, updated_remote_certinfo->fingerprints[0]));
+ EXPECT_TRUE(GetCertificateStatsFromFingerprint(
+ third_report, updated_remote_certinfo->fingerprints[1]));
+}
+
+// Verifies that data channels whose SCTP id is still unassigned (id == -1,
+// "pending") are reported: the stats id is derived from the internal id
+// allocator ("D0", "D1", ...), not from the (absent) SCTP stream id.
+TEST_F(RTCStatsCollectorTest, CollectTwoRTCDataChannelStatsWithPendingId) {
+ // Note: The test assumes data channel IDs are predictable.
+ // This is not a safe assumption, but in order to make it work for
+ // the test, we reset the ID allocator at test start.
+ SctpDataChannel::ResetInternalIdAllocatorForTesting(-1);
+ pc_->AddSctpDataChannel(rtc::make_ref_counted<MockSctpDataChannel>(
+ data_channel_controller_->weak_ptr(), /*id=*/-1,
+ DataChannelInterface::kConnecting));
+ pc_->AddSctpDataChannel(rtc::make_ref_counted<MockSctpDataChannel>(
+ data_channel_controller_->weak_ptr(), /*id=*/-1,
+ DataChannelInterface::kConnecting));
+
+ rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport();
+ RTCDataChannelStats expected_data_channel0("D0", Timestamp::Zero());
+ // Default values from MockDataChannel.
+ expected_data_channel0.label = "MockSctpDataChannel";
+ expected_data_channel0.protocol = "someProtocol";
+ expected_data_channel0.state = "connecting";
+ expected_data_channel0.messages_sent = 0;
+ expected_data_channel0.bytes_sent = 0;
+ expected_data_channel0.messages_received = 0;
+ expected_data_channel0.bytes_received = 0;
+
+ // Only the first channel's stats are asserted in detail here.
+ ASSERT_TRUE(report->Get(expected_data_channel0.id()));
+ EXPECT_EQ(
+ expected_data_channel0,
+ report->Get(expected_data_channel0.id())->cast_to<RTCDataChannelStats>());
+}
+
+// Verifies that one RTCDataChannelStats object is produced per data channel,
+// covering all four channel states (connecting/open/closing/closed) and all
+// counter members (messages/bytes, sent/received), plus label, protocol and
+// the SCTP stream identifier.
+TEST_F(RTCStatsCollectorTest, CollectRTCDataChannelStats) {
+ // Note: The test assumes data channel IDs are predictable.
+ // This is not a safe assumption, but in order to make it work for
+ // the test, we reset the ID allocator at test start.
+ SctpDataChannel::ResetInternalIdAllocatorForTesting(-1);
+ pc_->AddSctpDataChannel(rtc::make_ref_counted<MockSctpDataChannel>(
+ data_channel_controller_->weak_ptr(), 0, "MockSctpDataChannel0",
+ DataChannelInterface::kConnecting, "proto1", 1, 2, 3, 4));
+ RTCDataChannelStats expected_data_channel0("D0", Timestamp::Zero());
+ expected_data_channel0.label = "MockSctpDataChannel0";
+ expected_data_channel0.protocol = "proto1";
+ expected_data_channel0.data_channel_identifier = 0;
+ expected_data_channel0.state = "connecting";
+ expected_data_channel0.messages_sent = 1;
+ expected_data_channel0.bytes_sent = 2;
+ expected_data_channel0.messages_received = 3;
+ expected_data_channel0.bytes_received = 4;
+
+ pc_->AddSctpDataChannel(rtc::make_ref_counted<MockSctpDataChannel>(
+ data_channel_controller_->weak_ptr(), 1, "MockSctpDataChannel1",
+ DataChannelInterface::kOpen, "proto2", 5, 6, 7, 8));
+ RTCDataChannelStats expected_data_channel1("D1", Timestamp::Zero());
+ expected_data_channel1.label = "MockSctpDataChannel1";
+ expected_data_channel1.protocol = "proto2";
+ expected_data_channel1.data_channel_identifier = 1;
+ expected_data_channel1.state = "open";
+ expected_data_channel1.messages_sent = 5;
+ expected_data_channel1.bytes_sent = 6;
+ expected_data_channel1.messages_received = 7;
+ expected_data_channel1.bytes_received = 8;
+
+ pc_->AddSctpDataChannel(rtc::make_ref_counted<MockSctpDataChannel>(
+ data_channel_controller_->weak_ptr(), 2, "MockSctpDataChannel2",
+ DataChannelInterface::kClosing, "proto1", 9, 10, 11, 12));
+ RTCDataChannelStats expected_data_channel2("D2", Timestamp::Zero());
+ expected_data_channel2.label = "MockSctpDataChannel2";
+ expected_data_channel2.protocol = "proto1";
+ expected_data_channel2.data_channel_identifier = 2;
+ expected_data_channel2.state = "closing";
+ expected_data_channel2.messages_sent = 9;
+ expected_data_channel2.bytes_sent = 10;
+ expected_data_channel2.messages_received = 11;
+ expected_data_channel2.bytes_received = 12;
+
+ pc_->AddSctpDataChannel(rtc::make_ref_counted<MockSctpDataChannel>(
+ data_channel_controller_->weak_ptr(), 3, "MockSctpDataChannel3",
+ DataChannelInterface::kClosed, "proto3", 13, 14, 15, 16));
+ RTCDataChannelStats expected_data_channel3("D3", Timestamp::Zero());
+ expected_data_channel3.label = "MockSctpDataChannel3";
+ expected_data_channel3.protocol = "proto3";
+ expected_data_channel3.data_channel_identifier = 3;
+ expected_data_channel3.state = "closed";
+ expected_data_channel3.messages_sent = 13;
+ expected_data_channel3.bytes_sent = 14;
+ expected_data_channel3.messages_received = 15;
+ expected_data_channel3.bytes_received = 16;
+
+ rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport();
+
+ ASSERT_TRUE(report->Get(expected_data_channel0.id()));
+ EXPECT_EQ(
+ expected_data_channel0,
+ report->Get(expected_data_channel0.id())->cast_to<RTCDataChannelStats>());
+ ASSERT_TRUE(report->Get(expected_data_channel1.id()));
+ EXPECT_EQ(
+ expected_data_channel1,
+ report->Get(expected_data_channel1.id())->cast_to<RTCDataChannelStats>());
+ ASSERT_TRUE(report->Get(expected_data_channel2.id()));
+ EXPECT_EQ(
+ expected_data_channel2,
+ report->Get(expected_data_channel2.id())->cast_to<RTCDataChannelStats>());
+ ASSERT_TRUE(report->Get(expected_data_channel3.id()));
+ EXPECT_EQ(
+ expected_data_channel3,
+ report->Get(expected_data_channel3.id())->cast_to<RTCDataChannelStats>());
+}
+
+// Verifies local/remote ICE candidate stats collection across two transports:
+// candidates of every type (host/srflx/prflx/relay), paired via connection
+// infos as well as an unpaired local candidate from the candidate stats list,
+// each with the expected network type, adapter type, relay protocol, URL,
+// priority, foundation and username fragment. Stats ids are "I" + candidate
+// id, and transport ids "Ta0"/"Tb0" must also appear in the report.
+TEST_F(RTCStatsCollectorTest, CollectRTCIceCandidateStats) {
+ // Candidates in the first transport stats.
+ std::unique_ptr<cricket::Candidate> a_local_host = CreateFakeCandidate(
+ "1.2.3.4", 5, "a_local_host's protocol", rtc::ADAPTER_TYPE_VPN,
+ cricket::LOCAL_PORT_TYPE, 0, rtc::ADAPTER_TYPE_ETHERNET);
+ RTCLocalIceCandidateStats expected_a_local_host("I" + a_local_host->id(),
+ Timestamp::Zero());
+ expected_a_local_host.transport_id = "Ta0";
+ expected_a_local_host.network_type = "vpn";
+ expected_a_local_host.ip = "1.2.3.4";
+ expected_a_local_host.address = "1.2.3.4";
+ expected_a_local_host.port = 5;
+ expected_a_local_host.protocol = "a_local_host's protocol";
+ expected_a_local_host.candidate_type = "host";
+ expected_a_local_host.priority = 0;
+ expected_a_local_host.vpn = true;
+ expected_a_local_host.network_adapter_type = "ethernet";
+ expected_a_local_host.foundation = "foundationIsAString";
+ expected_a_local_host.username_fragment = "iceusernamefragment";
+
+ std::unique_ptr<cricket::Candidate> a_remote_srflx = CreateFakeCandidate(
+ "6.7.8.9", 10, "remote_srflx's protocol", rtc::ADAPTER_TYPE_UNKNOWN,
+ cricket::STUN_PORT_TYPE, 1);
+ RTCRemoteIceCandidateStats expected_a_remote_srflx("I" + a_remote_srflx->id(),
+ Timestamp::Zero());
+ expected_a_remote_srflx.transport_id = "Ta0";
+ expected_a_remote_srflx.ip = "6.7.8.9";
+ expected_a_remote_srflx.address = "6.7.8.9";
+ expected_a_remote_srflx.port = 10;
+ expected_a_remote_srflx.protocol = "remote_srflx's protocol";
+ expected_a_remote_srflx.candidate_type = "srflx";
+ expected_a_remote_srflx.priority = 1;
+ expected_a_remote_srflx.foundation = "foundationIsAString";
+ expected_a_remote_srflx.username_fragment = "iceusernamefragment";
+
+ std::unique_ptr<cricket::Candidate> a_local_prflx = CreateFakeCandidate(
+ "11.12.13.14", 15, "a_local_prflx's protocol",
+ rtc::ADAPTER_TYPE_CELLULAR_2G, cricket::PRFLX_PORT_TYPE, 2);
+ RTCLocalIceCandidateStats expected_a_local_prflx("I" + a_local_prflx->id(),
+ Timestamp::Zero());
+ expected_a_local_prflx.transport_id = "Ta0";
+ expected_a_local_prflx.network_type = "cellular";
+ expected_a_local_prflx.ip = "11.12.13.14";
+ expected_a_local_prflx.address = "11.12.13.14";
+ expected_a_local_prflx.port = 15;
+ expected_a_local_prflx.protocol = "a_local_prflx's protocol";
+ expected_a_local_prflx.candidate_type = "prflx";
+ expected_a_local_prflx.priority = 2;
+ expected_a_local_prflx.vpn = false;
+ expected_a_local_prflx.network_adapter_type = "cellular2g";
+ expected_a_local_prflx.foundation = "foundationIsAString";
+ expected_a_local_prflx.username_fragment = "iceusernamefragment";
+
+ std::unique_ptr<cricket::Candidate> a_remote_relay = CreateFakeCandidate(
+ "16.17.18.19", 20, "a_remote_relay's protocol", rtc::ADAPTER_TYPE_UNKNOWN,
+ cricket::RELAY_PORT_TYPE, 3);
+ RTCRemoteIceCandidateStats expected_a_remote_relay("I" + a_remote_relay->id(),
+ Timestamp::Zero());
+ expected_a_remote_relay.transport_id = "Ta0";
+ expected_a_remote_relay.ip = "16.17.18.19";
+ expected_a_remote_relay.address = "16.17.18.19";
+ expected_a_remote_relay.port = 20;
+ expected_a_remote_relay.protocol = "a_remote_relay's protocol";
+ expected_a_remote_relay.candidate_type = "relay";
+ expected_a_remote_relay.priority = 3;
+ expected_a_remote_relay.foundation = "foundationIsAString";
+ expected_a_remote_relay.username_fragment = "iceusernamefragment";
+
+ // A local relay candidate carrying relay_protocol and a TURN server URL.
+ std::unique_ptr<cricket::Candidate> a_local_relay = CreateFakeCandidate(
+ "16.17.18.19", 21, "a_local_relay's protocol", rtc::ADAPTER_TYPE_UNKNOWN,
+ cricket::RELAY_PORT_TYPE, 1);
+ a_local_relay->set_relay_protocol("tcp");
+ a_local_relay->set_url("turn:url1");
+
+ RTCLocalIceCandidateStats expected_a_local_relay("I" + a_local_relay->id(),
+ Timestamp::Zero());
+ expected_a_local_relay.transport_id = "Ta0";
+ expected_a_local_relay.network_type = "unknown";
+ expected_a_local_relay.ip = "16.17.18.19";
+ expected_a_local_relay.address = "16.17.18.19";
+ expected_a_local_relay.port = 21;
+ expected_a_local_relay.protocol = "a_local_relay's protocol";
+ expected_a_local_relay.relay_protocol = "tcp";
+ expected_a_local_relay.candidate_type = "relay";
+ expected_a_local_relay.priority = 1;
+ expected_a_local_relay.url = "turn:url1";
+ expected_a_local_relay.vpn = false;
+ expected_a_local_relay.network_adapter_type = "unknown";
+ expected_a_local_relay.foundation = "foundationIsAString";
+ expected_a_local_relay.username_fragment = "iceusernamefragment";
+
+ // A prflx candidate that nevertheless carries a relay protocol.
+ std::unique_ptr<cricket::Candidate> a_local_relay_prflx = CreateFakeCandidate(
+ "11.12.13.20", 22, "a_local_relay_prflx's protocol",
+ rtc::ADAPTER_TYPE_UNKNOWN, cricket::PRFLX_PORT_TYPE, 1);
+ a_local_relay_prflx->set_relay_protocol("udp");
+
+ RTCLocalIceCandidateStats expected_a_local_relay_prflx(
+ "I" + a_local_relay_prflx->id(), Timestamp::Zero());
+ expected_a_local_relay_prflx.transport_id = "Ta0";
+ expected_a_local_relay_prflx.network_type = "unknown";
+ expected_a_local_relay_prflx.ip = "11.12.13.20";
+ expected_a_local_relay_prflx.address = "11.12.13.20";
+ expected_a_local_relay_prflx.port = 22;
+ expected_a_local_relay_prflx.protocol = "a_local_relay_prflx's protocol";
+ expected_a_local_relay_prflx.relay_protocol = "udp";
+ expected_a_local_relay_prflx.candidate_type = "prflx";
+ expected_a_local_relay_prflx.priority = 1;
+ expected_a_local_relay_prflx.vpn = false;
+ expected_a_local_relay_prflx.network_adapter_type = "unknown";
+ expected_a_local_relay_prflx.foundation = "foundationIsAString";
+ expected_a_local_relay_prflx.username_fragment = "iceusernamefragment";
+
+ // A non-paired local candidate.
+ std::unique_ptr<cricket::Candidate> a_local_host_not_paired =
+ CreateFakeCandidate("1.2.3.4", 4404, "a_local_host_not_paired's protocol",
+ rtc::ADAPTER_TYPE_VPN, cricket::LOCAL_PORT_TYPE, 0,
+ rtc::ADAPTER_TYPE_ETHERNET);
+ RTCLocalIceCandidateStats expected_a_local_host_not_paired(
+ "I" + a_local_host_not_paired->id(), Timestamp::Zero());
+ expected_a_local_host_not_paired.transport_id = "Ta0";
+ expected_a_local_host_not_paired.network_type = "vpn";
+ expected_a_local_host_not_paired.ip = "1.2.3.4";
+ expected_a_local_host_not_paired.address = "1.2.3.4";
+ expected_a_local_host_not_paired.port = 4404;
+ expected_a_local_host_not_paired.protocol =
+ "a_local_host_not_paired's protocol";
+ expected_a_local_host_not_paired.candidate_type = "host";
+ expected_a_local_host_not_paired.priority = 0;
+ expected_a_local_host_not_paired.vpn = true;
+ expected_a_local_host_not_paired.network_adapter_type = "ethernet";
+ expected_a_local_host_not_paired.foundation = "foundationIsAString";
+ expected_a_local_host_not_paired.username_fragment = "iceusernamefragment";
+
+ // Candidates in the second transport stats.
+ std::unique_ptr<cricket::Candidate> b_local =
+ CreateFakeCandidate("42.42.42.42", 42, "b_local's protocol",
+ rtc::ADAPTER_TYPE_WIFI, cricket::LOCAL_PORT_TYPE, 42);
+ RTCLocalIceCandidateStats expected_b_local("I" + b_local->id(),
+ Timestamp::Zero());
+ expected_b_local.transport_id = "Tb0";
+ expected_b_local.network_type = "wifi";
+ expected_b_local.ip = "42.42.42.42";
+ expected_b_local.address = "42.42.42.42";
+ expected_b_local.port = 42;
+ expected_b_local.protocol = "b_local's protocol";
+ expected_b_local.candidate_type = "host";
+ expected_b_local.priority = 42;
+ expected_b_local.vpn = false;
+ expected_b_local.network_adapter_type = "wifi";
+ expected_b_local.foundation = "foundationIsAString";
+ expected_b_local.username_fragment = "iceusernamefragment";
+
+ std::unique_ptr<cricket::Candidate> b_remote = CreateFakeCandidate(
+ "42.42.42.42", 42, "b_remote's protocol", rtc::ADAPTER_TYPE_UNKNOWN,
+ cricket::LOCAL_PORT_TYPE, 42);
+ RTCRemoteIceCandidateStats expected_b_remote("I" + b_remote->id(),
+ Timestamp::Zero());
+ expected_b_remote.transport_id = "Tb0";
+ expected_b_remote.ip = "42.42.42.42";
+ expected_b_remote.address = "42.42.42.42";
+ expected_b_remote.port = 42;
+ expected_b_remote.protocol = "b_remote's protocol";
+ expected_b_remote.candidate_type = "host";
+ expected_b_remote.priority = 42;
+ expected_b_remote.foundation = "foundationIsAString";
+ expected_b_remote.username_fragment = "iceusernamefragment";
+
+ // Add candidate pairs to connection.
+ cricket::TransportChannelStats a_transport_channel_stats;
+ a_transport_channel_stats.ice_transport_stats.connection_infos.push_back(
+ cricket::ConnectionInfo());
+ a_transport_channel_stats.ice_transport_stats.connection_infos[0]
+ .local_candidate = *a_local_host.get();
+ a_transport_channel_stats.ice_transport_stats.connection_infos[0]
+ .remote_candidate = *a_remote_srflx.get();
+ a_transport_channel_stats.ice_transport_stats.connection_infos.push_back(
+ cricket::ConnectionInfo());
+ a_transport_channel_stats.ice_transport_stats.connection_infos[1]
+ .local_candidate = *a_local_prflx.get();
+ a_transport_channel_stats.ice_transport_stats.connection_infos[1]
+ .remote_candidate = *a_remote_relay.get();
+ a_transport_channel_stats.ice_transport_stats.connection_infos.push_back(
+ cricket::ConnectionInfo());
+ a_transport_channel_stats.ice_transport_stats.connection_infos[2]
+ .local_candidate = *a_local_relay.get();
+ a_transport_channel_stats.ice_transport_stats.connection_infos[2]
+ .remote_candidate = *a_remote_relay.get();
+ a_transport_channel_stats.ice_transport_stats.connection_infos.push_back(
+ cricket::ConnectionInfo());
+ a_transport_channel_stats.ice_transport_stats.connection_infos[3]
+ .local_candidate = *a_local_relay_prflx.get();
+ a_transport_channel_stats.ice_transport_stats.connection_infos[3]
+ .remote_candidate = *a_remote_relay.get();
+ // The unpaired candidate enters via the candidate stats list instead.
+ a_transport_channel_stats.ice_transport_stats.candidate_stats_list.push_back(
+ cricket::CandidateStats(*a_local_host_not_paired.get()));
+
+ pc_->AddVoiceChannel("audio", "a");
+ pc_->SetTransportStats("a", a_transport_channel_stats);
+
+ cricket::TransportChannelStats b_transport_channel_stats;
+ b_transport_channel_stats.ice_transport_stats.connection_infos.push_back(
+ cricket::ConnectionInfo());
+ b_transport_channel_stats.ice_transport_stats.connection_infos[0]
+ .local_candidate = *b_local.get();
+ b_transport_channel_stats.ice_transport_stats.connection_infos[0]
+ .remote_candidate = *b_remote.get();
+
+ pc_->AddVideoChannel("video", "b");
+ pc_->SetTransportStats("b", b_transport_channel_stats);
+
+ rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport();
+
+ ASSERT_TRUE(report->Get(expected_a_local_host.id()));
+ EXPECT_EQ(expected_a_local_host, report->Get(expected_a_local_host.id())
+ ->cast_to<RTCLocalIceCandidateStats>());
+
+ ASSERT_TRUE(report->Get(expected_a_local_host_not_paired.id()));
+ EXPECT_EQ(expected_a_local_host_not_paired,
+ report->Get(expected_a_local_host_not_paired.id())
+ ->cast_to<RTCLocalIceCandidateStats>());
+
+ ASSERT_TRUE(report->Get(expected_a_remote_srflx.id()));
+ EXPECT_EQ(expected_a_remote_srflx,
+ report->Get(expected_a_remote_srflx.id())
+ ->cast_to<RTCRemoteIceCandidateStats>());
+ ASSERT_TRUE(report->Get(expected_a_local_prflx.id()));
+ EXPECT_EQ(expected_a_local_prflx, report->Get(expected_a_local_prflx.id())
+ ->cast_to<RTCLocalIceCandidateStats>());
+ ASSERT_TRUE(report->Get(expected_a_remote_relay.id()));
+ EXPECT_EQ(expected_a_remote_relay,
+ report->Get(expected_a_remote_relay.id())
+ ->cast_to<RTCRemoteIceCandidateStats>());
+ ASSERT_TRUE(report->Get(expected_a_local_relay.id()));
+ EXPECT_EQ(expected_a_local_relay, report->Get(expected_a_local_relay.id())
+ ->cast_to<RTCLocalIceCandidateStats>());
+ ASSERT_TRUE(report->Get(expected_a_local_relay_prflx.id()));
+ EXPECT_EQ(expected_a_local_relay_prflx,
+ report->Get(expected_a_local_relay_prflx.id())
+ ->cast_to<RTCLocalIceCandidateStats>());
+ ASSERT_TRUE(report->Get(expected_b_local.id()));
+ EXPECT_EQ(
+ expected_b_local,
+ report->Get(expected_b_local.id())->cast_to<RTCLocalIceCandidateStats>());
+ ASSERT_TRUE(report->Get(expected_b_remote.id()));
+ EXPECT_EQ(expected_b_remote, report->Get(expected_b_remote.id())
+ ->cast_to<RTCRemoteIceCandidateStats>());
+ EXPECT_TRUE(report->Get("Ta0"));
+ EXPECT_TRUE(report->Get("Tb0"));
+}
+
+// Verifies that a cricket::ConnectionInfo is surfaced as an
+// RTCIceCandidatePairStats object, and that later mutations (nomination,
+// round-trip times, becoming the selected pair, available bandwidth) show up
+// in subsequently fetched fresh reports.
+TEST_F(RTCStatsCollectorTest, CollectRTCIceCandidatePairStats) {
+  const char kTransportName[] = "transport";
+
+  // One local host candidate on wifi and one remote server-reflexive
+  // candidate; their ids feed into the expected pair/candidate stats ids.
+  std::unique_ptr<cricket::Candidate> local_candidate =
+      CreateFakeCandidate("42.42.42.42", 42, "protocol", rtc::ADAPTER_TYPE_WIFI,
+                          cricket::LOCAL_PORT_TYPE, 42);
+  local_candidate->set_username("local_iceusernamefragment");
+
+  std::unique_ptr<cricket::Candidate> remote_candidate = CreateFakeCandidate(
+      "42.42.42.42", 42, "protocol", rtc::ADAPTER_TYPE_UNKNOWN,
+      cricket::STUN_PORT_TYPE, 42);
+  remote_candidate->set_related_address(rtc::SocketAddress("192.168.2.1", 43));
+  remote_candidate->set_username("remote_iceusernamefragment");
+
+  // A writable but not-selected pair (`best_connection = false`) with the
+  // current RTT deliberately unset, so the "undefined members" expectations
+  // below can be checked first.
+  cricket::ConnectionInfo connection_info;
+  connection_info.best_connection = false;
+  connection_info.local_candidate = *local_candidate.get();
+  connection_info.remote_candidate = *remote_candidate.get();
+  connection_info.writable = true;
+  connection_info.sent_discarded_packets = 3;
+  connection_info.sent_total_packets = 10;
+  connection_info.packets_received = 51;
+  connection_info.sent_discarded_bytes = 7;
+  connection_info.sent_total_bytes = 42;
+  connection_info.recv_total_bytes = 1234;
+  connection_info.total_round_trip_time_ms = 0;
+  connection_info.current_round_trip_time_ms = absl::nullopt;
+  connection_info.recv_ping_requests = 2020;
+  connection_info.sent_ping_requests_total = 2222;
+  connection_info.sent_ping_requests_before_first_response = 2000;
+  connection_info.recv_ping_responses = 4321;
+  connection_info.sent_ping_responses = 1000;
+  connection_info.state = cricket::IceCandidatePairState::IN_PROGRESS;
+  connection_info.priority = 5555;
+  connection_info.nominated = false;
+  connection_info.last_data_received = Timestamp::Millis(2500);
+  connection_info.last_data_sent = Timestamp::Millis(5200);
+
+  cricket::TransportChannelStats transport_channel_stats;
+  transport_channel_stats.component = cricket::ICE_CANDIDATE_COMPONENT_RTP;
+  transport_channel_stats.ice_transport_stats.connection_infos.push_back(
+      connection_info);
+
+  pc_->AddVideoChannel("video", kTransportName);
+  pc_->SetTransportStats(kTransportName, transport_channel_stats);
+
+  rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport();
+
+  // Candidate pair stats ids have the form "CP<local id>_<remote id>".
+  RTCIceCandidatePairStats expected_pair(
+      "CP" + local_candidate->id() + "_" + remote_candidate->id(),
+      report->timestamp());
+  expected_pair.transport_id =
+      "Ttransport" + rtc::ToString(cricket::ICE_CANDIDATE_COMPONENT_RTP);
+  expected_pair.local_candidate_id = "I" + local_candidate->id();
+  expected_pair.remote_candidate_id = "I" + remote_candidate->id();
+  expected_pair.state = "in-progress";
+  expected_pair.priority = 5555;
+  expected_pair.nominated = false;
+  expected_pair.writable = true;
+  // packets_sent excludes discarded packets: 10 total - 3 discarded.
+  expected_pair.packets_sent = 7;
+  expected_pair.packets_received = 51;
+  expected_pair.packets_discarded_on_send = 3;
+  // bytes_sent mirrors sent_total_bytes; discarded bytes are reported
+  // separately in bytes_discarded_on_send.
+  expected_pair.bytes_sent = 42;
+  expected_pair.bytes_received = 1234;
+  expected_pair.bytes_discarded_on_send = 7;
+  expected_pair.total_round_trip_time = 0.0;
+  expected_pair.requests_received = 2020;
+  expected_pair.requests_sent = 2222;
+  expected_pair.responses_received = 4321;
+  expected_pair.responses_sent = 1000;
+  // Consent requests = ping requests sent after the first response arrived.
+  expected_pair.consent_requests_sent = (2222 - 2000);
+  expected_pair.last_packet_received_timestamp = 2500;
+  expected_pair.last_packet_sent_timestamp = 5200;
+
+  // `expected_pair.current_round_trip_time` should be undefined because the
+  // current RTT is not set.
+  // `expected_pair.available_[outgoing/incoming]_bitrate` should be undefined
+  // because it is not the current pair.
+
+  ASSERT_TRUE(report->Get(expected_pair.id()));
+  EXPECT_EQ(
+      expected_pair,
+      report->Get(expected_pair.id())->cast_to<RTCIceCandidatePairStats>());
+  EXPECT_TRUE(report->Get(*expected_pair.transport_id));
+
+  // Set nominated and "GetStats" again.
+  transport_channel_stats.ice_transport_stats.connection_infos[0].nominated =
+      true;
+  pc_->SetTransportStats(kTransportName, transport_channel_stats);
+  report = stats_->GetFreshStatsReport();
+  expected_pair.nominated = true;
+  ASSERT_TRUE(report->Get(expected_pair.id()));
+  EXPECT_EQ(
+      expected_pair,
+      report->Get(expected_pair.id())->cast_to<RTCIceCandidatePairStats>());
+  EXPECT_TRUE(report->Get(*expected_pair.transport_id));
+
+  // Set round trip times and "GetStats" again. RTTs are reported in seconds
+  // (7331 ms -> 7.331, 1337 ms -> 1.337).
+  transport_channel_stats.ice_transport_stats.connection_infos[0]
+      .total_round_trip_time_ms = 7331;
+  transport_channel_stats.ice_transport_stats.connection_infos[0]
+      .current_round_trip_time_ms = 1337;
+  pc_->SetTransportStats(kTransportName, transport_channel_stats);
+  report = stats_->GetFreshStatsReport();
+  expected_pair.total_round_trip_time = 7.331;
+  expected_pair.current_round_trip_time = 1.337;
+  ASSERT_TRUE(report->Get(expected_pair.id()));
+  EXPECT_EQ(
+      expected_pair,
+      report->Get(expected_pair.id())->cast_to<RTCIceCandidatePairStats>());
+  EXPECT_TRUE(report->Get(*expected_pair.transport_id));
+
+  // Make pair the current pair, clear bandwidth and "GetStats" again.
+  transport_channel_stats.ice_transport_stats.connection_infos[0]
+      .best_connection = true;
+  pc_->SetTransportStats(kTransportName, transport_channel_stats);
+  report = stats_->GetFreshStatsReport();
+  // `expected_pair.available_[outgoing/incoming]_bitrate` should still be
+  // undefined because bandwidth is not set.
+  ASSERT_TRUE(report->Get(expected_pair.id()));
+  EXPECT_EQ(
+      expected_pair,
+      report->Get(expected_pair.id())->cast_to<RTCIceCandidatePairStats>());
+  EXPECT_TRUE(report->Get(*expected_pair.transport_id));
+
+  // Set bandwidth and "GetStats" again.
+  webrtc::Call::Stats call_stats;
+  const int kSendBandwidth = 888;
+  call_stats.send_bandwidth_bps = kSendBandwidth;
+  const int kRecvBandwidth = 999;
+  call_stats.recv_bandwidth_bps = kRecvBandwidth;
+  pc_->SetCallStats(call_stats);
+  report = stats_->GetFreshStatsReport();
+  expected_pair.available_outgoing_bitrate = kSendBandwidth;
+  expected_pair.available_incoming_bitrate = kRecvBandwidth;
+  ASSERT_TRUE(report->Get(expected_pair.id()));
+  EXPECT_EQ(
+      expected_pair,
+      report->Get(expected_pair.id())->cast_to<RTCIceCandidatePairStats>());
+  EXPECT_TRUE(report->Get(*expected_pair.transport_id));
+
+  // The local candidate stats object mirrors the fake candidate built above
+  // (host candidate on wifi).
+  RTCLocalIceCandidateStats expected_local_candidate(
+      *expected_pair.local_candidate_id, report->timestamp());
+  expected_local_candidate.transport_id = *expected_pair.transport_id;
+  expected_local_candidate.network_type = "wifi";
+  expected_local_candidate.ip = "42.42.42.42";
+  expected_local_candidate.address = "42.42.42.42";
+  expected_local_candidate.port = 42;
+  expected_local_candidate.protocol = "protocol";
+  expected_local_candidate.candidate_type = "host";
+  expected_local_candidate.priority = 42;
+  expected_local_candidate.foundation = "foundationIsAString";
+  expected_local_candidate.username_fragment = "local_iceusernamefragment";
+  expected_local_candidate.vpn = false;
+  expected_local_candidate.network_adapter_type = "wifi";
+  ASSERT_TRUE(report->Get(expected_local_candidate.id()));
+  EXPECT_EQ(expected_local_candidate,
+            report->Get(expected_local_candidate.id())
+                ->cast_to<RTCLocalIceCandidateStats>());
+
+  // The remote candidate stats object mirrors the srflx candidate, including
+  // its related address/port.
+  RTCRemoteIceCandidateStats expected_remote_candidate(
+      *expected_pair.remote_candidate_id, report->timestamp());
+  expected_remote_candidate.transport_id = *expected_pair.transport_id;
+  expected_remote_candidate.ip = "42.42.42.42";
+  expected_remote_candidate.address = "42.42.42.42";
+  expected_remote_candidate.port = 42;
+  expected_remote_candidate.protocol = "protocol";
+  expected_remote_candidate.candidate_type = "srflx";
+  expected_remote_candidate.priority = 42;
+  expected_remote_candidate.foundation = "foundationIsAString";
+  expected_remote_candidate.username_fragment = "remote_iceusernamefragment";
+  expected_remote_candidate.related_address = "192.168.2.1";
+  expected_remote_candidate.related_port = 43;
+  ASSERT_TRUE(report->Get(expected_remote_candidate.id()));
+  EXPECT_EQ(expected_remote_candidate,
+            report->Get(expected_remote_candidate.id())
+                ->cast_to<RTCRemoteIceCandidateStats>());
+}
+
+// Data channel open/close transitions are tallied into the peer-connection
+// stats object (id "P"). The opened/closed counters are cumulative over the
+// lifetime of the peer connection and never decrease.
+TEST_F(RTCStatsCollectorTest, CollectRTCPeerConnectionStats) {
+  {
+    // With no data channels, both counters start at zero.
+    rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport();
+    RTCPeerConnectionStats expected("P", report->timestamp());
+    expected.data_channels_opened = 0;
+    expected.data_channels_closed = 0;
+    ASSERT_TRUE(report->Get("P"));
+    EXPECT_EQ(expected, report->Get("P")->cast_to<RTCPeerConnectionStats>());
+  }
+
+  FakeDataChannelController controller(pc_->network_thread());
+  rtc::scoped_refptr<SctpDataChannel> dummy_channel_a = SctpDataChannel::Create(
+      controller.weak_ptr(), "DummyChannelA", false, InternalDataChannelInit(),
+      rtc::Thread::Current(), rtc::Thread::Current());
+  rtc::scoped_refptr<SctpDataChannel> dummy_channel_b = SctpDataChannel::Create(
+      controller.weak_ptr(), "DummyChannelB", false, InternalDataChannelInit(),
+      rtc::Thread::Current(), rtc::Thread::Current());
+
+  stats_->stats_collector()->OnSctpDataChannelStateChanged(
+      dummy_channel_a->internal_id(), DataChannelInterface::DataState::kOpen);
+  // Closing a channel that is not opened should not affect the counts.
+  stats_->stats_collector()->OnSctpDataChannelStateChanged(
+      dummy_channel_b->internal_id(), DataChannelInterface::DataState::kClosed);
+
+  {
+    // Only channel A's open is counted; B's spurious close is ignored.
+    rtc::scoped_refptr<const RTCStatsReport> report =
+        stats_->GetFreshStatsReport();
+    RTCPeerConnectionStats expected("P", report->timestamp());
+    expected.data_channels_opened = 1;
+    expected.data_channels_closed = 0;
+    ASSERT_TRUE(report->Get("P"));
+    EXPECT_EQ(expected, report->Get("P")->cast_to<RTCPeerConnectionStats>());
+  }
+
+  // A full open -> close cycle on channel B increments both counters.
+  stats_->stats_collector()->OnSctpDataChannelStateChanged(
+      dummy_channel_b->internal_id(), DataChannelInterface::DataState::kOpen);
+  stats_->stats_collector()->OnSctpDataChannelStateChanged(
+      dummy_channel_b->internal_id(), DataChannelInterface::DataState::kClosed);
+
+  {
+    rtc::scoped_refptr<const RTCStatsReport> report =
+        stats_->GetFreshStatsReport();
+    RTCPeerConnectionStats expected("P", report->timestamp());
+    expected.data_channels_opened = 2;
+    expected.data_channels_closed = 1;
+    ASSERT_TRUE(report->Get("P"));
+    EXPECT_EQ(expected, report->Get("P")->cast_to<RTCPeerConnectionStats>());
+  }
+
+  // Re-opening a data channel (or opening a new data channel that is re-using
+  // the same address in memory) should increase the opened count.
+  stats_->stats_collector()->OnSctpDataChannelStateChanged(
+      dummy_channel_b->internal_id(), DataChannelInterface::DataState::kOpen);
+
+  {
+    rtc::scoped_refptr<const RTCStatsReport> report =
+        stats_->GetFreshStatsReport();
+    RTCPeerConnectionStats expected("P", report->timestamp());
+    expected.data_channels_opened = 3;
+    expected.data_channels_closed = 1;
+    ASSERT_TRUE(report->Get("P"));
+    EXPECT_EQ(expected, report->Get("P")->cast_to<RTCPeerConnectionStats>());
+  }
+
+  // Closing both currently-open channels brings closed up to match opened.
+  stats_->stats_collector()->OnSctpDataChannelStateChanged(
+      dummy_channel_a->internal_id(), DataChannelInterface::DataState::kClosed);
+  stats_->stats_collector()->OnSctpDataChannelStateChanged(
+      dummy_channel_b->internal_id(), DataChannelInterface::DataState::kClosed);
+
+  {
+    rtc::scoped_refptr<const RTCStatsReport> report =
+        stats_->GetFreshStatsReport();
+    RTCPeerConnectionStats expected("P", report->timestamp());
+    expected.data_channels_opened = 3;
+    expected.data_channels_closed = 3;
+    ASSERT_TRUE(report->Get("P"));
+    EXPECT_EQ(expected, report->Get("P")->cast_to<RTCPeerConnectionStats>());
+  }
+}
+
+// Verifies that cricket::VoiceReceiverInfo is translated field-by-field into
+// an RTCInboundRtpStreamStats object, including unit conversions (ms ->
+// seconds, raw audio level -> [0,1]) and that unreported members stay
+// undefined until the media info provides them.
+TEST_F(RTCStatsCollectorTest, CollectRTCInboundRtpStreamStats_Audio) {
+  cricket::VoiceMediaInfo voice_media_info;
+
+  voice_media_info.receivers.push_back(cricket::VoiceReceiverInfo());
+  voice_media_info.receivers[0].local_stats.push_back(
+      cricket::SsrcReceiverInfo());
+  voice_media_info.receivers[0].local_stats[0].ssrc = 1;
+  voice_media_info.receivers[0].packets_lost = -1;  // Signed per RFC3550
+  voice_media_info.receivers[0].packets_discarded = 7788;
+  voice_media_info.receivers[0].packets_received = 2;
+  voice_media_info.receivers[0].nacks_sent = 5;
+  voice_media_info.receivers[0].fec_packets_discarded = 5566;
+  voice_media_info.receivers[0].fec_packets_received = 6677;
+  voice_media_info.receivers[0].payload_bytes_received = 3;
+  voice_media_info.receivers[0].header_and_padding_bytes_received = 4;
+  voice_media_info.receivers[0].codec_payload_type = 42;
+  voice_media_info.receivers[0].jitter_ms = 4500;
+  voice_media_info.receivers[0].jitter_buffer_delay_seconds = 1.0;
+  voice_media_info.receivers[0].jitter_buffer_target_delay_seconds = 1.1;
+  voice_media_info.receivers[0].jitter_buffer_minimum_delay_seconds = 0.999;
+  voice_media_info.receivers[0].jitter_buffer_emitted_count = 2;
+  voice_media_info.receivers[0].total_samples_received = 3;
+  voice_media_info.receivers[0].concealed_samples = 4;
+  voice_media_info.receivers[0].silent_concealed_samples = 5;
+  voice_media_info.receivers[0].concealment_events = 6;
+  voice_media_info.receivers[0].inserted_samples_for_deceleration = 7;
+  voice_media_info.receivers[0].removed_samples_for_acceleration = 8;
+  voice_media_info.receivers[0].audio_level = 14442;  // [0,32767]
+  voice_media_info.receivers[0].total_output_energy = 10.0;
+  voice_media_info.receivers[0].total_output_duration = 11.0;
+  voice_media_info.receivers[0].jitter_buffer_flushes = 7;
+  voice_media_info.receivers[0].delayed_packet_outage_samples = 15;
+  voice_media_info.receivers[0].relative_packet_arrival_delay_seconds = 16;
+  voice_media_info.receivers[0].interruption_count = 7788;
+  voice_media_info.receivers[0].total_interruption_duration_ms = 778899;
+  // Left unset so that last_packet_received_timestamp stays undefined below.
+  voice_media_info.receivers[0].last_packet_received = absl::nullopt;
+
+  // Register payload type 42 so codec_id can be resolved.
+  RtpCodecParameters codec_parameters;
+  codec_parameters.payload_type = 42;
+  codec_parameters.kind = cricket::MEDIA_TYPE_AUDIO;
+  codec_parameters.name = "dummy";
+  codec_parameters.clock_rate = 0;
+  voice_media_info.receive_codecs.insert(
+      std::make_pair(codec_parameters.payload_type, codec_parameters));
+
+  auto voice_media_channels =
+      pc_->AddVoiceChannel("AudioMid", "TransportName", voice_media_info);
+  stats_->SetupRemoteTrackAndReceiver(
+      cricket::MEDIA_TYPE_AUDIO, "RemoteAudioTrackID", "RemoteStreamId", 1);
+
+  // Needed for playoutId to be populated.
+  pc_->SetAudioDeviceStats(AudioDeviceModule::Stats());
+  pc_->GetTransceiversInternal()[0]->internal()->set_current_direction(
+      RtpTransceiverDirection::kSendRecv);
+
+  rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport();
+
+  // The expected stats object mirrors the VoiceReceiverInfo set up above.
+  RTCInboundRtpStreamStats expected_audio("ITTransportName1A1",
+                                          report->timestamp());
+  expected_audio.ssrc = 1;
+  expected_audio.kind = "audio";
+  expected_audio.track_identifier = "RemoteAudioTrackID";
+  expected_audio.mid = "AudioMid";
+  expected_audio.transport_id = "TTransportName1";
+  expected_audio.codec_id = "CITTransportName1_42";
+  expected_audio.packets_received = 2;
+  expected_audio.nack_count = 5;
+  expected_audio.fec_packets_discarded = 5566;
+  expected_audio.fec_packets_received = 6677;
+  expected_audio.bytes_received = 3;
+  expected_audio.header_bytes_received = 4;
+  expected_audio.packets_lost = -1;
+  expected_audio.packets_discarded = 7788;
+  // `expected_audio.last_packet_received_timestamp` should be undefined.
+  expected_audio.jitter = 4.5;  // 4500 ms reported in seconds.
+  expected_audio.jitter_buffer_delay = 1.0;
+  expected_audio.jitter_buffer_target_delay = 1.1;
+  expected_audio.jitter_buffer_minimum_delay = 0.999;
+  expected_audio.jitter_buffer_emitted_count = 2;
+  expected_audio.total_samples_received = 3;
+  expected_audio.concealed_samples = 4;
+  expected_audio.silent_concealed_samples = 5;
+  expected_audio.concealment_events = 6;
+  expected_audio.inserted_samples_for_deceleration = 7;
+  expected_audio.removed_samples_for_acceleration = 8;
+  expected_audio.audio_level = 14442.0 / 32767.0;  // [0,1]
+  expected_audio.total_audio_energy = 10.0;
+  expected_audio.total_samples_duration = 11.0;
+  expected_audio.jitter_buffer_flushes = 7;
+  expected_audio.delayed_packet_outage_samples = 15;
+  expected_audio.relative_packet_arrival_delay = 16;
+  expected_audio.interruption_count = 7788;
+  expected_audio.total_interruption_duration = 778.899;  // ms -> seconds.
+  expected_audio.playout_id = "AP";
+
+  ASSERT_TRUE(report->Get(expected_audio.id()));
+  EXPECT_EQ(
+      report->Get(expected_audio.id())->cast_to<RTCInboundRtpStreamStats>(),
+      expected_audio);
+
+  // Set previously undefined values and "GetStats" again.
+  voice_media_info.receivers[0].last_packet_received = Timestamp::Seconds(3);
+  expected_audio.last_packet_received_timestamp = 3000.0;
+  voice_media_info.receivers[0].estimated_playout_ntp_timestamp_ms = 4567;
+  expected_audio.estimated_playout_timestamp = 4567;
+  voice_media_channels.first->SetStats(voice_media_info);
+  voice_media_channels.second->SetStats(voice_media_info);
+
+  report = stats_->GetFreshStatsReport();
+
+  ASSERT_TRUE(report->Get(expected_audio.id()));
+  EXPECT_EQ(
+      report->Get(expected_audio.id())->cast_to<RTCInboundRtpStreamStats>(),
+      expected_audio);
+  // The referenced transport and codec stats objects must also exist.
+  EXPECT_TRUE(report->Get(*expected_audio.transport_id));
+  EXPECT_TRUE(report->Get(*expected_audio.codec_id));
+}
+
+// playout_id ("AP") on inbound audio stats must only be present when the
+// transceiver's current direction includes receiving.
+TEST_F(RTCStatsCollectorTest, CollectRTCInboundRtpStreamStats_Audio_PlayoutId) {
+  cricket::VoiceMediaInfo voice_media_info;
+
+  voice_media_info.receivers.push_back(cricket::VoiceReceiverInfo());
+  voice_media_info.receivers[0].local_stats.push_back(
+      cricket::SsrcReceiverInfo());
+  voice_media_info.receivers[0].local_stats[0].ssrc = 1;
+
+  pc_->AddVoiceChannel("AudioMid", "TransportName", voice_media_info);
+  stats_->SetupRemoteTrackAndReceiver(
+      cricket::MEDIA_TYPE_AUDIO, "RemoteAudioTrackID", "RemoteStreamId", 1);
+  // Needed for playoutId to be populated.
+  pc_->SetAudioDeviceStats(AudioDeviceModule::Stats());
+
+  {
+    // We do not expect a playout id when only sending.
+    pc_->GetTransceiversInternal()[0]->internal()->set_current_direction(
+        RtpTransceiverDirection::kSendOnly);
+    rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport();
+    ASSERT_TRUE(report->Get("ITTransportName1A1"));
+    auto stats =
+        report->Get("ITTransportName1A1")->cast_to<RTCInboundRtpStreamStats>();
+    ASSERT_FALSE(stats.playout_id.is_defined());
+  }
+  {
+    // We do expect a playout id when receiving. A fresh report is fetched so
+    // the direction change is picked up rather than the cached report.
+    pc_->GetTransceiversInternal()[0]->internal()->set_current_direction(
+        RtpTransceiverDirection::kRecvOnly);
+    rtc::scoped_refptr<const RTCStatsReport> report =
+        stats_->GetFreshStatsReport();
+    ASSERT_TRUE(report->Get("ITTransportName1A1"));
+    auto stats =
+        report->Get("ITTransportName1A1")->cast_to<RTCInboundRtpStreamStats>();
+    ASSERT_TRUE(stats.playout_id.is_defined());
+    EXPECT_EQ(*stats.playout_id, "AP");
+  }
+}
+
+// Verifies that cricket::VideoReceiverInfo is translated field-by-field into
+// an RTCInboundRtpStreamStats object, including unit conversions and the
+// mapping of FID/FEC-FR ssrc groups to rtx_ssrc/fec_ssrc.
+TEST_F(RTCStatsCollectorTest, CollectRTCInboundRtpStreamStats_Video) {
+  cricket::VideoMediaInfo video_media_info;
+
+  video_media_info.receivers.push_back(cricket::VideoReceiverInfo());
+  video_media_info.receivers[0].local_stats.push_back(
+      cricket::SsrcReceiverInfo());
+  video_media_info.receivers[0].local_stats[0].ssrc = 1;
+  video_media_info.receivers[0].packets_received = 2;
+  video_media_info.receivers[0].packets_lost = 42;
+  video_media_info.receivers[0].payload_bytes_received = 3;
+  video_media_info.receivers[0].header_and_padding_bytes_received = 12;
+  video_media_info.receivers[0].codec_payload_type = 42;
+  video_media_info.receivers[0].firs_sent = 5;
+  video_media_info.receivers[0].plis_sent = 6;
+  video_media_info.receivers[0].nacks_sent = 7;
+  video_media_info.receivers[0].frames_received = 8;
+  video_media_info.receivers[0].frames_decoded = 9;
+  video_media_info.receivers[0].key_frames_decoded = 3;
+  video_media_info.receivers[0].frames_dropped = 13;
+  // Left unset so the corresponding stats members stay undefined below.
+  video_media_info.receivers[0].qp_sum = absl::nullopt;
+  video_media_info.receivers[0].total_decode_time =
+      webrtc::TimeDelta::Seconds(9);
+  video_media_info.receivers[0].total_processing_delay =
+      webrtc::TimeDelta::Millis(600);
+  video_media_info.receivers[0].total_assembly_time =
+      webrtc::TimeDelta::Millis(500);
+  video_media_info.receivers[0].frames_assembled_from_multiple_packets = 23;
+  video_media_info.receivers[0].total_inter_frame_delay = 0.123;
+  video_media_info.receivers[0].total_squared_inter_frame_delay = 0.00456;
+  video_media_info.receivers[0].pause_count = 2;
+  video_media_info.receivers[0].total_pauses_duration_ms = 10000;
+  video_media_info.receivers[0].freeze_count = 3;
+  video_media_info.receivers[0].total_freezes_duration_ms = 1000;
+  video_media_info.receivers[0].jitter_ms = 1199;
+  video_media_info.receivers[0].jitter_buffer_delay_seconds = 3.456;
+  video_media_info.receivers[0].jitter_buffer_target_delay_seconds = 1.1;
+  video_media_info.receivers[0].jitter_buffer_minimum_delay_seconds = 0.999;
+  video_media_info.receivers[0].jitter_buffer_emitted_count = 13;
+  video_media_info.receivers[0].last_packet_received = absl::nullopt;
+  video_media_info.receivers[0].content_type = VideoContentType::UNSPECIFIED;
+  video_media_info.receivers[0].estimated_playout_ntp_timestamp_ms =
+      absl::nullopt;
+  video_media_info.receivers[0].decoder_implementation_name = absl::nullopt;
+  video_media_info.receivers[0].min_playout_delay_ms = 50;
+  video_media_info.receivers[0].power_efficient_decoder = false;
+  video_media_info.receivers[0].retransmitted_packets_received = 17;
+  video_media_info.receivers[0].retransmitted_bytes_received = 62;
+  video_media_info.receivers[0].fec_packets_received = 32;
+  video_media_info.receivers[0].fec_bytes_received = 54;
+  // FID (RTX) and FEC-FR groups; the secondary SSRCs surface as rtx_ssrc and
+  // fec_ssrc in the expected stats below.
+  video_media_info.receivers[0].ssrc_groups.push_back(
+      {cricket::kFidSsrcGroupSemantics, {1, 4404}});
+  video_media_info.receivers[0].ssrc_groups.push_back(
+      {cricket::kFecFrSsrcGroupSemantics, {1, 5505}});
+
+  // Note: these two values intentionally differ,
+  // only the decoded one should show up.
+  video_media_info.receivers[0].framerate_received = 15;
+  video_media_info.receivers[0].framerate_decoded = 5;
+
+  // Register payload type 42 so codec_id can be resolved.
+  RtpCodecParameters codec_parameters;
+  codec_parameters.payload_type = 42;
+  codec_parameters.kind = cricket::MEDIA_TYPE_VIDEO;
+  codec_parameters.name = "dummy";
+  codec_parameters.clock_rate = 0;
+  video_media_info.receive_codecs.insert(
+      std::make_pair(codec_parameters.payload_type, codec_parameters));
+
+  auto video_media_channels =
+      pc_->AddVideoChannel("VideoMid", "TransportName", video_media_info);
+  stats_->SetupRemoteTrackAndReceiver(
+      cricket::MEDIA_TYPE_VIDEO, "RemoteVideoTrackID", "RemoteStreamId", 1);
+
+  rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport();
+
+  // The expected stats object mirrors the VideoReceiverInfo set up above.
+  RTCInboundRtpStreamStats expected_video("ITTransportName1V1",
+                                          report->timestamp());
+  expected_video.ssrc = 1;
+  expected_video.kind = "video";
+  expected_video.track_identifier = "RemoteVideoTrackID";
+  expected_video.mid = "VideoMid";
+  expected_video.transport_id = "TTransportName1";
+  expected_video.codec_id = "CITTransportName1_42";
+  expected_video.fir_count = 5;
+  expected_video.pli_count = 6;
+  expected_video.nack_count = 7;
+  expected_video.packets_received = 2;
+  expected_video.bytes_received = 3;
+  expected_video.header_bytes_received = 12;
+  expected_video.packets_lost = 42;
+  expected_video.frames_received = 8;
+  expected_video.frames_decoded = 9;
+  expected_video.key_frames_decoded = 3;
+  expected_video.frames_dropped = 13;
+  // `expected_video.qp_sum` should be undefined.
+  expected_video.total_decode_time = 9.0;
+  expected_video.total_processing_delay = 0.6;  // 600 ms -> seconds.
+  expected_video.total_assembly_time = 0.5;     // 500 ms -> seconds.
+  expected_video.frames_assembled_from_multiple_packets = 23;
+  expected_video.total_inter_frame_delay = 0.123;
+  expected_video.total_squared_inter_frame_delay = 0.00456;
+  expected_video.pause_count = 2;
+  expected_video.total_pauses_duration = 10;
+  expected_video.freeze_count = 3;
+  expected_video.total_freezes_duration = 1;
+  expected_video.jitter = 1.199;  // 1199 ms -> seconds.
+  expected_video.jitter_buffer_delay = 3.456;
+  expected_video.jitter_buffer_target_delay = 1.1;
+  expected_video.jitter_buffer_minimum_delay = 0.999;
+  expected_video.jitter_buffer_emitted_count = 13;
+  // `expected_video.last_packet_received_timestamp` should be undefined.
+  // `expected_video.content_type` should be undefined.
+  // `expected_video.decoder_implementation` should be undefined.
+  expected_video.min_playout_delay = 0.05;  // 50 ms -> seconds.
+  expected_video.frames_per_second = 5;
+  expected_video.power_efficient_decoder = false;
+  expected_video.retransmitted_packets_received = 17;
+  expected_video.retransmitted_bytes_received = 62;
+  expected_video.fec_packets_received = 32;
+  expected_video.fec_bytes_received = 54;
+  expected_video.rtx_ssrc = 4404;
+  expected_video.fec_ssrc = 5505;
+
+  ASSERT_TRUE(report->Get(expected_video.id()));
+  EXPECT_EQ(
+      report->Get(expected_video.id())->cast_to<RTCInboundRtpStreamStats>(),
+      expected_video);
+
+  // Set previously undefined values and "GetStats" again.
+  video_media_info.receivers[0].qp_sum = 9;
+  expected_video.qp_sum = 9;
+  video_media_info.receivers[0].last_packet_received = Timestamp::Seconds(1);
+  expected_video.last_packet_received_timestamp = 1000.0;
+  video_media_info.receivers[0].content_type = VideoContentType::SCREENSHARE;
+  expected_video.content_type = "screenshare";
+  video_media_info.receivers[0].estimated_playout_ntp_timestamp_ms = 1234;
+  expected_video.estimated_playout_timestamp = 1234;
+  video_media_info.receivers[0].decoder_implementation_name = "libfoodecoder";
+  expected_video.decoder_implementation = "libfoodecoder";
+  video_media_info.receivers[0].power_efficient_decoder = true;
+  expected_video.power_efficient_decoder = true;
+  video_media_channels.first->SetStats(video_media_info);
+  video_media_channels.second->SetStats(video_media_info);
+
+  report = stats_->GetFreshStatsReport();
+
+  ASSERT_TRUE(report->Get(expected_video.id()));
+  EXPECT_EQ(
+      report->Get(expected_video.id())->cast_to<RTCInboundRtpStreamStats>(),
+      expected_video);
+  // The referenced transport and codec stats objects must also exist.
+  EXPECT_TRUE(report->Get(*expected_video.transport_id));
+  EXPECT_TRUE(report->Get(*expected_video.codec_id));
+}
+
+// Audio playout stats (id "AP") are generated from the stats reported by the
+// audio device module; every field must be copied through unchanged.
+TEST_F(RTCStatsCollectorTest, CollectRTCAudioPlayoutStats) {
+  AudioDeviceModule::Stats adm_stats;
+  adm_stats.synthesized_samples_duration_s = 1;
+  adm_stats.synthesized_samples_events = 2;
+  adm_stats.total_samples_count = 3;
+  adm_stats.total_samples_duration_s = 4;
+  adm_stats.total_playout_delay_s = 5;
+  pc_->SetAudioDeviceStats(adm_stats);
+
+  rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport();
+  // Exactly one playout stats object is expected in the report.
+  auto playout_stats = report->GetStatsOfType<RTCAudioPlayoutStats>();
+  ASSERT_EQ(1U, playout_stats.size());
+
+  RTCAudioPlayoutStats expected("AP", report->timestamp());
+  expected.kind = "audio";
+  expected.synthesized_samples_duration = 1;
+  expected.synthesized_samples_events = 2;
+  expected.total_samples_count = 3;
+  expected.total_samples_duration = 4;
+  expected.total_playout_delay = 5;
+
+  ASSERT_TRUE(report->Get(expected.id()));
+  EXPECT_EQ(report->Get(expected.id())->cast_to<RTCAudioPlayoutStats>(),
+            expected);
+}
+
+// The non-standard goog_timing_frame_info member is a comma-separated
+// serialization of the receiver's reported TimingFrameInfo.
+TEST_F(RTCStatsCollectorTest, CollectGoogTimingFrameInfo) {
+  cricket::VideoMediaInfo video_media_info;
+
+  video_media_info.receivers.emplace_back();
+  video_media_info.receivers[0].local_stats.emplace_back();
+  video_media_info.receivers[0].local_stats[0].ssrc = 1;
+  // Fill every timing field with a distinct value so the serialized order is
+  // fully checked by the expected string below.
+  TimingFrameInfo tfi;
+  tfi.rtp_timestamp = 1;
+  tfi.capture_time_ms = 2;
+  tfi.encode_start_ms = 3;
+  tfi.encode_finish_ms = 4;
+  tfi.packetization_finish_ms = 5;
+  tfi.pacer_exit_ms = 6;
+  tfi.network_timestamp_ms = 7;
+  tfi.network2_timestamp_ms = 8;
+  tfi.receive_start_ms = 9;
+  tfi.receive_finish_ms = 10;
+  tfi.decode_start_ms = 11;
+  tfi.decode_finish_ms = 12;
+  tfi.render_time_ms = 13;
+  tfi.flags = 14;
+  video_media_info.receivers[0].timing_frame_info = tfi;
+
+  pc_->AddVideoChannel("Mid0", "Transport0", video_media_info);
+  stats_->SetupRemoteTrackAndReceiver(
+      cricket::MEDIA_TYPE_VIDEO, "RemoteVideoTrackID", "RemoteStreamId", 1);
+
+  rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport();
+  auto inbound_rtps = report->GetStatsOfType<RTCInboundRtpStreamStats>();
+  ASSERT_EQ(inbound_rtps.size(), 1u);
+  ASSERT_TRUE(inbound_rtps[0]->goog_timing_frame_info.is_defined());
+  EXPECT_EQ(*inbound_rtps[0]->goog_timing_frame_info,
+            "1,2,3,4,5,6,7,8,9,10,11,12,13,1,0");
+}
+
+// Verifies that cricket::VoiceSenderInfo is translated field-by-field into an
+// RTCOutboundRtpStreamStats object ("OTTransportName1A1") with resolvable
+// transport and codec references.
+TEST_F(RTCStatsCollectorTest, CollectRTCOutboundRtpStreamStats_Audio) {
+  cricket::VoiceMediaInfo voice_media_info;
+
+  voice_media_info.senders.push_back(cricket::VoiceSenderInfo());
+  voice_media_info.senders[0].local_stats.push_back(cricket::SsrcSenderInfo());
+  voice_media_info.senders[0].local_stats[0].ssrc = 1;
+  voice_media_info.senders[0].packets_sent = 2;
+  voice_media_info.senders[0].total_packet_send_delay = TimeDelta::Seconds(1);
+  voice_media_info.senders[0].retransmitted_packets_sent = 20;
+  voice_media_info.senders[0].payload_bytes_sent = 3;
+  voice_media_info.senders[0].header_and_padding_bytes_sent = 12;
+  voice_media_info.senders[0].retransmitted_bytes_sent = 30;
+  voice_media_info.senders[0].nacks_received = 31;
+  voice_media_info.senders[0].target_bitrate = 32000;
+  voice_media_info.senders[0].codec_payload_type = 42;
+  voice_media_info.senders[0].active = true;
+
+  // Register payload type 42 so codec_id can be resolved.
+  RtpCodecParameters codec_parameters;
+  codec_parameters.payload_type = 42;
+  codec_parameters.kind = cricket::MEDIA_TYPE_AUDIO;
+  codec_parameters.name = "dummy";
+  codec_parameters.clock_rate = 0;
+  voice_media_info.send_codecs.insert(
+      std::make_pair(codec_parameters.payload_type, codec_parameters));
+
+  pc_->AddVoiceChannel("AudioMid", "TransportName", voice_media_info);
+  stats_->SetupLocalTrackAndSender(cricket::MEDIA_TYPE_AUDIO,
+                                   "LocalAudioTrackID", 1, true,
+                                   /*attachment_id=*/50);
+
+  rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport();
+
+  // The expected stats object mirrors the VoiceSenderInfo set up above.
+  RTCOutboundRtpStreamStats expected_audio("OTTransportName1A1",
+                                           report->timestamp());
+  expected_audio.media_source_id = "SA50";
+  // `expected_audio.remote_id` should be undefined.
+  expected_audio.mid = "AudioMid";
+  expected_audio.ssrc = 1;
+  expected_audio.kind = "audio";
+  expected_audio.transport_id = "TTransportName1";
+  expected_audio.codec_id = "COTTransportName1_42";
+  expected_audio.packets_sent = 2;
+  expected_audio.total_packet_send_delay = 1;
+  expected_audio.retransmitted_packets_sent = 20;
+  expected_audio.bytes_sent = 3;
+  expected_audio.header_bytes_sent = 12;
+  expected_audio.retransmitted_bytes_sent = 30;
+  expected_audio.nack_count = 31;
+  expected_audio.target_bitrate = 32000;
+  expected_audio.active = true;
+
+  ASSERT_TRUE(report->Get(expected_audio.id()));
+  EXPECT_EQ(
+      report->Get(expected_audio.id())->cast_to<RTCOutboundRtpStreamStats>(),
+      expected_audio);
+  // The referenced transport and codec stats objects must also exist.
+  EXPECT_TRUE(report->Get(*expected_audio.transport_id));
+  EXPECT_TRUE(report->Get(*expected_audio.codec_id));
+}
+
+TEST_F(RTCStatsCollectorTest, CollectRTCOutboundRtpStreamStats_Video) {
+ cricket::VideoMediaInfo video_media_info;
+
+ video_media_info.senders.push_back(cricket::VideoSenderInfo());
+ video_media_info.senders[0].local_stats.push_back(cricket::SsrcSenderInfo());
+ video_media_info.senders[0].local_stats[0].ssrc = 1;
+ video_media_info.senders[0].firs_received = 2;
+ video_media_info.senders[0].plis_received = 3;
+ video_media_info.senders[0].nacks_received = 4;
+ video_media_info.senders[0].packets_sent = 5;
+ video_media_info.senders[0].retransmitted_packets_sent = 50;
+ video_media_info.senders[0].payload_bytes_sent = 6;
+ video_media_info.senders[0].header_and_padding_bytes_sent = 12;
+ video_media_info.senders[0].retransmitted_bytes_sent = 60;
+ video_media_info.senders[0].codec_payload_type = 42;
+ video_media_info.senders[0].frames_encoded = 8;
+ video_media_info.senders[0].key_frames_encoded = 3;
+ video_media_info.senders[0].total_encode_time_ms = 9000;
+ video_media_info.senders[0].total_encoded_bytes_target = 1234;
+ video_media_info.senders[0].total_packet_send_delay =
+ webrtc::TimeDelta::Seconds(10);
+ video_media_info.senders[0].quality_limitation_reason =
+ QualityLimitationReason::kBandwidth;
+ video_media_info.senders[0].quality_limitation_durations_ms
+ [webrtc::QualityLimitationReason::kBandwidth] = 300;
+ video_media_info.senders[0].quality_limitation_resolution_changes = 56u;
+ video_media_info.senders[0].qp_sum = absl::nullopt;
+ video_media_info.senders[0].content_type = VideoContentType::UNSPECIFIED;
+ video_media_info.senders[0].encoder_implementation_name = absl::nullopt;
+ video_media_info.senders[0].power_efficient_encoder = false;
+ video_media_info.senders[0].send_frame_width = 200;
+ video_media_info.senders[0].send_frame_height = 100;
+ video_media_info.senders[0].framerate_sent = 10;
+ video_media_info.senders[0].frames_sent = 5;
+ video_media_info.senders[0].huge_frames_sent = 2;
+ video_media_info.senders[0].active = false;
+ video_media_info.senders[0].scalability_mode = ScalabilityMode::kL3T3_KEY;
+ video_media_info.senders[0].ssrc_groups.push_back(
+ {cricket::kFidSsrcGroupSemantics, {1, 4404}});
+ video_media_info.aggregated_senders.push_back(video_media_info.senders[0]);
+ RtpCodecParameters codec_parameters;
+ codec_parameters.payload_type = 42;
+ codec_parameters.kind = cricket::MEDIA_TYPE_AUDIO;
+ codec_parameters.name = "dummy";
+ codec_parameters.clock_rate = 0;
+ video_media_info.send_codecs.insert(
+ std::make_pair(codec_parameters.payload_type, codec_parameters));
+
+ auto video_media_channels =
+ pc_->AddVideoChannel("VideoMid", "TransportName", video_media_info);
+ stats_->SetupLocalTrackAndSender(cricket::MEDIA_TYPE_VIDEO,
+ "LocalVideoTrackID", 1, true,
+ /*attachment_id=*/50);
+
+ rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport();
+
+ auto stats_of_my_type = report->GetStatsOfType<RTCOutboundRtpStreamStats>();
+ ASSERT_EQ(1U, stats_of_my_type.size());
+
+ RTCOutboundRtpStreamStats expected_video(stats_of_my_type[0]->id(),
+ report->timestamp());
+ expected_video.media_source_id = "SV50";
+ // `expected_video.remote_id` should be undefined.
+ expected_video.mid = "VideoMid";
+ expected_video.ssrc = 1;
+ expected_video.kind = "video";
+ expected_video.transport_id = "TTransportName1";
+ expected_video.codec_id = "COTTransportName1_42";
+ expected_video.fir_count = 2;
+ expected_video.pli_count = 3;
+ expected_video.nack_count = 4;
+ expected_video.packets_sent = 5;
+ expected_video.retransmitted_packets_sent = 50;
+ expected_video.bytes_sent = 6;
+ expected_video.header_bytes_sent = 12;
+ expected_video.retransmitted_bytes_sent = 60;
+ expected_video.frames_encoded = 8;
+ expected_video.key_frames_encoded = 3;
+ expected_video.total_encode_time = 9.0;
+ expected_video.total_encoded_bytes_target = 1234;
+ expected_video.total_packet_send_delay = 10.0;
+ expected_video.quality_limitation_reason = "bandwidth";
+ expected_video.quality_limitation_durations = std::map<std::string, double>{
+ std::pair<std::string, double>{"bandwidth", 0.3},
+ };
+ expected_video.quality_limitation_resolution_changes = 56u;
+ expected_video.frame_width = 200u;
+ expected_video.frame_height = 100u;
+ expected_video.frames_per_second = 10.0;
+ expected_video.frames_sent = 5;
+ expected_video.huge_frames_sent = 2;
+ expected_video.active = false;
+ expected_video.power_efficient_encoder = false;
+ expected_video.scalability_mode = "L3T3_KEY";
+ expected_video.rtx_ssrc = 4404;
+ // `expected_video.content_type` should be undefined.
+ // `expected_video.qp_sum` should be undefined.
+ // `expected_video.encoder_implementation` should be undefined.
+ ASSERT_TRUE(report->Get(expected_video.id()));
+
+ EXPECT_EQ(
+ report->Get(expected_video.id())->cast_to<RTCOutboundRtpStreamStats>(),
+ expected_video);
+
+ // Set previously undefined values and "GetStats" again.
+ video_media_info.senders[0].qp_sum = 9;
+ expected_video.qp_sum = 9;
+ video_media_info.senders[0].content_type = VideoContentType::SCREENSHARE;
+ expected_video.content_type = "screenshare";
+ video_media_info.senders[0].encoder_implementation_name = "libfooencoder";
+ video_media_info.aggregated_senders[0] = video_media_info.senders[0];
+ expected_video.encoder_implementation = "libfooencoder";
+ video_media_info.senders[0].power_efficient_encoder = true;
+ expected_video.power_efficient_encoder = true;
+ video_media_channels.first->SetStats(video_media_info);
+ video_media_channels.second->SetStats(video_media_info);
+
+ report = stats_->GetFreshStatsReport();
+
+ ASSERT_TRUE(report->Get(expected_video.id()));
+ EXPECT_EQ(
+ report->Get(expected_video.id())->cast_to<RTCOutboundRtpStreamStats>(),
+ expected_video);
+ EXPECT_TRUE(report->Get(*expected_video.transport_id));
+ EXPECT_TRUE(report->Get(*expected_video.codec_id));
+}
+
+// Verifies RTCTransportStats collection in four incremental stages: RTP-only
+// channel stats, then with an added RTCP component, then with a selected
+// candidate pair (best_connection), and finally with local/remote
+// certificates attached. Transport stat ids are "T<name><component>".
+TEST_F(RTCStatsCollectorTest, CollectRTCTransportStats) {
+  const char kTransportName[] = "transport";
+
+  pc_->AddVoiceChannel("audio", kTransportName);
+
+  std::unique_ptr<cricket::Candidate> rtp_local_candidate =
+      CreateFakeCandidate("42.42.42.42", 42, "protocol", rtc::ADAPTER_TYPE_WIFI,
+                          cricket::LOCAL_PORT_TYPE, 42);
+  std::unique_ptr<cricket::Candidate> rtp_remote_candidate =
+      CreateFakeCandidate("42.42.42.42", 42, "protocol",
+                          rtc::ADAPTER_TYPE_UNKNOWN, cricket::LOCAL_PORT_TYPE,
+                          42);
+  std::unique_ptr<cricket::Candidate> rtcp_local_candidate =
+      CreateFakeCandidate("42.42.42.42", 42, "protocol", rtc::ADAPTER_TYPE_WIFI,
+                          cricket::LOCAL_PORT_TYPE, 42);
+  std::unique_ptr<cricket::Candidate> rtcp_remote_candidate =
+      CreateFakeCandidate("42.42.42.42", 42, "protocol",
+                          rtc::ADAPTER_TYPE_UNKNOWN, cricket::LOCAL_PORT_TYPE,
+                          42);
+
+  // Stage 1: a single RTP transport channel, not the selected pair.
+  cricket::ConnectionInfo rtp_connection_info;
+  rtp_connection_info.best_connection = false;
+  rtp_connection_info.local_candidate = *rtp_local_candidate.get();
+  rtp_connection_info.remote_candidate = *rtp_remote_candidate.get();
+  rtp_connection_info.sent_total_bytes = 42;
+  rtp_connection_info.recv_total_bytes = 1337;
+  rtp_connection_info.sent_total_packets = 3;
+  rtp_connection_info.sent_discarded_packets = 2;
+  rtp_connection_info.packets_received = 4;
+  cricket::TransportChannelStats rtp_transport_channel_stats;
+  rtp_transport_channel_stats.component = cricket::ICE_CANDIDATE_COMPONENT_RTP;
+  rtp_transport_channel_stats.ice_transport_stats.connection_infos.push_back(
+      rtp_connection_info);
+  rtp_transport_channel_stats.dtls_state = DtlsTransportState::kNew;
+  rtp_transport_channel_stats.ice_transport_stats.bytes_sent = 42;
+  rtp_transport_channel_stats.ice_transport_stats.packets_sent = 1;
+  rtp_transport_channel_stats.ice_transport_stats.bytes_received = 1337;
+  rtp_transport_channel_stats.ice_transport_stats.packets_received = 4;
+  rtp_transport_channel_stats.ice_transport_stats
+      .selected_candidate_pair_changes = 1;
+  rtp_transport_channel_stats.ice_transport_stats.ice_local_username_fragment =
+      "thelocalufrag";
+  pc_->SetTransportStats(kTransportName, {rtp_transport_channel_stats});
+
+  // Get stats without RTCP, an active connection or certificates.
+  rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport();
+
+  RTCTransportStats expected_rtp_transport(
+      "Ttransport" + rtc::ToString(cricket::ICE_CANDIDATE_COMPONENT_RTP),
+      report->timestamp());
+  expected_rtp_transport.bytes_sent = 42;
+  expected_rtp_transport.packets_sent = 1;
+  expected_rtp_transport.bytes_received = 1337;
+  expected_rtp_transport.packets_received = 4;
+  expected_rtp_transport.dtls_state = "new";
+  expected_rtp_transport.dtls_role = "unknown";
+  expected_rtp_transport.selected_candidate_pair_changes = 1;
+  expected_rtp_transport.ice_role = "unknown";
+  expected_rtp_transport.ice_local_username_fragment = "thelocalufrag";
+  expected_rtp_transport.ice_state = "new";
+
+  ASSERT_TRUE(report->Get(expected_rtp_transport.id()));
+  EXPECT_EQ(
+      expected_rtp_transport,
+      report->Get(expected_rtp_transport.id())->cast_to<RTCTransportStats>());
+
+  // Stage 2: add an RTCP transport channel alongside the RTP one.
+  cricket::ConnectionInfo rtcp_connection_info;
+  rtcp_connection_info.best_connection = false;
+  rtcp_connection_info.local_candidate = *rtcp_local_candidate.get();
+  rtcp_connection_info.remote_candidate = *rtcp_remote_candidate.get();
+  rtcp_connection_info.sent_total_bytes = 1337;
+  rtcp_connection_info.recv_total_bytes = 42;
+  rtcp_connection_info.sent_total_packets = 3;
+  rtcp_connection_info.sent_discarded_packets = 2;
+  rtcp_connection_info.packets_received = 4;
+  cricket::TransportChannelStats rtcp_transport_channel_stats;
+  rtcp_transport_channel_stats.component =
+      cricket::ICE_CANDIDATE_COMPONENT_RTCP;
+  rtcp_transport_channel_stats.ice_transport_stats.connection_infos.push_back(
+      rtcp_connection_info);
+  rtcp_transport_channel_stats.dtls_state = DtlsTransportState::kConnecting;
+  rtcp_transport_channel_stats.ice_transport_stats.bytes_sent = 1337;
+  rtcp_transport_channel_stats.ice_transport_stats.packets_sent = 1;
+  rtcp_transport_channel_stats.ice_transport_stats.bytes_received = 42;
+  rtcp_transport_channel_stats.ice_transport_stats.packets_received = 4;
+  rtcp_transport_channel_stats.ice_transport_stats.ice_local_username_fragment =
+      "thelocalufrag";
+  rtcp_transport_channel_stats.ice_transport_stats.ice_state =
+      IceTransportState::kChecking;
+  pc_->SetTransportStats(kTransportName, {rtp_transport_channel_stats,
+                                          rtcp_transport_channel_stats});
+
+  // Get stats with RTCP and without an active connection or certificates.
+  report = stats_->GetFreshStatsReport();
+
+  RTCTransportStats expected_rtcp_transport(
+      "Ttransport" + rtc::ToString(cricket::ICE_CANDIDATE_COMPONENT_RTCP),
+      report->timestamp());
+  expected_rtcp_transport.bytes_sent = 1337;
+  expected_rtcp_transport.packets_sent = 1;
+  expected_rtcp_transport.bytes_received = 42;
+  expected_rtcp_transport.packets_received = 4;
+  expected_rtcp_transport.dtls_state = "connecting";
+  expected_rtcp_transport.dtls_role = "unknown";
+  // selected_candidate_pair_changes was never set on the RTCP channel, so its
+  // default (0) is expected here, unlike the RTP channel's 1.
+  expected_rtcp_transport.selected_candidate_pair_changes = 0;
+  expected_rtcp_transport.ice_role = "unknown";
+  expected_rtcp_transport.ice_local_username_fragment = "thelocalufrag";
+  expected_rtcp_transport.ice_state = "checking";
+
+  expected_rtp_transport.rtcp_transport_stats_id = expected_rtcp_transport.id();
+  ASSERT_TRUE(report->Get(expected_rtp_transport.id()));
+  EXPECT_EQ(
+      expected_rtp_transport,
+      report->Get(expected_rtp_transport.id())->cast_to<RTCTransportStats>());
+  ASSERT_TRUE(report->Get(expected_rtcp_transport.id()));
+  EXPECT_EQ(
+      expected_rtcp_transport,
+      report->Get(expected_rtcp_transport.id())->cast_to<RTCTransportStats>());
+
+  // Get stats with an active connection (selected candidate pair).
+  rtcp_transport_channel_stats.ice_transport_stats.connection_infos[0]
+      .best_connection = true;
+  pc_->SetTransportStats(kTransportName, {rtp_transport_channel_stats,
+                                          rtcp_transport_channel_stats});
+
+  report = stats_->GetFreshStatsReport();
+
+  // Candidate-pair stat ids are "CP<local id>_<remote id>".
+  expected_rtcp_transport.selected_candidate_pair_id =
+      "CP" + rtcp_local_candidate->id() + "_" + rtcp_remote_candidate->id();
+
+  ASSERT_TRUE(report->Get(expected_rtp_transport.id()));
+  EXPECT_EQ(
+      expected_rtp_transport,
+      report->Get(expected_rtp_transport.id())->cast_to<RTCTransportStats>());
+  ASSERT_TRUE(report->Get(expected_rtcp_transport.id()));
+  EXPECT_EQ(
+      expected_rtcp_transport,
+      report->Get(expected_rtcp_transport.id())->cast_to<RTCTransportStats>());
+
+  // Get stats with certificates.
+  std::unique_ptr<CertificateInfo> local_certinfo =
+      CreateFakeCertificateAndInfoFromDers({"(local) local", "(local) chain"});
+  pc_->SetLocalCertificate(kTransportName, local_certinfo->certificate);
+  std::unique_ptr<CertificateInfo> remote_certinfo =
+      CreateFakeCertificateAndInfoFromDers(
+          {"(remote) local", "(remote) chain"});
+  pc_->SetRemoteCertChain(
+      kTransportName,
+      remote_certinfo->certificate->GetSSLCertificateChain().Clone());
+
+  report = stats_->GetFreshStatsReport();
+
+  // Certificate stat ids are "CF<fingerprint>"; both transports reference the
+  // same certificates.
+  expected_rtp_transport.local_certificate_id =
+      "CF" + local_certinfo->fingerprints[0];
+  expected_rtp_transport.remote_certificate_id =
+      "CF" + remote_certinfo->fingerprints[0];
+
+  expected_rtcp_transport.local_certificate_id =
+      *expected_rtp_transport.local_certificate_id;
+  expected_rtcp_transport.remote_certificate_id =
+      *expected_rtp_transport.remote_certificate_id;
+
+  ASSERT_TRUE(report->Get(expected_rtp_transport.id()));
+  EXPECT_EQ(
+      expected_rtp_transport,
+      report->Get(expected_rtp_transport.id())->cast_to<RTCTransportStats>());
+  ASSERT_TRUE(report->Get(expected_rtcp_transport.id()));
+  EXPECT_EQ(
+      expected_rtcp_transport,
+      report->Get(expected_rtcp_transport.id())->cast_to<RTCTransportStats>());
+}
+
+// Verifies that the crypto-related RTCTransportStats members (tlsVersion,
+// dtlsRole, dtlsCipher, srtpCipher) are reported once the DTLS transport is
+// in the connected state.
+TEST_F(RTCStatsCollectorTest, CollectRTCTransportStatsWithCrypto) {
+  const char kTransportName[] = "transport";
+
+  pc_->AddVoiceChannel("audio", kTransportName);
+
+  std::unique_ptr<cricket::Candidate> rtp_local_candidate =
+      CreateFakeCandidate("42.42.42.42", 42, "protocol", rtc::ADAPTER_TYPE_WIFI,
+                          cricket::LOCAL_PORT_TYPE, 42);
+  std::unique_ptr<cricket::Candidate> rtp_remote_candidate =
+      CreateFakeCandidate("42.42.42.42", 42, "protocol",
+                          rtc::ADAPTER_TYPE_UNKNOWN, cricket::LOCAL_PORT_TYPE,
+                          42);
+  // NOTE: the rtcp_* candidates below are created but no RTCP channel stats
+  // are set up in this test; only the RTP component is exercised.
+  std::unique_ptr<cricket::Candidate> rtcp_local_candidate =
+      CreateFakeCandidate("42.42.42.42", 42, "protocol", rtc::ADAPTER_TYPE_WIFI,
+                          cricket::LOCAL_PORT_TYPE, 42);
+  std::unique_ptr<cricket::Candidate> rtcp_remote_candidate =
+      CreateFakeCandidate("42.42.42.42", 42, "protocol",
+                          rtc::ADAPTER_TYPE_UNKNOWN, cricket::LOCAL_PORT_TYPE,
+                          42);
+
+  cricket::ConnectionInfo rtp_connection_info;
+  rtp_connection_info.best_connection = false;
+  rtp_connection_info.local_candidate = *rtp_local_candidate.get();
+  rtp_connection_info.remote_candidate = *rtp_remote_candidate.get();
+  cricket::TransportChannelStats rtp_transport_channel_stats;
+  rtp_transport_channel_stats.component = cricket::ICE_CANDIDATE_COMPONENT_RTP;
+  rtp_transport_channel_stats.ice_transport_stats.connection_infos.push_back(
+      rtp_connection_info);
+  // The state must be connected in order for crypto parameters to show up.
+  rtp_transport_channel_stats.dtls_state = DtlsTransportState::kConnected;
+  rtp_transport_channel_stats.ice_transport_stats
+      .selected_candidate_pair_changes = 1;
+  // 0x0203 is DTLS 1.0-era version bytes; expected to surface as "0203".
+  rtp_transport_channel_stats.ssl_version_bytes = 0x0203;
+  rtp_transport_channel_stats.dtls_role = rtc::SSL_CLIENT;
+  rtp_transport_channel_stats.ice_transport_stats.ice_role =
+      cricket::ICEROLE_CONTROLLING;
+  rtp_transport_channel_stats.ice_transport_stats.ice_local_username_fragment =
+      "thelocalufrag";
+  rtp_transport_channel_stats.ice_transport_stats.ice_state =
+      IceTransportState::kConnected;
+  // 0x2F is TLS_RSA_WITH_AES_128_CBC_SHA according to IANA
+  rtp_transport_channel_stats.ssl_cipher_suite = 0x2F;
+  rtp_transport_channel_stats.srtp_crypto_suite = rtc::kSrtpAes128CmSha1_80;
+  pc_->SetTransportStats(kTransportName, {rtp_transport_channel_stats});
+
+  // Get stats
+  rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport();
+
+  RTCTransportStats expected_rtp_transport(
+      "Ttransport" + rtc::ToString(cricket::ICE_CANDIDATE_COMPONENT_RTP),
+      report->timestamp());
+  expected_rtp_transport.dtls_state = "connected";
+  expected_rtp_transport.selected_candidate_pair_changes = 1;
+  // ice_role is assigned twice; the later "controlling" value is the one
+  // that takes effect.
+  expected_rtp_transport.ice_role = "unknown";
+  expected_rtp_transport.bytes_sent = 0;
+  expected_rtp_transport.bytes_received = 0;
+  expected_rtp_transport.packets_sent = 0;
+  expected_rtp_transport.packets_received = 0;
+  expected_rtp_transport.ice_role = "controlling";
+  expected_rtp_transport.ice_local_username_fragment = "thelocalufrag";
+  expected_rtp_transport.ice_state = "connected";
+  // Crypto parameters
+  expected_rtp_transport.tls_version = "0203";
+  expected_rtp_transport.dtls_role = "client";
+  expected_rtp_transport.dtls_cipher = "TLS_RSA_WITH_AES_128_CBC_SHA";
+  expected_rtp_transport.srtp_cipher = "AES_CM_128_HMAC_SHA1_80";
+
+  ASSERT_TRUE(report->Get(expected_rtp_transport.id()));
+  EXPECT_EQ(
+      expected_rtp_transport,
+      report->Get(expected_rtp_transport.id())->cast_to<RTCTransportStats>());
+}
+
+// Verifies that audio outbound-rtp stats are collected for a sender that has
+// a track but no associated MediaStream (plain AddTrack usage).
+TEST_F(RTCStatsCollectorTest, CollectNoStreamRTCOutboundRtpStreamStats_Audio) {
+  cricket::VoiceMediaInfo voice_media_info;
+
+  voice_media_info.senders.push_back(cricket::VoiceSenderInfo());
+  voice_media_info.senders[0].local_stats.push_back(cricket::SsrcSenderInfo());
+  voice_media_info.senders[0].local_stats[0].ssrc = 1;
+  voice_media_info.senders[0].packets_sent = 2;
+  voice_media_info.senders[0].total_packet_send_delay = TimeDelta::Seconds(0.5);
+  voice_media_info.senders[0].retransmitted_packets_sent = 20;
+  voice_media_info.senders[0].payload_bytes_sent = 3;
+  voice_media_info.senders[0].header_and_padding_bytes_sent = 4;
+  voice_media_info.senders[0].retransmitted_bytes_sent = 30;
+  voice_media_info.senders[0].nacks_received = 31;
+  voice_media_info.senders[0].codec_payload_type = 42;
+  voice_media_info.senders[0].active = true;
+
+  RtpCodecParameters codec_parameters;
+  codec_parameters.payload_type = 42;
+  codec_parameters.kind = cricket::MEDIA_TYPE_AUDIO;
+  codec_parameters.name = "dummy";
+  codec_parameters.clock_rate = 0;
+  voice_media_info.send_codecs.insert(
+      std::make_pair(codec_parameters.payload_type, codec_parameters));
+
+  // Emulates the case where AddTrack is used without an associated MediaStream
+  pc_->AddVoiceChannel("AudioMid", "TransportName", voice_media_info);
+  stats_->SetupLocalTrackAndSender(cricket::MEDIA_TYPE_AUDIO,
+                                   "LocalAudioTrackID", 1, false,
+                                   /*attachment_id=*/50);
+
+  rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport();
+
+  // Outbound-rtp id format: "OT<transport>1" + kind char + ssrc.
+  RTCOutboundRtpStreamStats expected_audio("OTTransportName1A1",
+                                           report->timestamp());
+  expected_audio.media_source_id = "SA50";
+  expected_audio.mid = "AudioMid";
+  expected_audio.ssrc = 1;
+  expected_audio.kind = "audio";
+  expected_audio.transport_id = "TTransportName1";
+  expected_audio.codec_id = "COTTransportName1_42";
+  expected_audio.packets_sent = 2;
+  expected_audio.total_packet_send_delay = 0.5;
+  expected_audio.retransmitted_packets_sent = 20;
+  expected_audio.bytes_sent = 3;
+  expected_audio.header_bytes_sent = 4;
+  expected_audio.retransmitted_bytes_sent = 30;
+  expected_audio.nack_count = 31;
+  expected_audio.active = true;
+
+  ASSERT_TRUE(report->Get(expected_audio.id()));
+  EXPECT_EQ(
+      report->Get(expected_audio.id())->cast_to<RTCOutboundRtpStreamStats>(),
+      expected_audio);
+  // Referenced transport and codec stats must also be present in the report.
+  EXPECT_TRUE(report->Get(*expected_audio.transport_id));
+  EXPECT_TRUE(report->Get(*expected_audio.codec_id));
+}
+
+// Verifies RTCAudioSourceStats collection for a sender with an attached audio
+// track, including the conversion of audio_level from the [0,32767] media
+// info range to the [0,1] stats range.
+TEST_F(RTCStatsCollectorTest, RTCAudioSourceStatsCollectedForSenderWithTrack) {
+  const uint32_t kSsrc = 4;
+  const int kAttachmentId = 42;
+
+  cricket::VoiceMediaInfo voice_media_info;
+  voice_media_info.senders.push_back(cricket::VoiceSenderInfo());
+  voice_media_info.senders[0].local_stats.push_back(cricket::SsrcSenderInfo());
+  voice_media_info.senders[0].local_stats[0].ssrc = kSsrc;
+  voice_media_info.senders[0].audio_level = 32767;  // [0,32767]
+  voice_media_info.senders[0].total_input_energy = 2.0;
+  voice_media_info.senders[0].total_input_duration = 3.0;
+  voice_media_info.senders[0].apm_statistics.echo_return_loss = 42.0;
+  voice_media_info.senders[0].apm_statistics.echo_return_loss_enhancement =
+      52.0;
+  pc_->AddVoiceChannel("AudioMid", "TransportName", voice_media_info);
+  stats_->SetupLocalTrackAndSender(cricket::MEDIA_TYPE_AUDIO,
+                                   "LocalAudioTrackID", kSsrc, false,
+                                   kAttachmentId);
+
+  rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport();
+
+  // Audio source id is "SA" + attachment id.
+  RTCAudioSourceStats expected_audio("SA42", report->timestamp());
+  expected_audio.track_identifier = "LocalAudioTrackID";
+  expected_audio.kind = "audio";
+  expected_audio.audio_level = 1.0;  // [0,1]
+  expected_audio.total_audio_energy = 2.0;
+  expected_audio.total_samples_duration = 3.0;
+  expected_audio.echo_return_loss = 42.0;
+  expected_audio.echo_return_loss_enhancement = 52.0;
+
+  ASSERT_TRUE(report->Get(expected_audio.id()));
+  EXPECT_EQ(report->Get(expected_audio.id())->cast_to<RTCAudioSourceStats>(),
+            expected_audio);
+}
+
+// Verifies RTCVideoSourceStats collection for a sender with an attached video
+// track: width/height come from the track's source, while frames-per-second
+// and frame count come from the media channel's sender info.
+TEST_F(RTCStatsCollectorTest, RTCVideoSourceStatsCollectedForSenderWithTrack) {
+  const uint32_t kSsrc = 4;
+  const int kAttachmentId = 42;
+  const int kVideoSourceWidth = 12;
+  const int kVideoSourceHeight = 34;
+
+  cricket::VideoMediaInfo video_media_info;
+  video_media_info.aggregated_senders.push_back(cricket::VideoSenderInfo());
+  video_media_info.senders.push_back(cricket::VideoSenderInfo());
+  video_media_info.senders[0].local_stats.push_back(cricket::SsrcSenderInfo());
+  video_media_info.senders[0].local_stats[0].ssrc = kSsrc;
+  video_media_info.senders[0].framerate_input = 29.0;
+  video_media_info.aggregated_senders[0].local_stats.push_back(
+      cricket::SsrcSenderInfo());
+  video_media_info.aggregated_senders[0].local_stats[0].ssrc = kSsrc;
+  video_media_info.aggregated_senders[0].framerate_input = 29.0;
+  video_media_info.aggregated_senders[0].frames = 10001;
+  pc_->AddVideoChannel("VideoMid", "TransportName", video_media_info);
+
+  auto video_source = FakeVideoTrackSourceForStats::Create(kVideoSourceWidth,
+                                                           kVideoSourceHeight);
+  auto video_track = FakeVideoTrackForStats::Create(
+      "LocalVideoTrackID", MediaStreamTrackInterface::kLive, video_source);
+  rtc::scoped_refptr<MockRtpSenderInternal> sender = CreateMockSender(
+      cricket::MEDIA_TYPE_VIDEO, video_track, kSsrc, kAttachmentId, {});
+  EXPECT_CALL(*sender, Stop());
+  EXPECT_CALL(*sender, SetMediaChannel(_));
+  pc_->AddSender(sender);
+
+  rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport();
+
+  // Video source id is "SV" + attachment id.
+  RTCVideoSourceStats expected_video("SV42", report->timestamp());
+  expected_video.track_identifier = "LocalVideoTrackID";
+  expected_video.kind = "video";
+  expected_video.width = kVideoSourceWidth;
+  expected_video.height = kVideoSourceHeight;
+  expected_video.frames_per_second = 29.0;
+  expected_video.frames = 10001;
+
+  ASSERT_TRUE(report->Get(expected_video.id()));
+  EXPECT_EQ(report->Get(expected_video.id())->cast_to<RTCVideoSourceStats>(),
+            expected_video);
+}
+
+// This test exercises the current behavior and code path, but the correct
+// behavior is to report frame rate even if we have no SSRC.
+// TODO(hbos): When we know the frame rate even if we have no SSRC, update the
+// expectations of this test.
+TEST_F(RTCStatsCollectorTest,
+       RTCVideoSourceStatsMissingFrameRateWhenSenderHasNoSsrc) {
+  // TODO(https://crbug.com/webrtc/8694): When 0 is no longer a magic value for
+  // "none", update this test.
+  const uint32_t kNoSsrc = 0;
+  const int kAttachmentId = 42;
+  const int kVideoSourceWidth = 12;
+  const int kVideoSourceHeight = 34;
+
+  cricket::VideoMediaInfo video_media_info;
+  video_media_info.senders.push_back(cricket::VideoSenderInfo());
+  video_media_info.senders[0].local_stats.push_back(cricket::SsrcSenderInfo());
+  video_media_info.senders[0].framerate_input = 29.0;
+  pc_->AddVideoChannel("VideoMid", "TransportName", video_media_info);
+
+  auto video_source = FakeVideoTrackSourceForStats::Create(kVideoSourceWidth,
+                                                           kVideoSourceHeight);
+  auto video_track = FakeVideoTrackForStats::Create(
+      "LocalVideoTrackID", MediaStreamTrackInterface::kLive, video_source);
+  // Sender created with SSRC 0, i.e. "no SSRC": sender info cannot be matched.
+  rtc::scoped_refptr<MockRtpSenderInternal> sender = CreateMockSender(
+      cricket::MEDIA_TYPE_VIDEO, video_track, kNoSsrc, kAttachmentId, {});
+  EXPECT_CALL(*sender, Stop());
+  EXPECT_CALL(*sender, SetMediaChannel(_));
+  pc_->AddSender(sender);
+
+  rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport();
+  ASSERT_TRUE(report->Get("SV42"));
+  auto video_stats = report->Get("SV42")->cast_to<RTCVideoSourceStats>();
+  // Without an SSRC match, the channel-provided members stay undefined.
+  EXPECT_FALSE(video_stats.frames_per_second.is_defined());
+  EXPECT_FALSE(video_stats.frames.is_defined());
+}
+
+// The track not having a source is not expected to be true in practise, but
+// this is true in some tests relying on fakes. This test covers that code path.
+TEST_F(RTCStatsCollectorTest,
+       RTCVideoSourceStatsMissingResolutionWhenTrackHasNoSource) {
+  const uint32_t kSsrc = 4;
+  const int kAttachmentId = 42;
+
+  cricket::VideoMediaInfo video_media_info;
+  video_media_info.senders.push_back(cricket::VideoSenderInfo());
+  video_media_info.senders[0].local_stats.push_back(cricket::SsrcSenderInfo());
+  video_media_info.senders[0].local_stats[0].ssrc = kSsrc;
+  video_media_info.senders[0].framerate_input = 29.0;
+  pc_->AddVideoChannel("VideoMid", "TransportName", video_media_info);
+
+  // Track deliberately created without a source (see comment above the test).
+  auto video_track = FakeVideoTrackForStats::Create(
+      "LocalVideoTrackID", MediaStreamTrackInterface::kLive,
+      /*source=*/nullptr);
+  rtc::scoped_refptr<MockRtpSenderInternal> sender = CreateMockSender(
+      cricket::MEDIA_TYPE_VIDEO, video_track, kSsrc, kAttachmentId, {});
+  EXPECT_CALL(*sender, Stop());
+  EXPECT_CALL(*sender, SetMediaChannel(_));
+  pc_->AddSender(sender);
+
+  rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport();
+  ASSERT_TRUE(report->Get("SV42"));
+  auto video_stats = report->Get("SV42")->cast_to<RTCVideoSourceStats>();
+  // Resolution comes from the track's source, so it must be undefined here.
+  EXPECT_FALSE(video_stats.width.is_defined());
+  EXPECT_FALSE(video_stats.height.is_defined());
+}
+
+// Verifies that no RTCAudioSourceStats object is produced for a sender that
+// has no attached track, even when matching sender info exists.
+TEST_F(RTCStatsCollectorTest,
+       RTCAudioSourceStatsNotCollectedForSenderWithoutTrack) {
+  const uint32_t kSsrc = 4;
+  const int kAttachmentId = 42;
+
+  cricket::VoiceMediaInfo voice_media_info;
+  voice_media_info.senders.push_back(cricket::VoiceSenderInfo());
+  voice_media_info.senders[0].local_stats.push_back(cricket::SsrcSenderInfo());
+  voice_media_info.senders[0].local_stats[0].ssrc = kSsrc;
+  pc_->AddVoiceChannel("AudioMid", "TransportName", voice_media_info);
+  rtc::scoped_refptr<MockRtpSenderInternal> sender = CreateMockSender(
+      cricket::MEDIA_TYPE_AUDIO, /*track=*/nullptr, kSsrc, kAttachmentId, {});
+  EXPECT_CALL(*sender, Stop());
+  EXPECT_CALL(*sender, SetMediaChannel(_));
+  pc_->AddSender(sender);
+
+  rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport();
+  // "SA42" would be the source id for attachment 42; it must be absent.
+  EXPECT_FALSE(report->Get("SA42"));
+}
+
+// Parameterized tests on cricket::MediaType (audio or video).
+// Parameterized tests on cricket::MediaType (audio or video).
+//
+// Fixture for tests that run identically for audio and video. Helpers map the
+// media-type parameter to the "A"/"V" stat-id prefix and the "audio"/"video"
+// kind string, and set up a matching channel populated from report blocks.
+class RTCStatsCollectorTestWithParamKind
+    : public RTCStatsCollectorTest,
+      public ::testing::WithParamInterface<cricket::MediaType> {
+ public:
+  RTCStatsCollectorTestWithParamKind() : media_type_(GetParam()) {
+    RTC_DCHECK(media_type_ == cricket::MEDIA_TYPE_AUDIO ||
+               media_type_ == cricket::MEDIA_TYPE_VIDEO);
+  }
+
+  // Returns the single-character kind prefix used in stat ids ("A" or "V").
+  std::string MediaTypeCharStr() const {
+    switch (media_type_) {
+      case cricket::MEDIA_TYPE_AUDIO:
+        return "A";
+      case cricket::MEDIA_TYPE_VIDEO:
+        return "V";
+      case cricket::MEDIA_TYPE_DATA:
+      case cricket::MEDIA_TYPE_UNSUPPORTED:
+        RTC_DCHECK_NOTREACHED();
+        return "?";
+    }
+  }
+
+  // Returns the RTCStats "kind" string ("audio" or "video").
+  std::string MediaTypeKind() const {
+    switch (media_type_) {
+      case cricket::MEDIA_TYPE_AUDIO:
+        return "audio";
+      case cricket::MEDIA_TYPE_VIDEO:
+        return "video";
+      case cricket::MEDIA_TYPE_DATA:
+      case cricket::MEDIA_TYPE_UNSUPPORTED:
+        RTC_DCHECK_NOTREACHED();
+        return "";
+    }
+  }
+
+  // Adds a sender and channel of the appropriate kind, creating a sender info
+  // with the report block's `source_ssrc` and report block data.
+  void AddSenderInfoAndMediaChannel(
+      std::string transport_name,
+      const std::vector<ReportBlockData>& report_block_datas,
+      absl::optional<RtpCodecParameters> codec) {
+    switch (media_type_) {
+      case cricket::MEDIA_TYPE_AUDIO: {
+        cricket::VoiceMediaInfo voice_media_info;
+        for (const auto& report_block_data : report_block_datas) {
+          cricket::VoiceSenderInfo sender;
+          sender.local_stats.push_back(cricket::SsrcSenderInfo());
+          sender.local_stats[0].ssrc = report_block_data.source_ssrc();
+          if (codec.has_value()) {
+            sender.codec_payload_type = codec->payload_type;
+            voice_media_info.send_codecs.insert(
+                std::make_pair(codec->payload_type, *codec));
+          }
+          sender.report_block_datas.push_back(report_block_data);
+          voice_media_info.senders.push_back(sender);
+        }
+        pc_->AddVoiceChannel("mid", transport_name, voice_media_info);
+        return;
+      }
+      case cricket::MEDIA_TYPE_VIDEO: {
+        cricket::VideoMediaInfo video_media_info;
+        for (const auto& report_block_data : report_block_datas) {
+          cricket::VideoSenderInfo sender;
+          sender.local_stats.push_back(cricket::SsrcSenderInfo());
+          sender.local_stats[0].ssrc = report_block_data.source_ssrc();
+          if (codec.has_value()) {
+            sender.codec_payload_type = codec->payload_type;
+            video_media_info.send_codecs.insert(
+                std::make_pair(codec->payload_type, *codec));
+          }
+          sender.report_block_datas.push_back(report_block_data);
+          // Video keeps both per-ssrc and aggregated sender lists in sync.
+          video_media_info.aggregated_senders.push_back(sender);
+          video_media_info.senders.push_back(sender);
+        }
+        pc_->AddVideoChannel("mid", transport_name, video_media_info);
+        return;
+      }
+      case cricket::MEDIA_TYPE_DATA:
+      case cricket::MEDIA_TYPE_UNSUPPORTED:
+        RTC_DCHECK_NOTREACHED();
+    }
+  }
+
+ protected:
+  cricket::MediaType media_type_;
+};
+
+// Verifies RTCRemoteInboundRtpStreamStats members that don't require
+// RTCCodecStats (codecId, jitter) and without setting up an RTCP transport.
+TEST_P(RTCStatsCollectorTestWithParamKind,
+       RTCRemoteInboundRtpStreamStatsCollectedFromReportBlock) {
+  const Timestamp kReportBlockTimestampUtc = Timestamp::Micros(123456789);
+  const uint8_t kFractionLost = 12;
+  const TimeDelta kRoundTripTimeSample1 = TimeDelta::Millis(1'234);
+  const TimeDelta kRoundTripTimeSample2 = TimeDelta::Seconds(13);
+
+  // The report block's timestamp cannot be from the future, set the fake clock
+  // to match.
+  fake_clock_.SetTime(kReportBlockTimestampUtc);
+  auto ssrcs = {12, 13};
+  std::vector<ReportBlockData> report_block_datas;
+  for (auto ssrc : ssrcs) {
+    rtcp::ReportBlock report_block;
+    // The remote-inbound-rtp SSRC and the outbound-rtp SSRC is the same as the
+    // `source_ssrc`, "SSRC of the RTP packet sender".
+    report_block.SetMediaSsrc(ssrc);
+    report_block.SetCumulativeLost(7);
+    report_block.SetFractionLost(kFractionLost);
+    ReportBlockData report_block_data;
+    report_block_data.SetReportBlock(0, report_block, kReportBlockTimestampUtc);
+    report_block_data.AddRoundTripTimeSample(kRoundTripTimeSample1);
+    // Only the last sample should be exposed as the
+    // `RTCRemoteInboundRtpStreamStats::round_trip_time`.
+    report_block_data.AddRoundTripTimeSample(kRoundTripTimeSample2);
+    report_block_datas.push_back(report_block_data);
+  }
+  AddSenderInfoAndMediaChannel("TransportName", report_block_datas,
+                               absl::nullopt);
+
+  rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport();
+  // Each SSRC produces its own remote-inbound-rtp stats object.
+  for (auto ssrc : ssrcs) {
+    std::string stream_id = "" + std::to_string(ssrc);
+    RTCRemoteInboundRtpStreamStats expected_remote_inbound_rtp(
+        "RI" + MediaTypeCharStr() + stream_id, kReportBlockTimestampUtc);
+    expected_remote_inbound_rtp.ssrc = ssrc;
+    // fractionLost is the report block's 8-bit fixed-point value scaled to
+    // [0,1].
+    expected_remote_inbound_rtp.fraction_lost =
+        static_cast<double>(kFractionLost) / (1 << 8);
+    expected_remote_inbound_rtp.kind = MediaTypeKind();
+    expected_remote_inbound_rtp.transport_id =
+        "TTransportName1";  // 1 for RTP (we have no RTCP
+                            // transport)
+    expected_remote_inbound_rtp.packets_lost = 7;
+    expected_remote_inbound_rtp.local_id =
+        "OTTransportName1" + MediaTypeCharStr() + stream_id;
+    expected_remote_inbound_rtp.round_trip_time =
+        kRoundTripTimeSample2.seconds<double>();
+    expected_remote_inbound_rtp.total_round_trip_time =
+        (kRoundTripTimeSample1 + kRoundTripTimeSample2).seconds<double>();
+    expected_remote_inbound_rtp.round_trip_time_measurements = 2;
+    // This test does not set up RTCCodecStats, so `codec_id` and `jitter` are
+    // expected to be missing. These are tested separately.
+
+    ASSERT_TRUE(report->Get(expected_remote_inbound_rtp.id()));
+    EXPECT_EQ(report->Get(expected_remote_inbound_rtp.id())
+                  ->cast_to<RTCRemoteInboundRtpStreamStats>(),
+              expected_remote_inbound_rtp);
+    EXPECT_TRUE(report->Get(*expected_remote_inbound_rtp.transport_id));
+    ASSERT_TRUE(report->Get(*expected_remote_inbound_rtp.local_id));
+    // Lookup works in both directions.
+    EXPECT_EQ(*report->Get(*expected_remote_inbound_rtp.local_id)
+                   ->cast_to<RTCOutboundRtpStreamStats>()
+                   .remote_id,
+              expected_remote_inbound_rtp.id());
+  }
+}
+
+// Verifies that roundTripTime stays undefined while the measurement count is
+// reported as 0 when no RTT sample has been received yet.
+TEST_P(RTCStatsCollectorTestWithParamKind,
+       RTCRemoteInboundRtpStreamStatsRttMissingBeforeMeasurement) {
+  constexpr Timestamp kReportBlockTimestampUtc = Timestamp::Micros(123456789);
+
+  rtcp::ReportBlock report_block;
+  // The remote-inbound-rtp SSRC and the outbound-rtp SSRC is the same as the
+  // `source_ssrc`, "SSRC of the RTP packet sender".
+  report_block.SetMediaSsrc(12);
+  ReportBlockData report_block_data;  // AddRoundTripTimeSample() not called.
+  report_block_data.SetReportBlock(0, report_block, kReportBlockTimestampUtc);
+
+  AddSenderInfoAndMediaChannel("TransportName", {report_block_data},
+                               absl::nullopt);
+
+  rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport();
+
+  std::string remote_inbound_rtp_id = "RI" + MediaTypeCharStr() + "12";
+  ASSERT_TRUE(report->Get(remote_inbound_rtp_id));
+  auto& remote_inbound_rtp = report->Get(remote_inbound_rtp_id)
+                                 ->cast_to<RTCRemoteInboundRtpStreamStats>();
+
+  EXPECT_TRUE(remote_inbound_rtp.round_trip_time_measurements.is_defined());
+  EXPECT_EQ(0, *remote_inbound_rtp.round_trip_time_measurements);
+  EXPECT_FALSE(remote_inbound_rtp.round_trip_time.is_defined());
+}
+
+// Verifies that the remote-inbound-rtp object's timestamp is the report
+// block's arrival time, not the (later) time the stats report was generated.
+TEST_P(RTCStatsCollectorTestWithParamKind,
+       RTCRemoteInboundRtpStreamStatsWithTimestampFromReportBlock) {
+  const Timestamp kReportBlockTimestampUtc = Timestamp::Micros(123456789);
+  fake_clock_.SetTime(kReportBlockTimestampUtc);
+
+  rtcp::ReportBlock report_block;
+  // The remote-inbound-rtp SSRC and the outbound-rtp SSRC is the same as the
+  // `source_ssrc`, "SSRC of the RTP packet sender".
+  report_block.SetMediaSsrc(12);
+  ReportBlockData report_block_data;
+  report_block_data.SetReportBlock(0, report_block, kReportBlockTimestampUtc);
+
+  AddSenderInfoAndMediaChannel("TransportName", {report_block_data},
+                               absl::nullopt);
+
+  // Advance time, it should be OK to have fresher reports than report blocks.
+  fake_clock_.AdvanceTime(TimeDelta::Micros(1234));
+
+  rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport();
+
+  std::string remote_inbound_rtp_id = "RI" + MediaTypeCharStr() + "12";
+  ASSERT_TRUE(report->Get(remote_inbound_rtp_id));
+  auto& remote_inbound_rtp = report->Get(remote_inbound_rtp_id)
+                                 ->cast_to<RTCRemoteInboundRtpStreamStats>();
+
+  // Even though the report time is different, the remote-inbound-rtp timestamp
+  // is of the time that the report block was received.
+  EXPECT_EQ(report->timestamp(),
+            kReportBlockTimestampUtc + TimeDelta::Micros(1234));
+  EXPECT_EQ(remote_inbound_rtp.timestamp(), kReportBlockTimestampUtc);
+}
+
+TEST_P(RTCStatsCollectorTestWithParamKind,
+ RTCRemoteInboundRtpStreamStatsWithCodecBasedMembers) {
+ const Timestamp kReportBlockTimestampUtc = Timestamp::Micros(123456789);
+ fake_clock_.SetTime(kReportBlockTimestampUtc);
+
+ rtcp::ReportBlock report_block;
+ // The remote-inbound-rtp SSRC and the outbound-rtp SSRC is the same as the
+ // `source_ssrc`, "SSRC of the RTP packet sender".
+ report_block.SetMediaSsrc(12);
+ report_block.SetJitter(5000);
+ ReportBlockData report_block_data;
+ report_block_data.SetReportBlock(0, report_block, kReportBlockTimestampUtc);
+
+ RtpCodecParameters codec;
+ codec.payload_type = 3;
+ codec.kind = media_type_;
+ codec.clock_rate = 1000;
+
+ AddSenderInfoAndMediaChannel("TransportName", {report_block_data}, codec);
+
+ rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport();
+
+ std::string remote_inbound_rtp_id = "RI" + MediaTypeCharStr() + "12";
+ ASSERT_TRUE(report->Get(remote_inbound_rtp_id));
+ auto& remote_inbound_rtp = report->Get(remote_inbound_rtp_id)
+ ->cast_to<RTCRemoteInboundRtpStreamStats>();
+
+ EXPECT_TRUE(remote_inbound_rtp.codec_id.is_defined());
+ EXPECT_TRUE(report->Get(*remote_inbound_rtp.codec_id));
+
+ EXPECT_TRUE(remote_inbound_rtp.jitter.is_defined());
+ // The jitter (in seconds) is the report block's jitter divided by the codec's
+ // clock rate.
+ EXPECT_EQ(5.0, *remote_inbound_rtp.jitter);
+}
+
+TEST_P(RTCStatsCollectorTestWithParamKind,
+ RTCRemoteInboundRtpStreamStatsWithRtcpTransport) {
+ const Timestamp kReportBlockTimestampUtc = Timestamp::Micros(123456789);
+ fake_clock_.SetTime(kReportBlockTimestampUtc);
+
+ rtcp::ReportBlock report_block;
+ // The remote-inbound-rtp SSRC and the outbound-rtp SSRC is the same as the
+ // `source_ssrc`, "SSRC of the RTP packet sender".
+ report_block.SetMediaSsrc(12);
+ ReportBlockData report_block_data;
+ report_block_data.SetReportBlock(0, report_block, kReportBlockTimestampUtc);
+
+ cricket::TransportChannelStats rtp_transport_channel_stats;
+ rtp_transport_channel_stats.component = cricket::ICE_CANDIDATE_COMPONENT_RTP;
+ rtp_transport_channel_stats.dtls_state = DtlsTransportState::kNew;
+ cricket::TransportChannelStats rtcp_transport_channel_stats;
+ rtcp_transport_channel_stats.component =
+ cricket::ICE_CANDIDATE_COMPONENT_RTCP;
+ rtcp_transport_channel_stats.dtls_state = DtlsTransportState::kNew;
+ pc_->SetTransportStats("TransportName", {rtp_transport_channel_stats,
+ rtcp_transport_channel_stats});
+ AddSenderInfoAndMediaChannel("TransportName", {report_block_data},
+ absl::nullopt);
+
+ rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport();
+
+ std::string remote_inbound_rtp_id = "RI" + MediaTypeCharStr() + "12";
+ ASSERT_TRUE(report->Get(remote_inbound_rtp_id));
+ auto& remote_inbound_rtp = report->Get(remote_inbound_rtp_id)
+ ->cast_to<RTCRemoteInboundRtpStreamStats>();
+
+ EXPECT_TRUE(remote_inbound_rtp.transport_id.is_defined());
+ EXPECT_EQ("TTransportName2", // 2 for RTCP
+ *remote_inbound_rtp.transport_id);
+ EXPECT_TRUE(report->Get(*remote_inbound_rtp.transport_id));
+}
+
+INSTANTIATE_TEST_SUITE_P(All,
+ RTCStatsCollectorTestWithParamKind,
+ ::testing::Values(cricket::MEDIA_TYPE_AUDIO, // "/0"
+ cricket::MEDIA_TYPE_VIDEO)); // "/1"
+
+// Checks that no remote outbound stats are collected if not available in
+// `VoiceMediaInfo`.
+TEST_F(RTCStatsCollectorTest,
+ RTCRemoteOutboundRtpAudioStreamStatsNotCollected) {
+ ExampleStatsGraph graph =
+ SetupExampleStatsVoiceGraph(/*add_remote_outbound_stats=*/false);
+ EXPECT_FALSE(graph.full_report->Get(graph.remote_outbound_rtp_id));
+ // Also check that no other remote outbound report is created (in case the
+ // expected ID is incorrect).
+ rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport();
+ ASSERT_NE(report->begin(), report->end())
+ << "No reports have been generated.";
+ for (const auto& stats : *report) {
+ SCOPED_TRACE(stats.id());
+ EXPECT_NE(stats.type(), RTCRemoteOutboundRtpStreamStats::kType);
+ }
+}
+
+// Checks that the remote outbound stats are collected when available in
+// `VoiceMediaInfo`.
+TEST_F(RTCStatsCollectorTest, RTCRemoteOutboundRtpAudioStreamStatsCollected) {
+ ExampleStatsGraph graph =
+ SetupExampleStatsVoiceGraph(/*add_remote_outbound_stats=*/true);
+ ASSERT_TRUE(graph.full_report->Get(graph.remote_outbound_rtp_id));
+ const auto& remote_outbound_rtp =
+ graph.full_report->Get(graph.remote_outbound_rtp_id)
+ ->cast_to<RTCRemoteOutboundRtpStreamStats>();
+ EXPECT_EQ(remote_outbound_rtp.timestamp(),
+ Timestamp::Millis(kRemoteOutboundStatsTimestampMs));
+ EXPECT_FLOAT_EQ(*remote_outbound_rtp.remote_timestamp,
+ static_cast<double>(kRemoteOutboundStatsRemoteTimestampMs));
+ EXPECT_EQ(*remote_outbound_rtp.packets_sent, kRemoteOutboundStatsPacketsSent);
+ EXPECT_EQ(*remote_outbound_rtp.bytes_sent, kRemoteOutboundStatsBytesSent);
+ EXPECT_EQ(*remote_outbound_rtp.reports_sent,
+ kRemoteOutboundStatsReportsCount);
+}
+
+TEST_F(RTCStatsCollectorTest,
+ RTCVideoSourceStatsNotCollectedForSenderWithoutTrack) {
+ const uint32_t kSsrc = 4;
+ const int kAttachmentId = 42;
+
+ cricket::VideoMediaInfo video_media_info;
+ video_media_info.senders.push_back(cricket::VideoSenderInfo());
+ video_media_info.senders[0].local_stats.push_back(cricket::SsrcSenderInfo());
+ video_media_info.senders[0].local_stats[0].ssrc = kSsrc;
+ video_media_info.senders[0].framerate_input = 29.0;
+ pc_->AddVideoChannel("VideoMid", "TransportName", video_media_info);
+
+ rtc::scoped_refptr<MockRtpSenderInternal> sender = CreateMockSender(
+ cricket::MEDIA_TYPE_VIDEO, /*track=*/nullptr, kSsrc, kAttachmentId, {});
+ EXPECT_CALL(*sender, Stop());
+ EXPECT_CALL(*sender, SetMediaChannel(_));
+ pc_->AddSender(sender);
+
+ rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport();
+ EXPECT_FALSE(report->Get("SV42"));
+}
+
+// Test collecting echo return loss stats from the audio processor attached to
+// the track, rather than the voice sender info.
+TEST_F(RTCStatsCollectorTest, CollectEchoReturnLossFromTrackAudioProcessor) {
+ rtc::scoped_refptr<MediaStream> local_stream =
+ MediaStream::Create("LocalStreamId");
+ pc_->mutable_local_streams()->AddStream(local_stream);
+
+ // Local audio track
+ rtc::scoped_refptr<MediaStreamTrackInterface> local_audio_track =
+ CreateFakeTrack(cricket::MEDIA_TYPE_AUDIO, "LocalAudioTrackID",
+ MediaStreamTrackInterface::kEnded,
+ /*create_fake_audio_processor=*/true);
+ local_stream->AddTrack(rtc::scoped_refptr<AudioTrackInterface>(
+ static_cast<AudioTrackInterface*>(local_audio_track.get())));
+
+ cricket::VoiceSenderInfo voice_sender_info_ssrc1;
+ voice_sender_info_ssrc1.local_stats.push_back(cricket::SsrcSenderInfo());
+ voice_sender_info_ssrc1.local_stats[0].ssrc = 1;
+
+ stats_->CreateMockRtpSendersReceiversAndChannels(
+ {std::make_pair(local_audio_track.get(), voice_sender_info_ssrc1)}, {},
+ {}, {}, {local_stream->id()}, {});
+
+ rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport();
+
+ RTCAudioSourceStats expected_audio("SA11", report->timestamp());
+ expected_audio.track_identifier = "LocalAudioTrackID";
+ expected_audio.kind = "audio";
+ expected_audio.audio_level = 0;
+ expected_audio.total_audio_energy = 0;
+ expected_audio.total_samples_duration = 0;
+ expected_audio.echo_return_loss = 2.0;
+ expected_audio.echo_return_loss_enhancement = 3.0;
+
+ ASSERT_TRUE(report->Get(expected_audio.id()));
+ EXPECT_EQ(report->Get(expected_audio.id())->cast_to<RTCAudioSourceStats>(),
+ expected_audio);
+}
+
+TEST_F(RTCStatsCollectorTest, GetStatsWithSenderSelector) {
+ ExampleStatsGraph graph = SetupExampleStatsGraphForSelectorTests();
+ // Expected stats graph when filtered by sender:
+ //
+ // media-source
+ // ^
+ // |
+ // +--------- outbound-rtp
+ // | |
+ // v v
+ // codec (send) transport
+ rtc::scoped_refptr<const RTCStatsReport> sender_report =
+ stats_->GetStatsReportWithSenderSelector(graph.sender);
+ EXPECT_TRUE(sender_report);
+ EXPECT_EQ(sender_report->timestamp(), graph.full_report->timestamp());
+ EXPECT_EQ(sender_report->size(), 4u);
+ EXPECT_TRUE(sender_report->Get(graph.send_codec_id));
+ EXPECT_FALSE(sender_report->Get(graph.recv_codec_id));
+ EXPECT_TRUE(sender_report->Get(graph.outbound_rtp_id));
+ EXPECT_FALSE(sender_report->Get(graph.inbound_rtp_id));
+ EXPECT_TRUE(sender_report->Get(graph.transport_id));
+ EXPECT_FALSE(sender_report->Get(graph.peer_connection_id));
+ EXPECT_TRUE(sender_report->Get(graph.media_source_id));
+}
+
+TEST_F(RTCStatsCollectorTest, GetStatsWithReceiverSelector) {
+ ExampleStatsGraph graph = SetupExampleStatsGraphForSelectorTests();
+ // Expected stats graph when filtered by receiver:
+ //
+ //
+ //
+ // inbound-rtp
+ // | |
+ // v v
+ // transport codec (recv)
+ rtc::scoped_refptr<const RTCStatsReport> receiver_report =
+ stats_->GetStatsReportWithReceiverSelector(graph.receiver);
+ EXPECT_TRUE(receiver_report);
+ EXPECT_EQ(receiver_report->size(), 3u);
+ EXPECT_EQ(receiver_report->timestamp(), graph.full_report->timestamp());
+ EXPECT_FALSE(receiver_report->Get(graph.send_codec_id));
+ EXPECT_TRUE(receiver_report->Get(graph.recv_codec_id));
+ EXPECT_FALSE(receiver_report->Get(graph.outbound_rtp_id));
+ EXPECT_TRUE(receiver_report->Get(graph.inbound_rtp_id));
+ EXPECT_TRUE(receiver_report->Get(graph.transport_id));
+ EXPECT_FALSE(receiver_report->Get(graph.peer_connection_id));
+ EXPECT_FALSE(receiver_report->Get(graph.media_source_id));
+}
+
+TEST_F(RTCStatsCollectorTest, GetStatsWithNullSenderSelector) {
+ ExampleStatsGraph graph = SetupExampleStatsGraphForSelectorTests();
+ rtc::scoped_refptr<const RTCStatsReport> empty_report =
+ stats_->GetStatsReportWithSenderSelector(nullptr);
+ EXPECT_TRUE(empty_report);
+ EXPECT_EQ(empty_report->timestamp(), graph.full_report->timestamp());
+ EXPECT_EQ(empty_report->size(), 0u);
+}
+
+TEST_F(RTCStatsCollectorTest, GetStatsWithNullReceiverSelector) {
+ ExampleStatsGraph graph = SetupExampleStatsGraphForSelectorTests();
+ rtc::scoped_refptr<const RTCStatsReport> empty_report =
+ stats_->GetStatsReportWithReceiverSelector(nullptr);
+ EXPECT_TRUE(empty_report);
+ EXPECT_EQ(empty_report->timestamp(), graph.full_report->timestamp());
+ EXPECT_EQ(empty_report->size(), 0u);
+}
+
+// Before SetLocalDescription() senders don't have an SSRC.
+// To simulate this case we create a mock sender with SSRC=0.
+TEST_F(RTCStatsCollectorTest, RtpIsMissingWhileSsrcIsZero) {
+ rtc::scoped_refptr<MediaStreamTrackInterface> track =
+ CreateFakeTrack(cricket::MEDIA_TYPE_AUDIO, "audioTrack",
+ MediaStreamTrackInterface::kLive);
+ rtc::scoped_refptr<MockRtpSenderInternal> sender =
+ CreateMockSender(cricket::MEDIA_TYPE_AUDIO, track, 0, 49, {});
+ EXPECT_CALL(*sender, Stop());
+ pc_->AddSender(sender);
+
+ rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport();
+
+ auto outbound_rtps = report->GetStatsOfType<RTCOutboundRtpStreamStats>();
+ EXPECT_TRUE(outbound_rtps.empty());
+}
+
+// We may also be in a case where the SSRC has been assigned but no
+// `voice_sender_info` stats exist yet.
+TEST_F(RTCStatsCollectorTest, DoNotCrashIfSsrcIsKnownButInfosAreStillMissing) {
+ rtc::scoped_refptr<MediaStreamTrackInterface> track =
+ CreateFakeTrack(cricket::MEDIA_TYPE_AUDIO, "audioTrack",
+ MediaStreamTrackInterface::kLive);
+ rtc::scoped_refptr<MockRtpSenderInternal> sender =
+ CreateMockSender(cricket::MEDIA_TYPE_AUDIO, track, 4711, 49, {});
+ EXPECT_CALL(*sender, Stop());
+ pc_->AddSender(sender);
+
+ // We do not generate any matching voice_sender_info stats.
+ rtc::scoped_refptr<const RTCStatsReport> report = stats_->GetStatsReport();
+
+ auto outbound_rtps = report->GetStatsOfType<RTCOutboundRtpStreamStats>();
+ EXPECT_TRUE(outbound_rtps.empty());
+}
+
+// Used for test below, to test calling GetStatsReport during a callback.
+class RecursiveCallback : public RTCStatsCollectorCallback {
+ public:
+ explicit RecursiveCallback(RTCStatsCollectorWrapper* stats) : stats_(stats) {}
+
+ void OnStatsDelivered(
+ const rtc::scoped_refptr<const RTCStatsReport>& report) override {
+ stats_->GetStatsReport();
+ called_ = true;
+ }
+
+ bool called() const { return called_; }
+
+ private:
+ RTCStatsCollectorWrapper* stats_;
+ bool called_ = false;
+};
+
+// Test that nothing bad happens if a callback causes GetStatsReport to be
+// called again recursively. Regression test for crbug.com/webrtc/8973.
+TEST_F(RTCStatsCollectorTest, DoNotCrashWhenGetStatsCalledDuringCallback) {
+ auto callback1 = rtc::make_ref_counted<RecursiveCallback>(stats_.get());
+ auto callback2 = rtc::make_ref_counted<RecursiveCallback>(stats_.get());
+ stats_->stats_collector()->GetStatsReport(callback1);
+ stats_->stats_collector()->GetStatsReport(callback2);
+ EXPECT_TRUE_WAIT(callback1->called(), kGetStatsReportTimeoutMs);
+ EXPECT_TRUE_WAIT(callback2->called(), kGetStatsReportTimeoutMs);
+}
+
+class RTCTestStats : public RTCStats {
+ public:
+ WEBRTC_RTCSTATS_DECL();
+
+ RTCTestStats(const std::string& id, Timestamp timestamp)
+ : RTCStats(id, timestamp), dummy_stat("dummyStat") {}
+
+ RTCStatsMember<int32_t> dummy_stat;
+};
+
+WEBRTC_RTCSTATS_IMPL(RTCTestStats, RTCStats, "test-stats", &dummy_stat)
+
+// Overrides the stats collection to verify thread usage and that the resulting
+// partial reports are merged.
+class FakeRTCStatsCollector : public RTCStatsCollector,
+ public RTCStatsCollectorCallback {
+ public:
+ static rtc::scoped_refptr<FakeRTCStatsCollector> Create(
+ PeerConnectionInternal* pc,
+ int64_t cache_lifetime_us) {
+ return rtc::scoped_refptr<FakeRTCStatsCollector>(
+ new rtc::RefCountedObject<FakeRTCStatsCollector>(pc,
+ cache_lifetime_us));
+ }
+
+ // Since FakeRTCStatsCollector inherits twice from RefCountInterface, once via
+ // RTCStatsCollector and once via RTCStatsCollectorCallback, scoped_refptr
+ // will get confused about which AddRef()/Release() methods to call.
+ // So to remove all doubt, we declare them here again in the class that we
+ // give to scoped_refptr.
+ // Satisfying the implementation of these methods and associating them with a
+ // reference counter, will be done by RefCountedObject.
+ virtual void AddRef() const = 0;
+ virtual rtc::RefCountReleaseStatus Release() const = 0;
+
+ // RTCStatsCollectorCallback implementation.
+ void OnStatsDelivered(
+ const rtc::scoped_refptr<const RTCStatsReport>& report) override {
+ EXPECT_TRUE(signaling_thread_->IsCurrent());
+ MutexLock lock(&lock_);
+ delivered_report_ = report;
+ }
+
+ void VerifyThreadUsageAndResultsMerging() {
+ GetStatsReport(rtc::scoped_refptr<RTCStatsCollectorCallback>(this));
+ EXPECT_TRUE_WAIT(HasVerifiedResults(), kGetStatsReportTimeoutMs);
+ }
+
+ bool HasVerifiedResults() {
+ EXPECT_TRUE(signaling_thread_->IsCurrent());
+ MutexLock lock(&lock_);
+ if (!delivered_report_)
+ return false;
+ EXPECT_EQ(produced_on_signaling_thread_, 1);
+ EXPECT_EQ(produced_on_network_thread_, 1);
+
+ EXPECT_TRUE(delivered_report_->Get("SignalingThreadStats"));
+ EXPECT_TRUE(delivered_report_->Get("NetworkThreadStats"));
+
+ produced_on_signaling_thread_ = 0;
+ produced_on_network_thread_ = 0;
+ delivered_report_ = nullptr;
+ return true;
+ }
+
+ protected:
+ FakeRTCStatsCollector(PeerConnectionInternal* pc, int64_t cache_lifetime)
+ : RTCStatsCollector(pc, cache_lifetime),
+ signaling_thread_(pc->signaling_thread()),
+ worker_thread_(pc->worker_thread()),
+ network_thread_(pc->network_thread()) {}
+
+ void ProducePartialResultsOnSignalingThreadImpl(
+ Timestamp timestamp,
+ RTCStatsReport* partial_report) override {
+ EXPECT_TRUE(signaling_thread_->IsCurrent());
+ {
+ MutexLock lock(&lock_);
+ EXPECT_FALSE(delivered_report_);
+ ++produced_on_signaling_thread_;
+ }
+
+ partial_report->AddStats(std::unique_ptr<const RTCStats>(
+ new RTCTestStats("SignalingThreadStats", timestamp)));
+ }
+ void ProducePartialResultsOnNetworkThreadImpl(
+ Timestamp timestamp,
+ const std::map<std::string, cricket::TransportStats>&
+ transport_stats_by_name,
+ const std::map<std::string, CertificateStatsPair>& transport_cert_stats,
+ RTCStatsReport* partial_report) override {
+ EXPECT_TRUE(network_thread_->IsCurrent());
+ {
+ MutexLock lock(&lock_);
+ EXPECT_FALSE(delivered_report_);
+ ++produced_on_network_thread_;
+ }
+
+ partial_report->AddStats(std::unique_ptr<const RTCStats>(
+ new RTCTestStats("NetworkThreadStats", timestamp)));
+ }
+
+ private:
+ rtc::Thread* const signaling_thread_;
+ rtc::Thread* const worker_thread_;
+ rtc::Thread* const network_thread_;
+
+ Mutex lock_;
+ rtc::scoped_refptr<const RTCStatsReport> delivered_report_;
+ int produced_on_signaling_thread_ = 0;
+ int produced_on_network_thread_ = 0;
+};
+
+TEST(RTCStatsCollectorTestWithFakeCollector, ThreadUsageAndResultsMerging) {
+ rtc::AutoThread main_thread_;
+ auto pc = rtc::make_ref_counted<FakePeerConnectionForStats>();
+ rtc::scoped_refptr<FakeRTCStatsCollector> stats_collector(
+ FakeRTCStatsCollector::Create(pc.get(),
+ 50 * rtc::kNumMicrosecsPerMillisec));
+ stats_collector->VerifyThreadUsageAndResultsMerging();
+}
+
+} // namespace
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/rtc_stats_integrationtest.cc b/third_party/libwebrtc/pc/rtc_stats_integrationtest.cc
new file mode 100644
index 0000000000..648efab69a
--- /dev/null
+++ b/third_party/libwebrtc/pc/rtc_stats_integrationtest.cc
@@ -0,0 +1,1204 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdint.h>
+#include <string.h>
+
+#include <memory>
+#include <set>
+#include <string>
+#include <vector>
+
+#include "absl/algorithm/container.h"
+#include "absl/strings/match.h"
+#include "api/audio_codecs/builtin_audio_decoder_factory.h"
+#include "api/audio_codecs/builtin_audio_encoder_factory.h"
+#include "api/audio_options.h"
+#include "api/data_channel_interface.h"
+#include "api/peer_connection_interface.h"
+#include "api/rtp_receiver_interface.h"
+#include "api/rtp_sender_interface.h"
+#include "api/scoped_refptr.h"
+#include "api/stats/rtc_stats.h"
+#include "api/stats/rtc_stats_report.h"
+#include "api/stats/rtcstats_objects.h"
+#include "pc/rtc_stats_traversal.h"
+#include "pc/test/peer_connection_test_wrapper.h"
+#include "pc/test/rtc_stats_obtainer.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/event_tracer.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/trace_event.h"
+#include "rtc_base/virtual_socket_server.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+using ::testing::Contains;
+
+namespace webrtc {
+
+namespace {
+
+const int64_t kGetStatsTimeoutMs = 10000;
+
+const unsigned char* GetCategoryEnabledHandler(const char* name) {
+ if (strcmp("webrtc_stats", name) != 0) {
+ return reinterpret_cast<const unsigned char*>("");
+ }
+ return reinterpret_cast<const unsigned char*>(name);
+}
+
+class RTCStatsReportTraceListener {
+ public:
+ static void SetUp() {
+ if (!traced_report_)
+ traced_report_ = new RTCStatsReportTraceListener();
+ traced_report_->last_trace_ = "";
+ SetupEventTracer(&GetCategoryEnabledHandler,
+ &RTCStatsReportTraceListener::AddTraceEventHandler);
+ }
+
+ static const std::string& last_trace() {
+ RTC_DCHECK(traced_report_);
+ return traced_report_->last_trace_;
+ }
+
+ private:
+ static void AddTraceEventHandler(
+ char phase,
+ const unsigned char* category_enabled,
+ const char* name,
+ unsigned long long id, // NOLINT(runtime/int)
+ int num_args,
+ const char** arg_names,
+ const unsigned char* arg_types,
+ const unsigned long long* arg_values, // NOLINT(runtime/int)
+ unsigned char flags) {
+ RTC_DCHECK(traced_report_);
+ EXPECT_STREQ("webrtc_stats",
+ reinterpret_cast<const char*>(category_enabled));
+ EXPECT_STREQ("webrtc_stats", name);
+ EXPECT_EQ(1, num_args);
+ EXPECT_STREQ("report", arg_names[0]);
+ EXPECT_EQ(TRACE_VALUE_TYPE_COPY_STRING, arg_types[0]);
+
+ traced_report_->last_trace_ = reinterpret_cast<const char*>(arg_values[0]);
+ }
+
+ static RTCStatsReportTraceListener* traced_report_;
+ std::string last_trace_;
+};
+
+RTCStatsReportTraceListener* RTCStatsReportTraceListener::traced_report_ =
+ nullptr;
+
+class RTCStatsIntegrationTest : public ::testing::Test {
+ public:
+ RTCStatsIntegrationTest()
+ : network_thread_(new rtc::Thread(&virtual_socket_server_)),
+ worker_thread_(rtc::Thread::Create()) {
+ RTCStatsReportTraceListener::SetUp();
+
+ RTC_CHECK(network_thread_->Start());
+ RTC_CHECK(worker_thread_->Start());
+
+ caller_ = rtc::make_ref_counted<PeerConnectionTestWrapper>(
+ "caller", &virtual_socket_server_, network_thread_.get(),
+ worker_thread_.get());
+ callee_ = rtc::make_ref_counted<PeerConnectionTestWrapper>(
+ "callee", &virtual_socket_server_, network_thread_.get(),
+ worker_thread_.get());
+ }
+
+ void StartCall() {
+ // Create PeerConnections and "connect" sigslots
+ PeerConnectionInterface::RTCConfiguration config;
+ config.sdp_semantics = SdpSemantics::kUnifiedPlan;
+ PeerConnectionInterface::IceServer ice_server;
+ ice_server.uri = "stun:1.1.1.1:3478";
+ config.servers.push_back(ice_server);
+ EXPECT_TRUE(caller_->CreatePc(config, CreateBuiltinAudioEncoderFactory(),
+ CreateBuiltinAudioDecoderFactory()));
+ EXPECT_TRUE(callee_->CreatePc(config, CreateBuiltinAudioEncoderFactory(),
+ CreateBuiltinAudioDecoderFactory()));
+ PeerConnectionTestWrapper::Connect(caller_.get(), callee_.get());
+
+ // Get user media for audio and video
+ caller_->GetAndAddUserMedia(true, cricket::AudioOptions(), true);
+ callee_->GetAndAddUserMedia(true, cricket::AudioOptions(), true);
+
+ // Create data channels
+ DataChannelInit init;
+ caller_->CreateDataChannel("data", init);
+ callee_->CreateDataChannel("data", init);
+
+ // Negotiate and wait for call to establish
+ caller_->CreateOffer(PeerConnectionInterface::RTCOfferAnswerOptions());
+ caller_->WaitForCallEstablished();
+ callee_->WaitForCallEstablished();
+ }
+
+ rtc::scoped_refptr<const RTCStatsReport> GetStatsFromCaller() {
+ return GetStats(caller_->pc());
+ }
+ rtc::scoped_refptr<const RTCStatsReport> GetStatsFromCaller(
+ rtc::scoped_refptr<RtpSenderInterface> selector) {
+ return GetStats(caller_->pc(), selector);
+ }
+ rtc::scoped_refptr<const RTCStatsReport> GetStatsFromCaller(
+ rtc::scoped_refptr<RtpReceiverInterface> selector) {
+ return GetStats(caller_->pc(), selector);
+ }
+
+ rtc::scoped_refptr<const RTCStatsReport> GetStatsFromCallee() {
+ return GetStats(callee_->pc());
+ }
+ rtc::scoped_refptr<const RTCStatsReport> GetStatsFromCallee(
+ rtc::scoped_refptr<RtpSenderInterface> selector) {
+ return GetStats(callee_->pc(), selector);
+ }
+ rtc::scoped_refptr<const RTCStatsReport> GetStatsFromCallee(
+ rtc::scoped_refptr<RtpReceiverInterface> selector) {
+ return GetStats(callee_->pc(), selector);
+ }
+
+ protected:
+ static rtc::scoped_refptr<const RTCStatsReport> GetStats(
+ PeerConnectionInterface* pc) {
+ rtc::scoped_refptr<RTCStatsObtainer> stats_obtainer =
+ RTCStatsObtainer::Create();
+ pc->GetStats(stats_obtainer.get());
+ EXPECT_TRUE_WAIT(stats_obtainer->report() != nullptr, kGetStatsTimeoutMs);
+ return stats_obtainer->report();
+ }
+
+ template <typename T>
+ static rtc::scoped_refptr<const RTCStatsReport> GetStats(
+ PeerConnectionInterface* pc,
+ rtc::scoped_refptr<T> selector) {
+ rtc::scoped_refptr<RTCStatsObtainer> stats_obtainer =
+ RTCStatsObtainer::Create();
+ pc->GetStats(selector, stats_obtainer);
+ EXPECT_TRUE_WAIT(stats_obtainer->report() != nullptr, kGetStatsTimeoutMs);
+ return stats_obtainer->report();
+ }
+
+ // `network_thread_` uses `virtual_socket_server_` so they must be
+ // constructed/destructed in the correct order.
+ rtc::VirtualSocketServer virtual_socket_server_;
+ std::unique_ptr<rtc::Thread> network_thread_;
+ std::unique_ptr<rtc::Thread> worker_thread_;
+ rtc::scoped_refptr<PeerConnectionTestWrapper> caller_;
+ rtc::scoped_refptr<PeerConnectionTestWrapper> callee_;
+};
+
+class RTCStatsVerifier {
+ public:
+ RTCStatsVerifier(const RTCStatsReport* report, const RTCStats* stats)
+ : report_(report), stats_(stats), all_tests_successful_(true) {
+ RTC_CHECK(report_);
+ RTC_CHECK(stats_);
+ for (const RTCStatsMemberInterface* member : stats_->Members()) {
+ untested_members_.insert(member);
+ }
+ }
+
+ void MarkMemberTested(const RTCStatsMemberInterface& member,
+ bool test_successful) {
+ untested_members_.erase(&member);
+ all_tests_successful_ &= test_successful;
+ }
+
+ void TestMemberIsDefined(const RTCStatsMemberInterface& member) {
+ EXPECT_TRUE(member.is_defined())
+ << stats_->type() << "." << member.name() << "[" << stats_->id()
+ << "] was undefined.";
+ MarkMemberTested(member, member.is_defined());
+ }
+
+ void TestMemberIsUndefined(const RTCStatsMemberInterface& member) {
+ EXPECT_FALSE(member.is_defined())
+ << stats_->type() << "." << member.name() << "[" << stats_->id()
+ << "] was defined (" << member.ValueToString() << ").";
+ MarkMemberTested(member, !member.is_defined());
+ }
+
+ template <typename T>
+ void TestMemberIsPositive(const RTCStatsMemberInterface& member) {
+ EXPECT_TRUE(member.is_defined())
+ << stats_->type() << "." << member.name() << "[" << stats_->id()
+ << "] was undefined.";
+ if (!member.is_defined()) {
+ MarkMemberTested(member, false);
+ return;
+ }
+ bool is_positive = *member.cast_to<RTCStatsMember<T>>() > T(0);
+ EXPECT_TRUE(is_positive)
+ << stats_->type() << "." << member.name() << "[" << stats_->id()
+ << "] was not positive (" << member.ValueToString() << ").";
+ MarkMemberTested(member, is_positive);
+ }
+
+ template <typename T>
+ void TestMemberIsNonNegative(const RTCStatsMemberInterface& member) {
+ EXPECT_TRUE(member.is_defined())
+ << stats_->type() << "." << member.name() << "[" << stats_->id()
+ << "] was undefined.";
+ if (!member.is_defined()) {
+ MarkMemberTested(member, false);
+ return;
+ }
+ bool is_non_negative = *member.cast_to<RTCStatsMember<T>>() >= T(0);
+ EXPECT_TRUE(is_non_negative)
+ << stats_->type() << "." << member.name() << "[" << stats_->id()
+ << "] was not non-negative (" << member.ValueToString() << ").";
+ MarkMemberTested(member, is_non_negative);
+ }
+
+ void TestMemberIsIDReference(const RTCStatsMemberInterface& member,
+ const char* expected_type) {
+ TestMemberIsIDReference(member, expected_type, false);
+ }
+
+ void TestMemberIsOptionalIDReference(const RTCStatsMemberInterface& member,
+ const char* expected_type) {
+ TestMemberIsIDReference(member, expected_type, true);
+ }
+
+ bool ExpectAllMembersSuccessfullyTested() {
+ if (untested_members_.empty())
+ return all_tests_successful_;
+ for (const RTCStatsMemberInterface* member : untested_members_) {
+ EXPECT_TRUE(false) << stats_->type() << "." << member->name() << "["
+ << stats_->id() << "] was not tested.";
+ }
+ return false;
+ }
+
+ private:
+ void TestMemberIsIDReference(const RTCStatsMemberInterface& member,
+ const char* expected_type,
+ bool optional) {
+ if (optional && !member.is_defined()) {
+ MarkMemberTested(member, true);
+ return;
+ }
+ bool valid_reference = false;
+ if (member.is_defined()) {
+ if (member.type() == RTCStatsMemberInterface::kString) {
+ // A single ID.
+ const RTCStatsMember<std::string>& id =
+ member.cast_to<RTCStatsMember<std::string>>();
+ const RTCStats* referenced_stats = report_->Get(*id);
+ valid_reference =
+ referenced_stats && referenced_stats->type() == expected_type;
+ } else if (member.type() == RTCStatsMemberInterface::kSequenceString) {
+ // A vector of IDs.
+ valid_reference = true;
+ const RTCStatsMember<std::vector<std::string>>& ids =
+ member.cast_to<RTCStatsMember<std::vector<std::string>>>();
+ for (const std::string& id : *ids) {
+ const RTCStats* referenced_stats = report_->Get(id);
+ if (!referenced_stats || referenced_stats->type() != expected_type) {
+ valid_reference = false;
+ break;
+ }
+ }
+ }
+ }
+ EXPECT_TRUE(valid_reference)
+ << stats_->type() << "." << member.name()
+ << " is not a reference to an "
+ "existing dictionary of type "
+ << expected_type << " (value: "
+ << (member.is_defined() ? member.ValueToString() : "null") << ").";
+ MarkMemberTested(member, valid_reference);
+ }
+
+ rtc::scoped_refptr<const RTCStatsReport> report_;
+ const RTCStats* stats_;
+ std::set<const RTCStatsMemberInterface*> untested_members_;
+ bool all_tests_successful_;
+};
+
+class RTCStatsReportVerifier {
+ public:
+ static std::set<const char*> StatsTypes() {
+ std::set<const char*> stats_types;
+ stats_types.insert(RTCCertificateStats::kType);
+ stats_types.insert(RTCCodecStats::kType);
+ stats_types.insert(RTCDataChannelStats::kType);
+ stats_types.insert(RTCIceCandidatePairStats::kType);
+ stats_types.insert(RTCLocalIceCandidateStats::kType);
+ stats_types.insert(RTCRemoteIceCandidateStats::kType);
+ stats_types.insert(RTCPeerConnectionStats::kType);
+ stats_types.insert(RTCInboundRtpStreamStats::kType);
+ stats_types.insert(RTCOutboundRtpStreamStats::kType);
+ stats_types.insert(RTCTransportStats::kType);
+ return stats_types;
+ }
+
+ explicit RTCStatsReportVerifier(const RTCStatsReport* report)
+ : report_(report) {}
+
+  // Walks every stats object in the report and dispatches it to the matching
+  // type-specific Verify*() method. Fails (via gtest EXPECT macros) if:
+  //  - the report does not contain exactly one RTCTransportStats,
+  //  - an unrecognized stats type is encountered,
+  //  - a type listed by StatsTypes() never appeared and is not listed in
+  //    `allowed_missing_stats`, or
+  //  - any per-member verification reported a problem.
+  void VerifyReport(std::vector<const char*> allowed_missing_stats) {
+    std::set<const char*> missing_stats = StatsTypes();
+    bool verify_successful = true;
+    std::vector<const RTCTransportStats*> transport_stats =
+        report_->GetStatsOfType<RTCTransportStats>();
+    EXPECT_EQ(transport_stats.size(), 1U);
+    // The selected pair id is resolved up-front because candidate-pair stats
+    // are verified differently depending on whether they are the selected
+    // pair (see VerifyRTCIceCandidatePairStats).
+    std::string selected_candidate_pair_id =
+        *transport_stats[0]->selected_candidate_pair_id;
+    for (const RTCStats& stats : *report_) {
+      missing_stats.erase(stats.type());
+      if (stats.type() == RTCCertificateStats::kType) {
+        verify_successful &=
+            VerifyRTCCertificateStats(stats.cast_to<RTCCertificateStats>());
+      } else if (stats.type() == RTCCodecStats::kType) {
+        verify_successful &=
+            VerifyRTCCodecStats(stats.cast_to<RTCCodecStats>());
+      } else if (stats.type() == RTCDataChannelStats::kType) {
+        verify_successful &=
+            VerifyRTCDataChannelStats(stats.cast_to<RTCDataChannelStats>());
+      } else if (stats.type() == RTCIceCandidatePairStats::kType) {
+        verify_successful &= VerifyRTCIceCandidatePairStats(
+            stats.cast_to<RTCIceCandidatePairStats>(),
+            stats.id() == selected_candidate_pair_id);
+      } else if (stats.type() == RTCLocalIceCandidateStats::kType) {
+        verify_successful &= VerifyRTCLocalIceCandidateStats(
+            stats.cast_to<RTCLocalIceCandidateStats>());
+      } else if (stats.type() == RTCRemoteIceCandidateStats::kType) {
+        verify_successful &= VerifyRTCRemoteIceCandidateStats(
+            stats.cast_to<RTCRemoteIceCandidateStats>());
+      } else if (stats.type() == RTCPeerConnectionStats::kType) {
+        verify_successful &= VerifyRTCPeerConnectionStats(
+            stats.cast_to<RTCPeerConnectionStats>());
+      } else if (stats.type() == RTCInboundRtpStreamStats::kType) {
+        verify_successful &= VerifyRTCInboundRtpStreamStats(
+            stats.cast_to<RTCInboundRtpStreamStats>());
+      } else if (stats.type() == RTCOutboundRtpStreamStats::kType) {
+        verify_successful &= VerifyRTCOutboundRtpStreamStats(
+            stats.cast_to<RTCOutboundRtpStreamStats>());
+      } else if (stats.type() == RTCRemoteInboundRtpStreamStats::kType) {
+        verify_successful &= VerifyRTCRemoteInboundRtpStreamStats(
+            stats.cast_to<RTCRemoteInboundRtpStreamStats>());
+      } else if (stats.type() == RTCRemoteOutboundRtpStreamStats::kType) {
+        verify_successful &= VerifyRTCRemoteOutboundRtpStreamStats(
+            stats.cast_to<RTCRemoteOutboundRtpStreamStats>());
+      } else if (stats.type() == RTCAudioSourceStats::kType) {
+        // RTCAudioSourceStats::kType and RTCVideoSourceStats::kType both have
+        // the value "media-source", but they are distinguishable with pointer
+        // equality (==). In JavaScript they would be distinguished with `kind`.
+        verify_successful &=
+            VerifyRTCAudioSourceStats(stats.cast_to<RTCAudioSourceStats>());
+      } else if (stats.type() == RTCVideoSourceStats::kType) {
+        // RTCAudioSourceStats::kType and RTCVideoSourceStats::kType both have
+        // the value "media-source", but they are distinguishable with pointer
+        // equality (==). In JavaScript they would be distinguished with `kind`.
+        verify_successful &=
+            VerifyRTCVideoSourceStats(stats.cast_to<RTCVideoSourceStats>());
+      } else if (stats.type() == RTCTransportStats::kType) {
+        verify_successful &=
+            VerifyRTCTransportStats(stats.cast_to<RTCTransportStats>());
+      } else if (stats.type() == RTCAudioPlayoutStats::kType) {
+        verify_successful &=
+            VerifyRTCAudioPlayoutStats(stats.cast_to<RTCAudioPlayoutStats>());
+      } else {
+        EXPECT_TRUE(false) << "Unrecognized stats type: " << stats.type();
+        verify_successful = false;
+      }
+    }
+    // Anything still in `missing_stats` never occurred in the report; only
+    // types explicitly allowed by the caller may be absent.
+    for (const char* missing : missing_stats) {
+      if (!absl::c_linear_search(allowed_missing_stats, missing)) {
+        verify_successful = false;
+        EXPECT_TRUE(false) << "Missing expected stats type: " << missing;
+      }
+    }
+    EXPECT_TRUE(verify_successful)
+        << "One or more problems with the stats. This is the report:\n"
+        << report_->ToJson();
+  }
+
+  // Verifies RTCCertificateStats members. The issuer certificate reference is
+  // optional since the root of the chain has no issuer.
+  bool VerifyRTCCertificateStats(const RTCCertificateStats& certificate) {
+    RTCStatsVerifier verifier(report_.get(), &certificate);
+    verifier.TestMemberIsDefined(certificate.fingerprint);
+    verifier.TestMemberIsDefined(certificate.fingerprint_algorithm);
+    verifier.TestMemberIsDefined(certificate.base64_certificate);
+    verifier.TestMemberIsOptionalIDReference(certificate.issuer_certificate_id,
+                                             RTCCertificateStats::kType);
+    return verifier.ExpectAllMembersSuccessfullyTested();
+  }
+
+  // Verifies RTCCodecStats members. `channels` is only expected for audio
+  // codecs (detected via the "audio/..." mime type prefix).
+  bool VerifyRTCCodecStats(const RTCCodecStats& codec) {
+    RTCStatsVerifier verifier(report_.get(), &codec);
+    verifier.TestMemberIsIDReference(codec.transport_id,
+                                     RTCTransportStats::kType);
+    verifier.TestMemberIsDefined(codec.payload_type);
+    verifier.TestMemberIsDefined(codec.mime_type);
+    verifier.TestMemberIsPositive<uint32_t>(codec.clock_rate);
+
+    // rfind(prefix, 0) == 0 is a starts-with check on the mime type.
+    if (codec.mime_type->rfind("audio", 0) == 0)
+      verifier.TestMemberIsPositive<uint32_t>(codec.channels);
+    else
+      verifier.TestMemberIsUndefined(codec.channels);
+
+    // sdp_fmtp_line is an optional field.
+    verifier.MarkMemberTested(codec.sdp_fmtp_line, true);
+    return verifier.ExpectAllMembersSuccessfullyTested();
+  }
+
+  // Verifies RTCDataChannelStats members: label/protocol/id/state plus
+  // non-negative message and byte counters in both directions.
+  bool VerifyRTCDataChannelStats(const RTCDataChannelStats& data_channel) {
+    RTCStatsVerifier verifier(report_.get(), &data_channel);
+    verifier.TestMemberIsDefined(data_channel.label);
+    verifier.TestMemberIsDefined(data_channel.protocol);
+    verifier.TestMemberIsDefined(data_channel.data_channel_identifier);
+    verifier.TestMemberIsDefined(data_channel.state);
+    verifier.TestMemberIsNonNegative<uint32_t>(data_channel.messages_sent);
+    verifier.TestMemberIsNonNegative<uint64_t>(data_channel.bytes_sent);
+    verifier.TestMemberIsNonNegative<uint32_t>(data_channel.messages_received);
+    verifier.TestMemberIsNonNegative<uint64_t>(data_channel.bytes_received);
+    return verifier.ExpectAllMembersSuccessfullyTested();
+  }
+
+  // Verifies RTCIceCandidatePairStats members. `is_selected_pair` indicates
+  // whether this pair is the transport's selected candidate pair; only the
+  // selected pair is expected to carry an available-outgoing-bitrate estimate
+  // and must be nominated.
+  bool VerifyRTCIceCandidatePairStats(
+      const RTCIceCandidatePairStats& candidate_pair,
+      bool is_selected_pair) {
+    RTCStatsVerifier verifier(report_.get(), &candidate_pair);
+    verifier.TestMemberIsIDReference(candidate_pair.transport_id,
+                                     RTCTransportStats::kType);
+    verifier.TestMemberIsIDReference(candidate_pair.local_candidate_id,
+                                     RTCLocalIceCandidateStats::kType);
+    verifier.TestMemberIsIDReference(candidate_pair.remote_candidate_id,
+                                     RTCRemoteIceCandidateStats::kType);
+    verifier.TestMemberIsDefined(candidate_pair.state);
+    verifier.TestMemberIsNonNegative<uint64_t>(candidate_pair.priority);
+    verifier.TestMemberIsDefined(candidate_pair.nominated);
+    verifier.TestMemberIsDefined(candidate_pair.writable);
+    verifier.TestMemberIsNonNegative<uint64_t>(candidate_pair.packets_sent);
+    verifier.TestMemberIsNonNegative<uint64_t>(
+        candidate_pair.packets_discarded_on_send);
+    verifier.TestMemberIsNonNegative<uint64_t>(candidate_pair.packets_received);
+    verifier.TestMemberIsNonNegative<uint64_t>(candidate_pair.bytes_sent);
+    verifier.TestMemberIsNonNegative<uint64_t>(
+        candidate_pair.bytes_discarded_on_send);
+    verifier.TestMemberIsNonNegative<uint64_t>(candidate_pair.bytes_received);
+    verifier.TestMemberIsNonNegative<double>(
+        candidate_pair.total_round_trip_time);
+    verifier.TestMemberIsNonNegative<double>(
+        candidate_pair.current_round_trip_time);
+    if (is_selected_pair) {
+      verifier.TestMemberIsNonNegative<double>(
+          candidate_pair.available_outgoing_bitrate);
+      // A pair should be nominated in order to be selected.
+      EXPECT_TRUE(*candidate_pair.nominated);
+    } else {
+      verifier.TestMemberIsUndefined(candidate_pair.available_outgoing_bitrate);
+    }
+    verifier.TestMemberIsUndefined(candidate_pair.available_incoming_bitrate);
+    verifier.TestMemberIsNonNegative<uint64_t>(
+        candidate_pair.requests_received);
+    verifier.TestMemberIsNonNegative<uint64_t>(candidate_pair.requests_sent);
+    verifier.TestMemberIsNonNegative<uint64_t>(
+        candidate_pair.responses_received);
+    verifier.TestMemberIsNonNegative<uint64_t>(candidate_pair.responses_sent);
+    verifier.TestMemberIsNonNegative<uint64_t>(
+        candidate_pair.consent_requests_sent);
+    verifier.TestMemberIsDefined(candidate_pair.last_packet_received_timestamp);
+    verifier.TestMemberIsDefined(candidate_pair.last_packet_sent_timestamp);
+
+    return verifier.ExpectAllMembersSuccessfullyTested();
+  }
+
+  // Shared verification for local and remote ICE candidate stats. Network
+  // metadata (network_type, network_adapter_type, vpn) is only known for
+  // local candidates; several spec members (url, relay_protocol, related
+  // address/port, tcp_type) are expected to be absent in this test setup.
+  bool VerifyRTCIceCandidateStats(const RTCIceCandidateStats& candidate) {
+    RTCStatsVerifier verifier(report_.get(), &candidate);
+    verifier.TestMemberIsIDReference(candidate.transport_id,
+                                     RTCTransportStats::kType);
+    verifier.TestMemberIsDefined(candidate.is_remote);
+    if (*candidate.is_remote) {
+      verifier.TestMemberIsUndefined(candidate.network_type);
+      verifier.TestMemberIsUndefined(candidate.network_adapter_type);
+      verifier.TestMemberIsUndefined(candidate.vpn);
+    } else {
+      verifier.TestMemberIsDefined(candidate.network_type);
+      verifier.TestMemberIsDefined(candidate.network_adapter_type);
+      verifier.TestMemberIsDefined(candidate.vpn);
+    }
+    verifier.TestMemberIsDefined(candidate.ip);
+    verifier.TestMemberIsDefined(candidate.address);
+    verifier.TestMemberIsNonNegative<int32_t>(candidate.port);
+    verifier.TestMemberIsDefined(candidate.protocol);
+    verifier.TestMemberIsDefined(candidate.candidate_type);
+    verifier.TestMemberIsNonNegative<int32_t>(candidate.priority);
+    verifier.TestMemberIsUndefined(candidate.url);
+    verifier.TestMemberIsUndefined(candidate.relay_protocol);
+    verifier.TestMemberIsDefined(candidate.foundation);
+    verifier.TestMemberIsUndefined(candidate.related_address);
+    verifier.TestMemberIsUndefined(candidate.related_port);
+    verifier.TestMemberIsDefined(candidate.username_fragment);
+    verifier.TestMemberIsUndefined(candidate.tcp_type);
+    return verifier.ExpectAllMembersSuccessfullyTested();
+  }
+
+  // Local candidates share the common ICE candidate verification.
+  bool VerifyRTCLocalIceCandidateStats(
+      const RTCLocalIceCandidateStats& local_candidate) {
+    return VerifyRTCIceCandidateStats(local_candidate);
+  }
+
+  // Remote candidates share the common ICE candidate verification.
+  bool VerifyRTCRemoteIceCandidateStats(
+      const RTCRemoteIceCandidateStats& remote_candidate) {
+    return VerifyRTCIceCandidateStats(remote_candidate);
+  }
+
+  // Verifies RTCPeerConnectionStats: the opened/closed data channel counters
+  // must be present and non-negative.
+  bool VerifyRTCPeerConnectionStats(
+      const RTCPeerConnectionStats& peer_connection) {
+    RTCStatsVerifier verifier(report_.get(), &peer_connection);
+    verifier.TestMemberIsNonNegative<uint32_t>(
+        peer_connection.data_channels_opened);
+    verifier.TestMemberIsNonNegative<uint32_t>(
+        peer_connection.data_channels_closed);
+    return verifier.ExpectAllMembersSuccessfullyTested();
+  }
+
+  // Verifies the members common to every RTP stream stats dictionary
+  // (RTCRtpStreamStats base). Shared by the in/outbound and remote variants.
+  void VerifyRTCRtpStreamStats(const RTCRtpStreamStats& stream,
+                               RTCStatsVerifier& verifier) {
+    verifier.TestMemberIsDefined(stream.ssrc);
+    verifier.TestMemberIsDefined(stream.kind);
+    verifier.TestMemberIsIDReference(stream.transport_id,
+                                     RTCTransportStats::kType);
+    verifier.TestMemberIsIDReference(stream.codec_id, RTCCodecStats::kType);
+  }
+
+  // Verifies RTCSentRtpStreamStats: the base stream members plus the
+  // sent packet/byte counters.
+  void VerifyRTCSentRtpStreamStats(const RTCSentRtpStreamStats& sent_stream,
+                                   RTCStatsVerifier& verifier) {
+    VerifyRTCRtpStreamStats(sent_stream, verifier);
+    verifier.TestMemberIsNonNegative<uint64_t>(sent_stream.packets_sent);
+    verifier.TestMemberIsNonNegative<uint64_t>(sent_stream.bytes_sent);
+  }
+
+  // Verifies RTCInboundRtpStreamStats. Most members are gated on the stream
+  // `kind`: video streams must carry decoder/frame/freeze metrics and leave
+  // the audio-sample metrics undefined, and vice versa for audio streams.
+  bool VerifyRTCInboundRtpStreamStats(
+      const RTCInboundRtpStreamStats& inbound_stream) {
+    RTCStatsVerifier verifier(report_.get(), &inbound_stream);
+    VerifyRTCReceivedRtpStreamStats(inbound_stream, verifier);
+    verifier.TestMemberIsOptionalIDReference(
+        inbound_stream.remote_id, RTCRemoteOutboundRtpStreamStats::kType);
+    verifier.TestMemberIsDefined(inbound_stream.mid);
+    verifier.TestMemberIsDefined(inbound_stream.track_identifier);
+    // Decoder-related members exist for video only.
+    if (inbound_stream.kind.is_defined() && *inbound_stream.kind == "video") {
+      verifier.TestMemberIsNonNegative<uint64_t>(inbound_stream.qp_sum);
+      verifier.TestMemberIsDefined(inbound_stream.decoder_implementation);
+      verifier.TestMemberIsDefined(inbound_stream.power_efficient_decoder);
+    } else {
+      verifier.TestMemberIsUndefined(inbound_stream.qp_sum);
+      verifier.TestMemberIsUndefined(inbound_stream.decoder_implementation);
+      verifier.TestMemberIsUndefined(inbound_stream.power_efficient_decoder);
+    }
+    verifier.TestMemberIsNonNegative<uint32_t>(inbound_stream.packets_received);
+    if (inbound_stream.kind.is_defined() && *inbound_stream.kind == "audio") {
+      verifier.TestMemberIsNonNegative<uint64_t>(
+          inbound_stream.packets_discarded);
+      verifier.TestMemberIsNonNegative<uint64_t>(
+          inbound_stream.fec_packets_received);
+      verifier.TestMemberIsNonNegative<uint64_t>(
+          inbound_stream.fec_packets_discarded);
+      verifier.TestMemberIsUndefined(inbound_stream.fec_bytes_received);
+    } else {
+      verifier.TestMemberIsUndefined(inbound_stream.packets_discarded);
+      // FEC stats are only present when FlexFEC was negotiated which is guarded
+      // by the WebRTC-FlexFEC-03-Advertised/Enabled/ field trial and off by
+      // default.
+      verifier.TestMemberIsUndefined(inbound_stream.fec_bytes_received);
+      verifier.TestMemberIsUndefined(inbound_stream.fec_packets_received);
+      verifier.TestMemberIsUndefined(inbound_stream.fec_packets_discarded);
+      verifier.TestMemberIsUndefined(inbound_stream.fec_ssrc);
+    }
+    verifier.TestMemberIsNonNegative<uint64_t>(inbound_stream.bytes_received);
+    verifier.TestMemberIsNonNegative<uint64_t>(
+        inbound_stream.header_bytes_received);
+    verifier.TestMemberIsDefined(inbound_stream.last_packet_received_timestamp);
+    // Frame dimensions are only reported once at least one frame arrived.
+    if (inbound_stream.frames_received.ValueOrDefault(0) > 0) {
+      verifier.TestMemberIsNonNegative<uint32_t>(inbound_stream.frame_width);
+      verifier.TestMemberIsNonNegative<uint32_t>(inbound_stream.frame_height);
+    } else {
+      verifier.TestMemberIsUndefined(inbound_stream.frame_width);
+      verifier.TestMemberIsUndefined(inbound_stream.frame_height);
+    }
+    if (inbound_stream.frames_per_second.is_defined()) {
+      verifier.TestMemberIsNonNegative<double>(
+          inbound_stream.frames_per_second);
+    } else {
+      verifier.TestMemberIsUndefined(inbound_stream.frames_per_second);
+    }
+    verifier.TestMemberIsNonNegative<double>(
+        inbound_stream.jitter_buffer_delay);
+    verifier.TestMemberIsNonNegative<uint64_t>(
+        inbound_stream.jitter_buffer_emitted_count);
+    verifier.TestMemberIsNonNegative<double>(
+        inbound_stream.jitter_buffer_target_delay);
+    verifier.TestMemberIsNonNegative<double>(
+        inbound_stream.jitter_buffer_minimum_delay);
+    // Sample-level metrics are audio-only; frame counts and FIR/PLI/NACK
+    // feedback counters are video-only.
+    if (inbound_stream.kind.is_defined() && *inbound_stream.kind == "video") {
+      verifier.TestMemberIsUndefined(inbound_stream.total_samples_received);
+      verifier.TestMemberIsUndefined(inbound_stream.concealed_samples);
+      verifier.TestMemberIsUndefined(inbound_stream.silent_concealed_samples);
+      verifier.TestMemberIsUndefined(inbound_stream.concealment_events);
+      verifier.TestMemberIsUndefined(
+          inbound_stream.inserted_samples_for_deceleration);
+      verifier.TestMemberIsUndefined(
+          inbound_stream.removed_samples_for_acceleration);
+      verifier.TestMemberIsUndefined(inbound_stream.audio_level);
+      verifier.TestMemberIsUndefined(inbound_stream.total_audio_energy);
+      verifier.TestMemberIsUndefined(inbound_stream.total_samples_duration);
+      verifier.TestMemberIsNonNegative<uint32_t>(
+          inbound_stream.frames_received);
+      verifier.TestMemberIsNonNegative<uint32_t>(inbound_stream.fir_count);
+      verifier.TestMemberIsNonNegative<uint32_t>(inbound_stream.pli_count);
+      verifier.TestMemberIsNonNegative<uint32_t>(inbound_stream.nack_count);
+    } else {
+      verifier.TestMemberIsUndefined(inbound_stream.fir_count);
+      verifier.TestMemberIsUndefined(inbound_stream.pli_count);
+      verifier.TestMemberIsUndefined(inbound_stream.nack_count);
+      verifier.TestMemberIsPositive<uint64_t>(
+          inbound_stream.total_samples_received);
+      verifier.TestMemberIsNonNegative<uint64_t>(
+          inbound_stream.concealed_samples);
+      verifier.TestMemberIsNonNegative<uint64_t>(
+          inbound_stream.silent_concealed_samples);
+      verifier.TestMemberIsNonNegative<uint64_t>(
+          inbound_stream.concealment_events);
+      verifier.TestMemberIsNonNegative<uint64_t>(
+          inbound_stream.inserted_samples_for_deceleration);
+      verifier.TestMemberIsNonNegative<uint64_t>(
+          inbound_stream.removed_samples_for_acceleration);
+      verifier.TestMemberIsNonNegative<double>(
+          inbound_stream.jitter_buffer_target_delay);
+      verifier.TestMemberIsNonNegative<double>(
+          inbound_stream.jitter_buffer_minimum_delay);
+      verifier.TestMemberIsPositive<double>(inbound_stream.audio_level);
+      verifier.TestMemberIsPositive<double>(inbound_stream.total_audio_energy);
+      verifier.TestMemberIsPositive<double>(
+          inbound_stream.total_samples_duration);
+      verifier.TestMemberIsUndefined(inbound_stream.frames_received);
+    }
+
+    // RTX stats are typically only defined for video where RTX is negotiated.
+    if (inbound_stream.kind.is_defined() && *inbound_stream.kind == "video") {
+      verifier.TestMemberIsNonNegative<uint64_t>(
+          inbound_stream.retransmitted_packets_received);
+      verifier.TestMemberIsNonNegative<uint64_t>(
+          inbound_stream.retransmitted_bytes_received);
+      verifier.TestMemberIsNonNegative<uint32_t>(inbound_stream.rtx_ssrc);
+    } else {
+      verifier.TestMemberIsUndefined(
+          inbound_stream.retransmitted_packets_received);
+      verifier.TestMemberIsUndefined(
+          inbound_stream.retransmitted_bytes_received);
+      verifier.TestMemberIsUndefined(inbound_stream.rtx_ssrc);
+      verifier.TestMemberIsUndefined(inbound_stream.fec_ssrc);
+    }
+
+    // Test runtime too short to get an estimate (at least two RTCP sender
+    // reports need to be received).
+    verifier.MarkMemberTested(inbound_stream.estimated_playout_timestamp, true);
+    if (inbound_stream.kind.is_defined() && *inbound_stream.kind == "video") {
+      verifier.TestMemberIsDefined(inbound_stream.frames_decoded);
+      verifier.TestMemberIsDefined(inbound_stream.key_frames_decoded);
+      verifier.TestMemberIsNonNegative<uint32_t>(inbound_stream.frames_dropped);
+      verifier.TestMemberIsNonNegative<double>(
+          inbound_stream.total_decode_time);
+      verifier.TestMemberIsNonNegative<double>(
+          inbound_stream.total_processing_delay);
+      verifier.TestMemberIsNonNegative<double>(
+          inbound_stream.total_assembly_time);
+      verifier.TestMemberIsDefined(
+          inbound_stream.frames_assembled_from_multiple_packets);
+      verifier.TestMemberIsNonNegative<double>(
+          inbound_stream.total_inter_frame_delay);
+      verifier.TestMemberIsNonNegative<double>(
+          inbound_stream.total_squared_inter_frame_delay);
+      verifier.TestMemberIsNonNegative<uint32_t>(inbound_stream.pause_count);
+      verifier.TestMemberIsNonNegative<double>(
+          inbound_stream.total_pauses_duration);
+      verifier.TestMemberIsNonNegative<uint32_t>(inbound_stream.freeze_count);
+      verifier.TestMemberIsNonNegative<double>(
+          inbound_stream.total_freezes_duration);
+      // The integration test is not set up to test screen share; don't require
+      // this to be present.
+      verifier.MarkMemberTested(inbound_stream.content_type, true);
+      verifier.TestMemberIsUndefined(inbound_stream.jitter_buffer_flushes);
+      verifier.TestMemberIsUndefined(
+          inbound_stream.delayed_packet_outage_samples);
+      verifier.TestMemberIsUndefined(
+          inbound_stream.relative_packet_arrival_delay);
+      verifier.TestMemberIsUndefined(inbound_stream.interruption_count);
+      verifier.TestMemberIsUndefined(
+          inbound_stream.total_interruption_duration);
+      verifier.TestMemberIsNonNegative<double>(
+          inbound_stream.min_playout_delay);
+      verifier.TestMemberIsDefined(inbound_stream.goog_timing_frame_info);
+    } else {
+      verifier.TestMemberIsUndefined(inbound_stream.frames_decoded);
+      verifier.TestMemberIsUndefined(inbound_stream.key_frames_decoded);
+      verifier.TestMemberIsUndefined(inbound_stream.frames_dropped);
+      verifier.TestMemberIsUndefined(inbound_stream.total_decode_time);
+      verifier.TestMemberIsUndefined(inbound_stream.total_processing_delay);
+      verifier.TestMemberIsUndefined(inbound_stream.total_assembly_time);
+      verifier.TestMemberIsUndefined(
+          inbound_stream.frames_assembled_from_multiple_packets);
+      verifier.TestMemberIsUndefined(inbound_stream.total_inter_frame_delay);
+      verifier.TestMemberIsUndefined(
+          inbound_stream.total_squared_inter_frame_delay);
+      verifier.TestMemberIsUndefined(inbound_stream.pause_count);
+      verifier.TestMemberIsUndefined(inbound_stream.total_pauses_duration);
+      verifier.TestMemberIsUndefined(inbound_stream.freeze_count);
+      verifier.TestMemberIsUndefined(inbound_stream.total_freezes_duration);
+      verifier.TestMemberIsUndefined(inbound_stream.content_type);
+      verifier.TestMemberIsNonNegative<uint64_t>(
+          inbound_stream.jitter_buffer_flushes);
+      verifier.TestMemberIsNonNegative<uint64_t>(
+          inbound_stream.delayed_packet_outage_samples);
+      verifier.TestMemberIsNonNegative<double>(
+          inbound_stream.relative_packet_arrival_delay);
+      verifier.TestMemberIsNonNegative<uint32_t>(
+          inbound_stream.interruption_count);
+      verifier.TestMemberIsNonNegative<double>(
+          inbound_stream.total_interruption_duration);
+      verifier.TestMemberIsUndefined(inbound_stream.min_playout_delay);
+      verifier.TestMemberIsUndefined(inbound_stream.goog_timing_frame_info);
+    }
+    // The playout id links audio streams to their RTCAudioPlayoutStats.
+    if (inbound_stream.kind.is_defined() && *inbound_stream.kind == "audio") {
+      verifier.TestMemberIsDefined(inbound_stream.playout_id);
+    } else {
+      verifier.TestMemberIsUndefined(inbound_stream.playout_id);
+    }
+
+    return verifier.ExpectAllMembersSuccessfullyTested();
+  }
+
+  // Verifies RTCOutboundRtpStreamStats. Encoder/frame/quality-limitation
+  // members are expected for video streams only; the media source reference
+  // must point at the matching audio or video source stats.
+  bool VerifyRTCOutboundRtpStreamStats(
+      const RTCOutboundRtpStreamStats& outbound_stream) {
+    RTCStatsVerifier verifier(report_.get(), &outbound_stream);
+    VerifyRTCSentRtpStreamStats(outbound_stream, verifier);
+
+    verifier.TestMemberIsDefined(outbound_stream.mid);
+    verifier.TestMemberIsDefined(outbound_stream.active);
+    if (outbound_stream.kind.is_defined() && *outbound_stream.kind == "video") {
+      verifier.TestMemberIsIDReference(outbound_stream.media_source_id,
+                                       RTCVideoSourceStats::kType);
+      verifier.TestMemberIsNonNegative<uint32_t>(outbound_stream.fir_count);
+      verifier.TestMemberIsNonNegative<uint32_t>(outbound_stream.pli_count);
+      // qp_sum only accumulates once frames have actually been encoded.
+      if (*outbound_stream.frames_encoded > 0) {
+        verifier.TestMemberIsNonNegative<uint64_t>(outbound_stream.qp_sum);
+      } else {
+        verifier.TestMemberIsUndefined(outbound_stream.qp_sum);
+      }
+    } else {
+      verifier.TestMemberIsUndefined(outbound_stream.fir_count);
+      verifier.TestMemberIsUndefined(outbound_stream.pli_count);
+      verifier.TestMemberIsIDReference(outbound_stream.media_source_id,
+                                       RTCAudioSourceStats::kType);
+      verifier.TestMemberIsUndefined(outbound_stream.qp_sum);
+    }
+    verifier.TestMemberIsNonNegative<uint32_t>(outbound_stream.nack_count);
+    verifier.TestMemberIsOptionalIDReference(
+        outbound_stream.remote_id, RTCRemoteInboundRtpStreamStats::kType);
+    verifier.TestMemberIsNonNegative<double>(
+        outbound_stream.total_packet_send_delay);
+    verifier.TestMemberIsNonNegative<uint64_t>(
+        outbound_stream.retransmitted_packets_sent);
+    verifier.TestMemberIsNonNegative<uint64_t>(
+        outbound_stream.header_bytes_sent);
+    verifier.TestMemberIsNonNegative<uint64_t>(
+        outbound_stream.retransmitted_bytes_sent);
+    verifier.TestMemberIsNonNegative<double>(outbound_stream.target_bitrate);
+    if (outbound_stream.kind.is_defined() && *outbound_stream.kind == "video") {
+      verifier.TestMemberIsDefined(outbound_stream.frames_encoded);
+      verifier.TestMemberIsDefined(outbound_stream.key_frames_encoded);
+      verifier.TestMemberIsNonNegative<double>(
+          outbound_stream.total_encode_time);
+      verifier.TestMemberIsNonNegative<uint64_t>(
+          outbound_stream.total_encoded_bytes_target);
+      verifier.TestMemberIsDefined(outbound_stream.quality_limitation_reason);
+      verifier.TestMemberIsDefined(
+          outbound_stream.quality_limitation_durations);
+      verifier.TestMemberIsNonNegative<uint32_t>(
+          outbound_stream.quality_limitation_resolution_changes);
+      // The integration test is not set up to test screen share; don't require
+      // this to be present.
+      verifier.MarkMemberTested(outbound_stream.content_type, true);
+      verifier.TestMemberIsDefined(outbound_stream.encoder_implementation);
+      verifier.TestMemberIsDefined(outbound_stream.power_efficient_encoder);
+      // Unless an implementation-specific amount of time has passed and at
+      // least one frame has been encoded, undefined is reported. Because it
+      // is hard to tell what is the case here, we treat FPS as optional.
+      // TODO(hbos): Update the tests to run until all implemented metrics
+      // should be populated.
+      if (outbound_stream.frames_per_second.is_defined()) {
+        verifier.TestMemberIsNonNegative<double>(
+            outbound_stream.frames_per_second);
+      } else {
+        verifier.TestMemberIsUndefined(outbound_stream.frames_per_second);
+      }
+      verifier.TestMemberIsNonNegative<uint32_t>(outbound_stream.frame_height);
+      verifier.TestMemberIsNonNegative<uint32_t>(outbound_stream.frame_width);
+      verifier.TestMemberIsNonNegative<uint32_t>(outbound_stream.frames_sent);
+      verifier.TestMemberIsNonNegative<uint32_t>(
+          outbound_stream.huge_frames_sent);
+      verifier.MarkMemberTested(outbound_stream.rid, true);
+      verifier.TestMemberIsDefined(outbound_stream.scalability_mode);
+      verifier.TestMemberIsNonNegative<uint32_t>(outbound_stream.rtx_ssrc);
+    } else {
+      verifier.TestMemberIsUndefined(outbound_stream.frames_encoded);
+      verifier.TestMemberIsUndefined(outbound_stream.key_frames_encoded);
+      verifier.TestMemberIsUndefined(outbound_stream.total_encode_time);
+      verifier.TestMemberIsUndefined(
+          outbound_stream.total_encoded_bytes_target);
+      verifier.TestMemberIsUndefined(outbound_stream.quality_limitation_reason);
+      verifier.TestMemberIsUndefined(
+          outbound_stream.quality_limitation_durations);
+      verifier.TestMemberIsUndefined(
+          outbound_stream.quality_limitation_resolution_changes);
+      verifier.TestMemberIsUndefined(outbound_stream.content_type);
+      // TODO(hbos): Implement for audio as well.
+      verifier.TestMemberIsUndefined(outbound_stream.encoder_implementation);
+      verifier.TestMemberIsUndefined(outbound_stream.power_efficient_encoder);
+      verifier.TestMemberIsUndefined(outbound_stream.rid);
+      verifier.TestMemberIsUndefined(outbound_stream.frames_per_second);
+      verifier.TestMemberIsUndefined(outbound_stream.frame_height);
+      verifier.TestMemberIsUndefined(outbound_stream.frame_width);
+      verifier.TestMemberIsUndefined(outbound_stream.frames_sent);
+      verifier.TestMemberIsUndefined(outbound_stream.huge_frames_sent);
+      verifier.TestMemberIsUndefined(outbound_stream.scalability_mode);
+      verifier.TestMemberIsUndefined(outbound_stream.rtx_ssrc);
+    }
+    return verifier.ExpectAllMembersSuccessfullyTested();
+  }
+
+  // Verifies RTCReceivedRtpStreamStats: the base stream members plus jitter
+  // and packet loss.
+  void VerifyRTCReceivedRtpStreamStats(
+      const RTCReceivedRtpStreamStats& received_rtp,
+      RTCStatsVerifier& verifier) {
+    VerifyRTCRtpStreamStats(received_rtp, verifier);
+    verifier.TestMemberIsNonNegative<double>(received_rtp.jitter);
+    verifier.TestMemberIsDefined(received_rtp.packets_lost);
+  }
+
+  // Verifies RTCRemoteInboundRtpStreamStats (the remote peer's view of our
+  // outbound stream): loss fraction, back-reference to the local outbound
+  // stats and round-trip time measurements.
+  bool VerifyRTCRemoteInboundRtpStreamStats(
+      const RTCRemoteInboundRtpStreamStats& remote_inbound_stream) {
+    RTCStatsVerifier verifier(report_.get(), &remote_inbound_stream);
+    VerifyRTCReceivedRtpStreamStats(remote_inbound_stream, verifier);
+    verifier.TestMemberIsDefined(remote_inbound_stream.fraction_lost);
+    verifier.TestMemberIsIDReference(remote_inbound_stream.local_id,
+                                     RTCOutboundRtpStreamStats::kType);
+    verifier.TestMemberIsNonNegative<double>(
+        remote_inbound_stream.round_trip_time);
+    verifier.TestMemberIsNonNegative<double>(
+        remote_inbound_stream.total_round_trip_time);
+    verifier.TestMemberIsNonNegative<int32_t>(
+        remote_inbound_stream.round_trip_time_measurements);
+    return verifier.ExpectAllMembersSuccessfullyTested();
+  }
+
+  // Verifies RTCRemoteOutboundRtpStreamStats (the remote peer's view of its
+  // own outbound stream, derived from RTCP sender reports).
+  bool VerifyRTCRemoteOutboundRtpStreamStats(
+      const RTCRemoteOutboundRtpStreamStats& remote_outbound_stream) {
+    RTCStatsVerifier verifier(report_.get(), &remote_outbound_stream);
+    VerifyRTCRtpStreamStats(remote_outbound_stream, verifier);
+    VerifyRTCSentRtpStreamStats(remote_outbound_stream, verifier);
+    verifier.TestMemberIsIDReference(remote_outbound_stream.local_id,
+                                     RTCOutboundRtpStreamStats::kType);
+    verifier.TestMemberIsNonNegative<double>(
+        remote_outbound_stream.remote_timestamp);
+    verifier.TestMemberIsDefined(remote_outbound_stream.reports_sent);
+    return verifier.ExpectAllMembersSuccessfullyTested();
+  }
+
+  // Verifies the members common to both media-source stats dictionaries and
+  // checks that `kind` is consistent with the concrete stats type.
+  void VerifyRTCMediaSourceStats(const RTCMediaSourceStats& media_source,
+                                 RTCStatsVerifier* verifier) {
+    verifier->TestMemberIsDefined(media_source.track_identifier);
+    verifier->TestMemberIsDefined(media_source.kind);
+    if (media_source.kind.is_defined()) {
+      EXPECT_TRUE((*media_source.kind == "audio" &&
+                   media_source.type() == RTCAudioSourceStats::kType) ||
+                  (*media_source.kind == "video" &&
+                   media_source.type() == RTCVideoSourceStats::kType));
+    }
+  }
+
+  // Verifies RTCAudioSourceStats members on top of the common media-source
+  // checks.
+  bool VerifyRTCAudioSourceStats(const RTCAudioSourceStats& audio_source) {
+    RTCStatsVerifier verifier(report_.get(), &audio_source);
+    VerifyRTCMediaSourceStats(audio_source, &verifier);
+    // Audio level, unlike audio energy, only gets updated at a certain
+    // frequency, so we don't require that one to be positive to avoid a race
+    // (https://crbug.com/webrtc/10962).
+    verifier.TestMemberIsNonNegative<double>(audio_source.audio_level);
+    verifier.TestMemberIsPositive<double>(audio_source.total_audio_energy);
+    verifier.TestMemberIsPositive<double>(audio_source.total_samples_duration);
+    // TODO(hbos): `echo_return_loss` and `echo_return_loss_enhancement` are
+    // flaky on msan bot (sometimes defined, sometimes undefined). Should the
+    // test run until available or is there a way to have it always be
+    // defined? crbug.com/627816
+    verifier.MarkMemberTested(audio_source.echo_return_loss, true);
+    verifier.MarkMemberTested(audio_source.echo_return_loss_enhancement, true);
+    return verifier.ExpectAllMembersSuccessfullyTested();
+  }
+
+  // Verifies RTCVideoSourceStats members on top of the common media-source
+  // checks.
+  bool VerifyRTCVideoSourceStats(const RTCVideoSourceStats& video_source) {
+    RTCStatsVerifier verifier(report_.get(), &video_source);
+    VerifyRTCMediaSourceStats(video_source, &verifier);
+    // TODO(hbos): This integration test uses fakes that doesn't support
+    // VideoTrackSourceInterface::Stats. When this is fixed we should
+    // TestMemberIsNonNegative<uint32_t>() for `width` and `height` instead to
+    // reflect real code.
+    verifier.TestMemberIsUndefined(video_source.width);
+    verifier.TestMemberIsUndefined(video_source.height);
+    verifier.TestMemberIsNonNegative<uint32_t>(video_source.frames);
+    verifier.TestMemberIsNonNegative<double>(video_source.frames_per_second);
+    return verifier.ExpectAllMembersSuccessfullyTested();
+  }
+
+  // Verifies RTCTransportStats members: traffic counters, DTLS/SRTP/TLS
+  // negotiation results, ICE state, and the references to the selected
+  // candidate pair and both certificates.
+  bool VerifyRTCTransportStats(const RTCTransportStats& transport) {
+    RTCStatsVerifier verifier(report_.get(), &transport);
+    verifier.TestMemberIsNonNegative<uint64_t>(transport.bytes_sent);
+    verifier.TestMemberIsNonNegative<uint64_t>(transport.packets_sent);
+    verifier.TestMemberIsNonNegative<uint64_t>(transport.bytes_received);
+    verifier.TestMemberIsNonNegative<uint64_t>(transport.packets_received);
+    // Only present when RTCP is not multiplexed over RTP.
+    verifier.TestMemberIsOptionalIDReference(transport.rtcp_transport_stats_id,
+                                             RTCTransportStats::kType);
+    verifier.TestMemberIsDefined(transport.dtls_state);
+    verifier.TestMemberIsIDReference(transport.selected_candidate_pair_id,
+                                     RTCIceCandidatePairStats::kType);
+    verifier.TestMemberIsIDReference(transport.local_certificate_id,
+                                     RTCCertificateStats::kType);
+    verifier.TestMemberIsIDReference(transport.remote_certificate_id,
+                                     RTCCertificateStats::kType);
+    verifier.TestMemberIsDefined(transport.tls_version);
+    verifier.TestMemberIsDefined(transport.dtls_cipher);
+    verifier.TestMemberIsDefined(transport.dtls_role);
+    verifier.TestMemberIsDefined(transport.srtp_cipher);
+    verifier.TestMemberIsPositive<uint32_t>(
+        transport.selected_candidate_pair_changes);
+    verifier.TestMemberIsDefined(transport.ice_role);
+    verifier.TestMemberIsDefined(transport.ice_local_username_fragment);
+    verifier.TestMemberIsDefined(transport.ice_state);
+    return verifier.ExpectAllMembersSuccessfullyTested();
+  }
+
+  // Verifies RTCAudioPlayoutStats: `kind` must be "audio" and all sample /
+  // delay counters must be non-negative.
+  bool VerifyRTCAudioPlayoutStats(const RTCAudioPlayoutStats& audio_playout) {
+    RTCStatsVerifier verifier(report_.get(), &audio_playout);
+    verifier.TestMemberIsDefined(audio_playout.kind);
+    if (audio_playout.kind.is_defined()) {
+      EXPECT_EQ(*audio_playout.kind, "audio");
+    }
+    verifier.TestMemberIsNonNegative<uint64_t>(
+        audio_playout.synthesized_samples_events);
+    verifier.TestMemberIsNonNegative<double>(
+        audio_playout.synthesized_samples_duration);
+    verifier.TestMemberIsNonNegative<uint64_t>(
+        audio_playout.total_samples_count);
+    verifier.TestMemberIsNonNegative<double>(
+        audio_playout.total_samples_duration);
+    verifier.TestMemberIsNonNegative<double>(audio_playout.total_playout_delay);
+    return verifier.ExpectAllMembersSuccessfullyTested();
+  }
+
+ private:
+ rtc::scoped_refptr<const RTCStatsReport> report_;
+};
+
+// These integration tests require SCTP (data channel) support.
+#ifdef WEBRTC_HAVE_SCTP
+// Collects a full stats report from the caller and verifies every stats
+// object in it; no stats type is allowed to be missing.
+TEST_F(RTCStatsIntegrationTest, GetStatsFromCaller) {
+  StartCall();
+
+  rtc::scoped_refptr<const RTCStatsReport> report = GetStatsFromCaller();
+  RTCStatsReportVerifier(report.get()).VerifyReport({});
+
+#if RTC_TRACE_EVENTS_ENABLED
+  EXPECT_EQ(report->ToJson(), RTCStatsReportTraceListener::last_trace());
+#endif
+}
+
+// Same as GetStatsFromCaller but from the callee's side. Polls until the
+// remote-inbound round trip time metrics become defined (they require RTCP
+// exchange), then verifies the full report.
+TEST_F(RTCStatsIntegrationTest, GetStatsFromCallee) {
+  StartCall();
+
+  rtc::scoped_refptr<const RTCStatsReport> report;
+  // Wait for round trip time measurements to be defined.
+  constexpr int kMaxWaitMs = 10000;
+  auto GetStatsReportAndReturnTrueIfRttIsDefined = [&report, this] {
+    report = GetStatsFromCallee();
+    auto inbound_stats =
+        report->GetStatsOfType<RTCRemoteInboundRtpStreamStats>();
+    return !inbound_stats.empty() &&
+           inbound_stats.front()->round_trip_time.is_defined() &&
+           inbound_stats.front()->round_trip_time_measurements.is_defined();
+  };
+  EXPECT_TRUE_WAIT(GetStatsReportAndReturnTrueIfRttIsDefined(), kMaxWaitMs);
+  RTCStatsReportVerifier(report.get()).VerifyReport({});
+
+#if RTC_TRACE_EVENTS_ENABLED
+  EXPECT_EQ(report->ToJson(), RTCStatsReportTraceListener::last_trace());
+#endif
+}
+
+// These tests exercise the integration of the stats selection algorithm inside
+// of PeerConnection. See rtc_stats_traversal_unittest.cc for more detailed
+// stats traversal tests on particular stats graphs.
TEST_F(RTCStatsIntegrationTest, GetStatsWithSenderSelector) {
  StartCall();
  ASSERT_FALSE(caller_->pc()->GetSenders().empty());
  rtc::scoped_refptr<const RTCStatsReport> report =
      GetStatsFromCaller(caller_->pc()->GetSenders()[0]);
  // Stats types that only describe the receive side (or the connection as a
  // whole) are allowed to be absent from a sender-filtered report.
  std::vector<const char*> allowed_missing_stats = {
      // TODO(hbos): Include RTC[Audio/Video]ReceiverStats when implemented.
      // TODO(hbos): Include RTCRemoteOutboundRtpStreamStats when implemented.
      // TODO(hbos): Include RTCRtpContributingSourceStats when implemented.
      RTCInboundRtpStreamStats::kType,
      RTCPeerConnectionStats::kType,
      RTCDataChannelStats::kType,
  };
  RTCStatsReportVerifier(report.get()).VerifyReport(allowed_missing_stats);
  EXPECT_TRUE(report->size());
}

TEST_F(RTCStatsIntegrationTest, GetStatsWithReceiverSelector) {
  StartCall();

  ASSERT_FALSE(caller_->pc()->GetReceivers().empty());
  rtc::scoped_refptr<const RTCStatsReport> report =
      GetStatsFromCaller(caller_->pc()->GetReceivers()[0]);
  // Mirror image of the sender-selector test: send-side stats may be missing.
  std::vector<const char*> allowed_missing_stats = {
      // TODO(hbos): Include RTC[Audio/Video]SenderStats when implemented.
      // TODO(hbos): Include RTCRemoteInboundRtpStreamStats when implemented.
      // TODO(hbos): Include RTCRtpContributingSourceStats when implemented.
      RTCOutboundRtpStreamStats::kType,
      RTCPeerConnectionStats::kType,
      RTCDataChannelStats::kType,
  };
  RTCStatsReportVerifier(report.get()).VerifyReport(allowed_missing_stats);
  EXPECT_TRUE(report->size());
}

// A selector that belongs to a different PeerConnection selects nothing,
// yielding an empty report.
TEST_F(RTCStatsIntegrationTest, GetStatsWithInvalidSenderSelector) {
  StartCall();

  ASSERT_FALSE(callee_->pc()->GetSenders().empty());
  // The selector is invalid for the caller because it belongs to the callee.
  auto invalid_selector = callee_->pc()->GetSenders()[0];
  rtc::scoped_refptr<const RTCStatsReport> report =
      GetStatsFromCaller(invalid_selector);
  EXPECT_FALSE(report->size());
}

TEST_F(RTCStatsIntegrationTest, GetStatsWithInvalidReceiverSelector) {
  StartCall();

  ASSERT_FALSE(callee_->pc()->GetReceivers().empty());
  // The selector is invalid for the caller because it belongs to the callee.
  auto invalid_selector = callee_->pc()->GetReceivers()[0];
  rtc::scoped_refptr<const RTCStatsReport> report =
      GetStatsFromCaller(invalid_selector);
  EXPECT_FALSE(report->size());
}
+
// TODO(bugs.webrtc.org/10041) For now this is equivalent to the following
// test GetsStatsWhileClosingPeerConnection, because pc() is closed by
// PeerConnectionTestWrapper. See: bugs.webrtc.org/9847
TEST_F(RTCStatsIntegrationTest,
       DISABLED_GetStatsWhileDestroyingPeerConnection) {
  StartCall();

  rtc::scoped_refptr<RTCStatsObtainer> stats_obtainer =
      RTCStatsObtainer::Create();
  caller_->pc()->GetStats(stats_obtainer.get());
  // This will destroy the peer connection.
  caller_ = nullptr;
  // Any pending stats requests should have completed in the act of destroying
  // the peer connection.
  ASSERT_TRUE(stats_obtainer->report());
#if RTC_TRACE_EVENTS_ENABLED
  EXPECT_EQ(stats_obtainer->report()->ToJson(),
            RTCStatsReportTraceListener::last_trace());
#endif
}

// Closing (rather than destroying) the peer connection: the in-flight
// GetStats() request is expected to have delivered a report by the time
// Close() returns.
TEST_F(RTCStatsIntegrationTest, GetsStatsWhileClosingPeerConnection) {
  StartCall();

  rtc::scoped_refptr<RTCStatsObtainer> stats_obtainer =
      RTCStatsObtainer::Create();
  caller_->pc()->GetStats(stats_obtainer.get());
  caller_->pc()->Close();

  ASSERT_TRUE(stats_obtainer->report());
#if RTC_TRACE_EVENTS_ENABLED
  EXPECT_EQ(stats_obtainer->report()->ToJson(),
            RTCStatsReportTraceListener::last_trace());
#endif
}
+
// GetStatsReferencedIds() is optimized to recognize what is or isn't a
// referenced ID based on dictionary type information and knowing what members
// are used as references, as opposed to iterating all members to find the ones
// with the "Id" or "Ids" suffix. As such, GetStatsReferencedIds() is tested as
// an integration test instead of a unit test in order to guard against adding
// new references and forgetting to update GetStatsReferencedIds().
TEST_F(RTCStatsIntegrationTest, GetStatsReferencedIds) {
  StartCall();

  rtc::scoped_refptr<const RTCStatsReport> report = GetStatsFromCallee();
  for (const RTCStats& stats : *report) {
    // Find all references by looking at all string members with the "Id" or
    // "Ids" suffix.
    std::set<const std::string*> expected_ids;
    for (const auto* member : stats.Members()) {
      if (!member->is_defined())
        continue;
      if (member->type() == RTCStatsMemberInterface::kString) {
        if (absl::EndsWith(member->name(), "Id")) {
          const auto& id = member->cast_to<const RTCStatsMember<std::string>>();
          expected_ids.insert(&(*id));
        }
      } else if (member->type() == RTCStatsMemberInterface::kSequenceString) {
        if (absl::EndsWith(member->name(), "Ids")) {
          const auto& ids =
              member->cast_to<const RTCStatsMember<std::vector<std::string>>>();
          for (const std::string& id : *ids)
            expected_ids.insert(&id);
        }
      }
    }

    // Compare as sets. Comparing by pointer is intentional:
    // GetStatsReferencedIds() returns pointers into `stats` itself.
    std::vector<const std::string*> neighbor_ids = GetStatsReferencedIds(stats);
    EXPECT_EQ(neighbor_ids.size(), expected_ids.size());
    for (const std::string* neighbor_id : neighbor_ids) {
      EXPECT_THAT(expected_ids, Contains(neighbor_id));
    }
    for (const std::string* expected_id : expected_ids) {
      EXPECT_THAT(neighbor_ids, Contains(expected_id));
    }
  }
}
+
// Guards against two members of the same stats dictionary accidentally being
// exposed under the same name.
TEST_F(RTCStatsIntegrationTest, GetStatsContainsNoDuplicateMembers) {
  StartCall();

  rtc::scoped_refptr<const RTCStatsReport> report = GetStatsFromCallee();
  for (const RTCStats& stats : *report) {
    std::set<std::string> member_names;
    for (const auto* member : stats.Members()) {
      EXPECT_TRUE(member_names.find(member->name()) == member_names.end())
          << member->name() << " is a duplicate!";
      member_names.insert(member->name());
    }
  }
}
+#endif // WEBRTC_HAVE_SCTP
+
+} // namespace
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/rtc_stats_traversal.cc b/third_party/libwebrtc/pc/rtc_stats_traversal.cc
new file mode 100644
index 0000000000..04de55028c
--- /dev/null
+++ b/third_party/libwebrtc/pc/rtc_stats_traversal.cc
@@ -0,0 +1,133 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/rtc_stats_traversal.h"
+
+#include <memory>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "api/stats/rtcstats_objects.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+
+namespace {
+
// One DFS step of the stats selection traversal: moves the stats object with
// id `current_id` from `report` into `visited_report` (if still present) and
// then recurses into every stats object it references. Because visited nodes
// are removed from `report`, revisiting a node (cycles, diamonds) is a no-op.
// NOTE(review): recursion depth is bounded by the longest reference chain in
// the report; assumed small for real stats graphs.
void TraverseAndTakeVisitedStats(RTCStatsReport* report,
                                 RTCStatsReport* visited_report,
                                 const std::string& current_id) {
  // Mark current stats object as visited by moving it from `report` to
  // `visited_report`.
  std::unique_ptr<const RTCStats> current = report->Take(current_id);
  if (!current) {
    // This node has already been visited (or it is an invalid id).
    return;
  }
  // The returned pointers point into the stats object itself, so they remain
  // valid after ownership is transferred to `visited_report` below.
  std::vector<const std::string*> neighbor_ids =
      GetStatsReferencedIds(*current);
  visited_report->AddStats(std::move(current));

  // Recursively traverse all neighbors.
  for (const auto* neighbor_id : neighbor_ids) {
    TraverseAndTakeVisitedStats(report, visited_report, *neighbor_id);
  }
}
+
+void AddIdIfDefined(const RTCStatsMember<std::string>& id,
+ std::vector<const std::string*>* neighbor_ids) {
+ if (id.is_defined())
+ neighbor_ids->push_back(&(*id));
+}
+
+} // namespace
+
+rtc::scoped_refptr<RTCStatsReport> TakeReferencedStats(
+ rtc::scoped_refptr<RTCStatsReport> report,
+ const std::vector<std::string>& ids) {
+ rtc::scoped_refptr<RTCStatsReport> result =
+ RTCStatsReport::Create(report->timestamp());
+ for (const auto& id : ids) {
+ TraverseAndTakeVisitedStats(report.get(), result.get(), id);
+ }
+ return result;
+}
+
// Returns pointers to the id-reference members of `stats`, dispatching on its
// dictionary type. Note: `type` is compared by pointer, which assumes
// RTCStats::type() returns the same string constant as the class's kType --
// true for all RTCStats subclasses generated by the stats macros.
std::vector<const std::string*> GetStatsReferencedIds(const RTCStats& stats) {
  std::vector<const std::string*> neighbor_ids;
  const char* type = stats.type();
  if (type == RTCCertificateStats::kType) {
    const auto& certificate = static_cast<const RTCCertificateStats&>(stats);
    AddIdIfDefined(certificate.issuer_certificate_id, &neighbor_ids);
  } else if (type == RTCCodecStats::kType) {
    const auto& codec = static_cast<const RTCCodecStats&>(stats);
    AddIdIfDefined(codec.transport_id, &neighbor_ids);
  } else if (type == RTCDataChannelStats::kType) {
    // RTCDataChannelStats does not have any neighbor references.
  } else if (type == RTCIceCandidatePairStats::kType) {
    const auto& candidate_pair =
        static_cast<const RTCIceCandidatePairStats&>(stats);
    AddIdIfDefined(candidate_pair.transport_id, &neighbor_ids);
    AddIdIfDefined(candidate_pair.local_candidate_id, &neighbor_ids);
    AddIdIfDefined(candidate_pair.remote_candidate_id, &neighbor_ids);
  } else if (type == RTCLocalIceCandidateStats::kType ||
             type == RTCRemoteIceCandidateStats::kType) {
    // Local and remote candidates share the RTCIceCandidateStats base.
    const auto& local_or_remote_candidate =
        static_cast<const RTCIceCandidateStats&>(stats);
    AddIdIfDefined(local_or_remote_candidate.transport_id, &neighbor_ids);
  } else if (type == RTCPeerConnectionStats::kType) {
    // RTCPeerConnectionStats does not have any neighbor references.
  } else if (type == RTCInboundRtpStreamStats::kType) {
    const auto& inbound_rtp =
        static_cast<const RTCInboundRtpStreamStats&>(stats);
    AddIdIfDefined(inbound_rtp.remote_id, &neighbor_ids);
    AddIdIfDefined(inbound_rtp.transport_id, &neighbor_ids);
    AddIdIfDefined(inbound_rtp.codec_id, &neighbor_ids);
    AddIdIfDefined(inbound_rtp.playout_id, &neighbor_ids);
  } else if (type == RTCOutboundRtpStreamStats::kType) {
    const auto& outbound_rtp =
        static_cast<const RTCOutboundRtpStreamStats&>(stats);
    AddIdIfDefined(outbound_rtp.remote_id, &neighbor_ids);
    AddIdIfDefined(outbound_rtp.transport_id, &neighbor_ids);
    AddIdIfDefined(outbound_rtp.codec_id, &neighbor_ids);
    AddIdIfDefined(outbound_rtp.media_source_id, &neighbor_ids);
  } else if (type == RTCRemoteInboundRtpStreamStats::kType) {
    const auto& remote_inbound_rtp =
        static_cast<const RTCRemoteInboundRtpStreamStats&>(stats);
    AddIdIfDefined(remote_inbound_rtp.transport_id, &neighbor_ids);
    AddIdIfDefined(remote_inbound_rtp.codec_id, &neighbor_ids);
    AddIdIfDefined(remote_inbound_rtp.local_id, &neighbor_ids);
  } else if (type == RTCRemoteOutboundRtpStreamStats::kType) {
    const auto& remote_outbound_rtp =
        static_cast<const RTCRemoteOutboundRtpStreamStats&>(stats);
    // Inherited from `RTCRTPStreamStats`.
    AddIdIfDefined(remote_outbound_rtp.transport_id, &neighbor_ids);
    AddIdIfDefined(remote_outbound_rtp.codec_id, &neighbor_ids);
    // Direct members of `RTCRemoteOutboundRtpStreamStats`.
    AddIdIfDefined(remote_outbound_rtp.local_id, &neighbor_ids);
  } else if (type == RTCAudioSourceStats::kType ||
             type == RTCVideoSourceStats::kType) {
    // RTC[Audio/Video]SourceStats does not have any neighbor references.
  } else if (type == RTCTransportStats::kType) {
    const auto& transport = static_cast<const RTCTransportStats&>(stats);
    AddIdIfDefined(transport.rtcp_transport_stats_id, &neighbor_ids);
    AddIdIfDefined(transport.selected_candidate_pair_id, &neighbor_ids);
    AddIdIfDefined(transport.local_certificate_id, &neighbor_ids);
    AddIdIfDefined(transport.remote_certificate_id, &neighbor_ids);
  } else if (type == RTCAudioPlayoutStats::kType) {
    // RTCAudioPlayoutStats does not have any neighbor references.
  } else {
    // Every stats type must be listed here; see the integration test
    // GetStatsReferencedIds which guards against forgotten updates.
    RTC_DCHECK_NOTREACHED() << "Unrecognized type: " << type;
  }
  return neighbor_ids;
}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/rtc_stats_traversal.h b/third_party/libwebrtc/pc/rtc_stats_traversal.h
new file mode 100644
index 0000000000..ec4d51cc52
--- /dev/null
+++ b/third_party/libwebrtc/pc/rtc_stats_traversal.h
@@ -0,0 +1,44 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
#ifndef PC_RTC_STATS_TRAVERSAL_H_
#define PC_RTC_STATS_TRAVERSAL_H_

#include <string>
#include <vector>

#include "api/scoped_refptr.h"
#include "api/stats/rtc_stats.h"
#include "api/stats/rtc_stats_report.h"

namespace webrtc {

// Traverses the stats graph, taking all stats objects that are directly or
// indirectly accessible from and including the stats objects identified by
// `ids`, returning them as a new stats report.
// This is meant to be used to implement the stats selection algorithm.
// https://w3c.github.io/webrtc-pc/#dfn-stats-selection-algorithm
rtc::scoped_refptr<RTCStatsReport> TakeReferencedStats(
    rtc::scoped_refptr<RTCStatsReport> report,
    const std::vector<std::string>& ids);

// Gets pointers to the string values of any members in `stats` that are used as
// references for looking up other stats objects in the same report by ID. The
// pointers are valid for the lifetime of `stats` assuming its members are not
// modified.
//
// For example, RTCCodecStats contains "transportId"
// (RTCCodecStats::transport_id) referencing an RTCTransportStats.
// https://w3c.github.io/webrtc-stats/#dom-rtccodecstats-transportid
std::vector<const std::string*> GetStatsReferencedIds(const RTCStats& stats);

}  // namespace webrtc

#endif  // PC_RTC_STATS_TRAVERSAL_H_
diff --git a/third_party/libwebrtc/pc/rtc_stats_traversal_unittest.cc b/third_party/libwebrtc/pc/rtc_stats_traversal_unittest.cc
new file mode 100644
index 0000000000..72ad255564
--- /dev/null
+++ b/third_party/libwebrtc/pc/rtc_stats_traversal_unittest.cc
@@ -0,0 +1,210 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/rtc_stats_traversal.h"
+
+#include <memory>
+#include <vector>
+
+#include "api/stats/rtcstats_objects.h"
+#include "test/gtest.h"
+
+// This file contains tests for TakeReferencedStats().
+// GetStatsReferencedIds() is tested in rtc_stats_integrationtest.cc.
+
+namespace webrtc {
+
// Fixture providing a four-node stats graph (transport, candidate pair, and
// two candidates); tests wire up references between the nodes and then check
// which nodes TakeReferencedStats() moves from `initial_report_` to `result_`.
class RTCStatsTraversalTest : public ::testing::Test {
 public:
  RTCStatsTraversalTest() {
    // Raw pointers are kept so tests can set reference members after
    // ownership has been handed to `initial_report_` below.
    transport_ = new RTCTransportStats("transport", Timestamp::Zero());
    candidate_pair_ =
        new RTCIceCandidatePairStats("candidate-pair", Timestamp::Zero());
    local_candidate_ =
        new RTCLocalIceCandidateStats("local-candidate", Timestamp::Zero());
    remote_candidate_ =
        new RTCRemoteIceCandidateStats("remote-candidate", Timestamp::Zero());
    initial_report_ = RTCStatsReport::Create(Timestamp::Zero());
    initial_report_->AddStats(std::unique_ptr<const RTCStats>(transport_));
    initial_report_->AddStats(std::unique_ptr<const RTCStats>(candidate_pair_));
    initial_report_->AddStats(
        std::unique_ptr<const RTCStats>(local_candidate_));
    initial_report_->AddStats(
        std::unique_ptr<const RTCStats>(remote_candidate_));
    result_ = RTCStatsReport::Create(Timestamp::Zero());
  }

  // Shadows the free function: resolves `start_nodes` to their ids and runs
  // webrtc::TakeReferencedStats(), storing the outcome in `result_`.
  void TakeReferencedStats(std::vector<const RTCStats*> start_nodes) {
    std::vector<std::string> start_ids;
    start_ids.reserve(start_nodes.size());
    for (const RTCStats* start_node : start_nodes) {
      start_ids.push_back(start_node->id());
    }
    result_ = webrtc::TakeReferencedStats(initial_report_, start_ids);
  }

  // Asserts that `stats` was moved from the initial report into the result.
  void EXPECT_VISITED(const RTCStats* stats) {
    EXPECT_FALSE(initial_report_->Get(stats->id()))
        << '"' << stats->id()
        << "\" should be visited but it was not removed from initial report.";
    EXPECT_TRUE(result_->Get(stats->id()))
        << '"' << stats->id()
        << "\" should be visited but it was not added to the resulting report.";
  }

  // Asserts that `stats` stayed in the initial report and is not in the
  // result.
  void EXPECT_UNVISITED(const RTCStats* stats) {
    EXPECT_TRUE(initial_report_->Get(stats->id()))
        << '"' << stats->id()
        << "\" should not be visited but it was removed from initial report.";
    EXPECT_FALSE(result_->Get(stats->id()))
        << '"' << stats->id()
        << "\" should not be visited but it was added to the resulting report.";
  }

 protected:
  rtc::scoped_refptr<RTCStatsReport> initial_report_;
  rtc::scoped_refptr<RTCStatsReport> result_;
  // Raw pointers to stats owned by the reports.
  RTCTransportStats* transport_;
  RTCIceCandidatePairStats* candidate_pair_;
  RTCIceCandidateStats* local_candidate_;
  RTCIceCandidateStats* remote_candidate_;
};
+
TEST_F(RTCStatsTraversalTest, NoReachableConnections) {
  // Everything references transport but transport doesn't reference anything.
  //
  // candidate-pair
  //    |    |  |
  //    v    |  v
  // local-candidate | remote-candidate
  //    |            |   |
  //    v            v   v
  //     start:transport
  candidate_pair_->transport_id = "transport";
  candidate_pair_->local_candidate_id = "local-candidate";
  candidate_pair_->remote_candidate_id = "remote-candidate";
  local_candidate_->transport_id = "transport";
  remote_candidate_->transport_id = "transport";
  TakeReferencedStats({transport_});
  EXPECT_VISITED(transport_);
  EXPECT_UNVISITED(candidate_pair_);
  EXPECT_UNVISITED(local_candidate_);
  EXPECT_UNVISITED(remote_candidate_);
}

// A node referencing itself must terminate rather than recurse forever.
TEST_F(RTCStatsTraversalTest, SelfReference) {
  transport_->rtcp_transport_stats_id = "transport";
  TakeReferencedStats({transport_});
  EXPECT_VISITED(transport_);
  EXPECT_UNVISITED(candidate_pair_);
  EXPECT_UNVISITED(local_candidate_);
  EXPECT_UNVISITED(remote_candidate_);
}

// References to ids that don't exist in the report are silently ignored.
TEST_F(RTCStatsTraversalTest, BogusReference) {
  transport_->rtcp_transport_stats_id = "bogus-reference";
  TakeReferencedStats({transport_});
  EXPECT_VISITED(transport_);
  EXPECT_UNVISITED(candidate_pair_);
  EXPECT_UNVISITED(local_candidate_);
  EXPECT_UNVISITED(remote_candidate_);
}
+
// A simple tree: everything reachable from the root is taken.
TEST_F(RTCStatsTraversalTest, Tree) {
  //           start:candidate-pair
  //                  |     |
  //                  v     v
  //    local-candidate     remote-candidate
  //                  |
  //                  v
  //              transport
  candidate_pair_->local_candidate_id = "local-candidate";
  candidate_pair_->remote_candidate_id = "remote-candidate";
  local_candidate_->transport_id = "transport";
  TakeReferencedStats({candidate_pair_});
  EXPECT_VISITED(transport_);
  EXPECT_VISITED(candidate_pair_);
  EXPECT_VISITED(local_candidate_);
  EXPECT_VISITED(remote_candidate_);
}

// A diamond: reaching the same node along two paths takes it exactly once.
TEST_F(RTCStatsTraversalTest, MultiplePathsToSameNode) {
  //           start:candidate-pair
  //                  |     |
  //                  v     v
  //    local-candidate     remote-candidate
  //                  |     |
  //                  v     v
  //              transport
  candidate_pair_->local_candidate_id = "local-candidate";
  candidate_pair_->remote_candidate_id = "remote-candidate";
  local_candidate_->transport_id = "transport";
  remote_candidate_->transport_id = "transport";
  TakeReferencedStats({candidate_pair_});
  EXPECT_VISITED(transport_);
  EXPECT_VISITED(candidate_pair_);
  EXPECT_VISITED(local_candidate_);
  EXPECT_VISITED(remote_candidate_);
}

// A cycle: traversal terminates and takes every node on the cycle.
TEST_F(RTCStatsTraversalTest, CyclicGraph) {
  //    candidate-pair
  //      |        ^
  //      v        |
  //  start:local-candidate  |  remote-candidate
  //      |        |
  //      v        |
  //    transport
  local_candidate_->transport_id = "transport";
  transport_->selected_candidate_pair_id = "candidate-pair";
  candidate_pair_->local_candidate_id = "local-candidate";
  TakeReferencedStats({local_candidate_});
  EXPECT_VISITED(transport_);
  EXPECT_VISITED(candidate_pair_);
  EXPECT_VISITED(local_candidate_);
  EXPECT_UNVISITED(remote_candidate_);
}
+
// Traversal may be started from several roots; the union of the reachable
// sets is taken.
TEST_F(RTCStatsTraversalTest, MultipleStarts) {
  //  start:candidate-pair
  //                 |
  //                 v
  //  local-candidate   remote-candidate
  //         |
  //         v
  //  start:transport
  candidate_pair_->remote_candidate_id = "remote-candidate";
  local_candidate_->transport_id = "transport";
  TakeReferencedStats({candidate_pair_, transport_});
  EXPECT_VISITED(transport_);
  EXPECT_VISITED(candidate_pair_);
  EXPECT_UNVISITED(local_candidate_);
  EXPECT_VISITED(remote_candidate_);
}

// Two roots converging on the same node take it exactly once.
TEST_F(RTCStatsTraversalTest, MultipleStartsLeadingToSameNode) {
  //  candidate-pair
  //
  //
  //  start:local-candidate   start:remote-candidate
  //         |                       |
  //         v                       v
  //              transport
  local_candidate_->transport_id = "transport";
  remote_candidate_->transport_id = "transport";
  TakeReferencedStats({local_candidate_, remote_candidate_});
  EXPECT_VISITED(transport_);
  EXPECT_UNVISITED(candidate_pair_);
  EXPECT_VISITED(local_candidate_);
  EXPECT_VISITED(remote_candidate_);
}
diff --git a/third_party/libwebrtc/pc/rtcp_mux_filter.cc b/third_party/libwebrtc/pc/rtcp_mux_filter.cc
new file mode 100644
index 0000000000..62adea2243
--- /dev/null
+++ b/third_party/libwebrtc/pc/rtcp_mux_filter.cc
@@ -0,0 +1,125 @@
+/*
+ * Copyright 2004 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/rtcp_mux_filter.h"
+
+#include "rtc_base/logging.h"
+
+namespace cricket {
+
// Starts in ST_INIT with no mux offered; state only advances via SetOffer /
// Set(Provisional)Answer / SetActive.
RtcpMuxFilter::RtcpMuxFilter() : state_(ST_INIT), offer_enable_(false) {}

// True once a *final* answer has enabled RTCP mux (or SetActive() was called).
bool RtcpMuxFilter::IsFullyActive() const {
  return state_ == ST_ACTIVE;
}

// True while mux is enabled only by a provisional answer; a later final
// answer may still disable it.
bool RtcpMuxFilter::IsProvisionallyActive() const {
  return state_ == ST_SENTPRANSWER || state_ == ST_RECEIVEDPRANSWER;
}

// Active in either the final or the provisional sense.
bool RtcpMuxFilter::IsActive() const {
  return IsFullyActive() || IsProvisionallyActive();
}

// Force full activation, e.g. when the endpoint requires RTCP mux.
void RtcpMuxFilter::SetActive() {
  state_ = ST_ACTIVE;
}
+
+bool RtcpMuxFilter::SetOffer(bool offer_enable, ContentSource src) {
+ if (state_ == ST_ACTIVE) {
+ // Fail if we try to deactivate and no-op if we try and activate.
+ return offer_enable;
+ }
+
+ if (!ExpectOffer(offer_enable, src)) {
+ RTC_LOG(LS_ERROR) << "Invalid state for change of RTCP mux offer";
+ return false;
+ }
+
+ offer_enable_ = offer_enable;
+ state_ = (src == CS_LOCAL) ? ST_SENTOFFER : ST_RECEIVEDOFFER;
+ return true;
+}
+
+bool RtcpMuxFilter::SetProvisionalAnswer(bool answer_enable,
+ ContentSource src) {
+ if (state_ == ST_ACTIVE) {
+ // Fail if we try to deactivate and no-op if we try and activate.
+ return answer_enable;
+ }
+
+ if (!ExpectAnswer(src)) {
+ RTC_LOG(LS_ERROR) << "Invalid state for RTCP mux provisional answer";
+ return false;
+ }
+
+ if (offer_enable_) {
+ if (answer_enable) {
+ if (src == CS_REMOTE)
+ state_ = ST_RECEIVEDPRANSWER;
+ else // CS_LOCAL
+ state_ = ST_SENTPRANSWER;
+ } else {
+ // The provisional answer doesn't want to use RTCP mux.
+ // Go back to the original state after the offer was set and wait for next
+ // provisional or final answer.
+ if (src == CS_REMOTE)
+ state_ = ST_SENTOFFER;
+ else // CS_LOCAL
+ state_ = ST_RECEIVEDOFFER;
+ }
+ } else if (answer_enable) {
+ // If the offer didn't specify RTCP mux, the answer shouldn't either.
+ RTC_LOG(LS_WARNING) << "Invalid parameters in RTCP mux provisional answer";
+ return false;
+ }
+
+ return true;
+}
+
+bool RtcpMuxFilter::SetAnswer(bool answer_enable, ContentSource src) {
+ if (state_ == ST_ACTIVE) {
+ // Fail if we try to deactivate and no-op if we try and activate.
+ return answer_enable;
+ }
+
+ if (!ExpectAnswer(src)) {
+ RTC_LOG(LS_ERROR) << "Invalid state for RTCP mux answer, state is "
+ << state_ << ", source is " << src;
+ return false;
+ }
+
+ if (offer_enable_ && answer_enable) {
+ state_ = ST_ACTIVE;
+ } else if (answer_enable) {
+ // If the offer didn't specify RTCP mux, the answer shouldn't either.
+ RTC_LOG(LS_WARNING) << "Invalid parameters in RTCP mux answer";
+ return false;
+ } else {
+ state_ = ST_INIT;
+ }
+ return true;
+}
+
+bool RtcpMuxFilter::ExpectOffer(bool offer_enable, ContentSource source) {
+ return ((state_ == ST_INIT) ||
+ (state_ == ST_ACTIVE && offer_enable == offer_enable_) ||
+ (state_ == ST_SENTOFFER && source == CS_LOCAL) ||
+ (state_ == ST_RECEIVEDOFFER && source == CS_REMOTE));
+}
+
+bool RtcpMuxFilter::ExpectAnswer(ContentSource source) {
+ return ((state_ == ST_SENTOFFER && source == CS_REMOTE) ||
+ (state_ == ST_RECEIVEDOFFER && source == CS_LOCAL) ||
+ (state_ == ST_SENTPRANSWER && source == CS_LOCAL) ||
+ (state_ == ST_RECEIVEDPRANSWER && source == CS_REMOTE));
+}
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/pc/rtcp_mux_filter.h b/third_party/libwebrtc/pc/rtcp_mux_filter.h
new file mode 100644
index 0000000000..48050de3d8
--- /dev/null
+++ b/third_party/libwebrtc/pc/rtcp_mux_filter.h
@@ -0,0 +1,78 @@
+/*
+ * Copyright 2004 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_RTCP_MUX_FILTER_H_
+#define PC_RTCP_MUX_FILTER_H_
+
+#include "pc/session_description.h"
+
+namespace cricket {
+
// RTCP Muxer, as defined in RFC 5761 (http://tools.ietf.org/html/rfc5761)
class RtcpMuxFilter {
 public:
  RtcpMuxFilter();

  // Whether RTCP mux has been negotiated with a final answer (not provisional).
  bool IsFullyActive() const;

  // Whether RTCP mux has been negotiated with a provisional answer; this means
  // a later answer could disable RTCP mux, and so the RTCP transport should
  // not be disposed yet.
  bool IsProvisionallyActive() const;

  // Whether the filter is active, i.e. has RTCP mux been properly negotiated,
  // either with a final or provisional answer.
  bool IsActive() const;

  // Make the filter active (fully, not provisionally) regardless of the
  // current state. This should be used when an endpoint *requires* RTCP mux.
  void SetActive();

  // Specifies whether the offer indicates the use of RTCP mux.
  bool SetOffer(bool offer_enable, ContentSource src);

  // Specifies whether the provisional answer indicates the use of RTCP mux.
  bool SetProvisionalAnswer(bool answer_enable, ContentSource src);

  // Specifies whether the answer indicates the use of RTCP mux.
  bool SetAnswer(bool answer_enable, ContentSource src);

 private:
  // Whether an offer (enabling or not) is valid in the current state.
  bool ExpectOffer(bool offer_enable, ContentSource source);
  // Whether a (provisional or final) answer from `source` is valid now.
  bool ExpectAnswer(ContentSource source);
  enum State {
    // RTCP mux filter unused.
    ST_INIT,
    // Offer with RTCP mux enabled received.
    // RTCP mux filter is not active.
    ST_RECEIVEDOFFER,
    // Offer with RTCP mux enabled sent.
    // RTCP mux filter can demux incoming packets but is not active.
    ST_SENTOFFER,
    // RTCP mux filter is active but the sent answer is only provisional.
    // When the final answer is set, the state transitions to ST_ACTIVE or
    // ST_INIT.
    ST_SENTPRANSWER,
    // RTCP mux filter is active but the received answer is only provisional.
    // When the final answer is set, the state transitions to ST_ACTIVE or
    // ST_INIT.
    ST_RECEIVEDPRANSWER,
    // Offer and answer set, RTCP mux enabled. It is not possible to de-activate
    // the filter.
    ST_ACTIVE
  };
  State state_;         // Current negotiation state.
  bool offer_enable_;   // Whether the last offer set enabled RTCP mux.
};
+
+} // namespace cricket
+
+#endif // PC_RTCP_MUX_FILTER_H_
diff --git a/third_party/libwebrtc/pc/rtcp_mux_filter_unittest.cc b/third_party/libwebrtc/pc/rtcp_mux_filter_unittest.cc
new file mode 100644
index 0000000000..586da23df8
--- /dev/null
+++ b/third_party/libwebrtc/pc/rtcp_mux_filter_unittest.cc
@@ -0,0 +1,192 @@
+/*
+ * Copyright 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/rtcp_mux_filter.h"
+
+#include "test/gtest.h"
+
// A local offer followed by a remote final answer, both enabling mux, fully
// activates the filter.
TEST(RtcpMuxFilterTest, IsActiveSender) {
  cricket::RtcpMuxFilter filter;
  // Init state - not active
  EXPECT_FALSE(filter.IsActive());
  EXPECT_FALSE(filter.IsProvisionallyActive());
  EXPECT_FALSE(filter.IsFullyActive());
  // After sent offer, demux should not be active.
  filter.SetOffer(true, cricket::CS_LOCAL);
  EXPECT_FALSE(filter.IsActive());
  EXPECT_FALSE(filter.IsProvisionallyActive());
  EXPECT_FALSE(filter.IsFullyActive());
  // Remote accepted, filter is now active.
  filter.SetAnswer(true, cricket::CS_REMOTE);
  EXPECT_TRUE(filter.IsActive());
  EXPECT_FALSE(filter.IsProvisionallyActive());
  EXPECT_TRUE(filter.IsFullyActive());
}

// Test that we can receive provisional answer and final answer.
TEST(RtcpMuxFilterTest, ReceivePrAnswer) {
  cricket::RtcpMuxFilter filter;
  filter.SetOffer(true, cricket::CS_LOCAL);
  // Received provisional answer with mux enabled.
  EXPECT_TRUE(filter.SetProvisionalAnswer(true, cricket::CS_REMOTE));
  // We are now provisionally active since both sender and receiver support mux.
  EXPECT_TRUE(filter.IsActive());
  EXPECT_TRUE(filter.IsProvisionallyActive());
  EXPECT_FALSE(filter.IsFullyActive());
  // Received provisional answer with mux disabled.
  EXPECT_TRUE(filter.SetProvisionalAnswer(false, cricket::CS_REMOTE));
  // We are now inactive since the receiver doesn't support mux.
  EXPECT_FALSE(filter.IsActive());
  EXPECT_FALSE(filter.IsProvisionallyActive());
  EXPECT_FALSE(filter.IsFullyActive());
  // Received final answer with mux enabled.
  EXPECT_TRUE(filter.SetAnswer(true, cricket::CS_REMOTE));
  EXPECT_TRUE(filter.IsActive());
  EXPECT_FALSE(filter.IsProvisionallyActive());
  EXPECT_TRUE(filter.IsFullyActive());
}
+
// Mirror of IsActiveSender: a remote offer answered locally activates mux.
TEST(RtcpMuxFilterTest, IsActiveReceiver) {
  cricket::RtcpMuxFilter filter;
  // Init state - not active.
  EXPECT_FALSE(filter.IsActive());
  EXPECT_FALSE(filter.IsProvisionallyActive());
  EXPECT_FALSE(filter.IsFullyActive());
  // After received offer, demux should not be active
  filter.SetOffer(true, cricket::CS_REMOTE);
  EXPECT_FALSE(filter.IsActive());
  EXPECT_FALSE(filter.IsProvisionallyActive());
  EXPECT_FALSE(filter.IsFullyActive());
  // We accept, filter is now active
  filter.SetAnswer(true, cricket::CS_LOCAL);
  EXPECT_TRUE(filter.IsActive());
  EXPECT_FALSE(filter.IsProvisionallyActive());
  EXPECT_TRUE(filter.IsFullyActive());
}

// Test that we can send provisional answer and final answer.
TEST(RtcpMuxFilterTest, SendPrAnswer) {
  cricket::RtcpMuxFilter filter;
  filter.SetOffer(true, cricket::CS_REMOTE);
  // Send provisional answer with mux enabled.
  EXPECT_TRUE(filter.SetProvisionalAnswer(true, cricket::CS_LOCAL));
  EXPECT_TRUE(filter.IsActive());
  EXPECT_TRUE(filter.IsProvisionallyActive());
  EXPECT_FALSE(filter.IsFullyActive());
  // Send provisional answer with mux disabled.
  EXPECT_TRUE(filter.SetProvisionalAnswer(false, cricket::CS_LOCAL));
  EXPECT_FALSE(filter.IsActive());
  EXPECT_FALSE(filter.IsProvisionallyActive());
  EXPECT_FALSE(filter.IsFullyActive());
  // Send final answer with mux enabled.
  EXPECT_TRUE(filter.SetAnswer(true, cricket::CS_LOCAL));
  EXPECT_TRUE(filter.IsActive());
  EXPECT_FALSE(filter.IsProvisionallyActive());
  EXPECT_TRUE(filter.IsFullyActive());
}
+
// Test that we can enable the filter in an update.
// We can not disable the filter later since that would mean we need to
// recreate a rtcp transport channel.
TEST(RtcpMuxFilterTest, EnableFilterDuringUpdate) {
  cricket::RtcpMuxFilter filter;
  EXPECT_FALSE(filter.IsActive());
  EXPECT_TRUE(filter.SetOffer(false, cricket::CS_REMOTE));
  EXPECT_TRUE(filter.SetAnswer(false, cricket::CS_LOCAL));
  EXPECT_FALSE(filter.IsActive());

  EXPECT_TRUE(filter.SetOffer(true, cricket::CS_REMOTE));
  EXPECT_TRUE(filter.SetAnswer(true, cricket::CS_LOCAL));
  EXPECT_TRUE(filter.IsActive());

  // Once active, attempts to disable are rejected and the filter stays active.
  EXPECT_FALSE(filter.SetOffer(false, cricket::CS_REMOTE));
  EXPECT_FALSE(filter.SetAnswer(false, cricket::CS_LOCAL));
  EXPECT_TRUE(filter.IsActive());
}

// Test that SetOffer can be called twice.
TEST(RtcpMuxFilterTest, SetOfferTwice) {
  cricket::RtcpMuxFilter filter;

  EXPECT_TRUE(filter.SetOffer(true, cricket::CS_REMOTE));
  EXPECT_TRUE(filter.SetOffer(true, cricket::CS_REMOTE));
  EXPECT_TRUE(filter.SetAnswer(true, cricket::CS_LOCAL));
  EXPECT_TRUE(filter.IsActive());

  // Repeating a local offer that disables mux is also allowed.
  cricket::RtcpMuxFilter filter2;
  EXPECT_TRUE(filter2.SetOffer(false, cricket::CS_LOCAL));
  EXPECT_TRUE(filter2.SetOffer(false, cricket::CS_LOCAL));
  EXPECT_TRUE(filter2.SetAnswer(false, cricket::CS_REMOTE));
  EXPECT_FALSE(filter2.IsActive());
}
+
// Test that the filter can be enabled twice.
TEST(RtcpMuxFilterTest, EnableFilterTwiceDuringUpdate) {
  cricket::RtcpMuxFilter filter;

  EXPECT_TRUE(filter.SetOffer(true, cricket::CS_REMOTE));
  EXPECT_TRUE(filter.SetAnswer(true, cricket::CS_LOCAL));
  EXPECT_TRUE(filter.IsActive());

  // A renegotiation that keeps mux enabled is a no-op on an active filter.
  EXPECT_TRUE(filter.SetOffer(true, cricket::CS_REMOTE));
  EXPECT_TRUE(filter.SetAnswer(true, cricket::CS_LOCAL));
  EXPECT_TRUE(filter.IsActive());
}

// Test that the filter can be kept disabled during updates.
TEST(RtcpMuxFilterTest, KeepFilterDisabledDuringUpdate) {
  cricket::RtcpMuxFilter filter;

  EXPECT_TRUE(filter.SetOffer(false, cricket::CS_REMOTE));
  EXPECT_TRUE(filter.SetAnswer(false, cricket::CS_LOCAL));
  EXPECT_FALSE(filter.IsActive());

  EXPECT_TRUE(filter.SetOffer(false, cricket::CS_REMOTE));
  EXPECT_TRUE(filter.SetAnswer(false, cricket::CS_LOCAL));
  EXPECT_FALSE(filter.IsActive());
}
+
// Test that we can SetActive and then can't deactivate: every disabling
// offer/answer is rejected while an enabling one is accepted as a no-op, in
// both negotiation directions.
TEST(RtcpMuxFilterTest, SetActiveCantDeactivate) {
  cricket::RtcpMuxFilter filter;

  filter.SetActive();
  EXPECT_TRUE(filter.IsActive());

  EXPECT_FALSE(filter.SetOffer(false, cricket::CS_LOCAL));
  EXPECT_TRUE(filter.IsActive());
  EXPECT_TRUE(filter.SetOffer(true, cricket::CS_LOCAL));
  EXPECT_TRUE(filter.IsActive());

  EXPECT_FALSE(filter.SetProvisionalAnswer(false, cricket::CS_REMOTE));
  EXPECT_TRUE(filter.IsActive());
  EXPECT_TRUE(filter.SetProvisionalAnswer(true, cricket::CS_REMOTE));
  EXPECT_TRUE(filter.IsActive());

  EXPECT_FALSE(filter.SetAnswer(false, cricket::CS_REMOTE));
  EXPECT_TRUE(filter.IsActive());
  EXPECT_TRUE(filter.SetAnswer(true, cricket::CS_REMOTE));
  EXPECT_TRUE(filter.IsActive());

  EXPECT_FALSE(filter.SetOffer(false, cricket::CS_REMOTE));
  EXPECT_TRUE(filter.IsActive());
  EXPECT_TRUE(filter.SetOffer(true, cricket::CS_REMOTE));
  EXPECT_TRUE(filter.IsActive());

  EXPECT_FALSE(filter.SetProvisionalAnswer(false, cricket::CS_LOCAL));
  EXPECT_TRUE(filter.IsActive());
  EXPECT_TRUE(filter.SetProvisionalAnswer(true, cricket::CS_LOCAL));
  EXPECT_TRUE(filter.IsActive());

  EXPECT_FALSE(filter.SetAnswer(false, cricket::CS_LOCAL));
  EXPECT_TRUE(filter.IsActive());
  EXPECT_TRUE(filter.SetAnswer(true, cricket::CS_LOCAL));
  EXPECT_TRUE(filter.IsActive());
}
diff --git a/third_party/libwebrtc/pc/rtp_media_utils.cc b/third_party/libwebrtc/pc/rtp_media_utils.cc
new file mode 100644
index 0000000000..52c5bb0eac
--- /dev/null
+++ b/third_party/libwebrtc/pc/rtp_media_utils.cc
@@ -0,0 +1,98 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/rtp_media_utils.h"
+
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+
+// Maps the (send, recv) boolean pair onto the four non-stopped
+// RtpTransceiverDirection values.
+RtpTransceiverDirection RtpTransceiverDirectionFromSendRecv(bool send,
+                                                            bool recv) {
+  if (send && recv) {
+    return RtpTransceiverDirection::kSendRecv;
+  } else if (send && !recv) {
+    return RtpTransceiverDirection::kSendOnly;
+  } else if (!send && recv) {
+    return RtpTransceiverDirection::kRecvOnly;
+  } else {
+    return RtpTransceiverDirection::kInactive;
+  }
+}
+
+// True iff `direction` includes a send component (kSendRecv or kSendOnly).
+bool RtpTransceiverDirectionHasSend(RtpTransceiverDirection direction) {
+  return direction == RtpTransceiverDirection::kSendRecv ||
+         direction == RtpTransceiverDirection::kSendOnly;
+}
+
+// True iff `direction` includes a receive component (kSendRecv or kRecvOnly).
+bool RtpTransceiverDirectionHasRecv(RtpTransceiverDirection direction) {
+  return direction == RtpTransceiverDirection::kSendRecv ||
+         direction == RtpTransceiverDirection::kRecvOnly;
+}
+
+// Swaps the send and receive components of `direction`. kSendRecv,
+// kInactive and kStopped are symmetric and map to themselves.
+RtpTransceiverDirection RtpTransceiverDirectionReversed(
+    RtpTransceiverDirection direction) {
+  switch (direction) {
+    case RtpTransceiverDirection::kSendRecv:
+    case RtpTransceiverDirection::kInactive:
+    case RtpTransceiverDirection::kStopped:
+      return direction;
+    case RtpTransceiverDirection::kSendOnly:
+      return RtpTransceiverDirection::kRecvOnly;
+    case RtpTransceiverDirection::kRecvOnly:
+      return RtpTransceiverDirection::kSendOnly;
+    default:
+      // All enumerators are handled above; this is defensive only.
+      RTC_DCHECK_NOTREACHED();
+      return direction;
+  }
+}
+
+// Returns `direction` with its send component replaced by `send`, keeping
+// the receive component unchanged.
+RtpTransceiverDirection RtpTransceiverDirectionWithSendSet(
+    RtpTransceiverDirection direction,
+    bool send) {
+  return RtpTransceiverDirectionFromSendRecv(
+      send, RtpTransceiverDirectionHasRecv(direction));
+}
+
+// Returns `direction` with its receive component replaced by `recv`, keeping
+// the send component unchanged.
+RtpTransceiverDirection RtpTransceiverDirectionWithRecvSet(
+    RtpTransceiverDirection direction,
+    bool recv) {
+  return RtpTransceiverDirectionFromSendRecv(
+      RtpTransceiverDirectionHasSend(direction), recv);
+}
+
+// Returns the enumerator name of `direction` (e.g. "kSendRecv") for logging
+// and test output; "" only if an out-of-range value reaches the DCHECK.
+const char* RtpTransceiverDirectionToString(RtpTransceiverDirection direction) {
+  switch (direction) {
+    case RtpTransceiverDirection::kSendRecv:
+      return "kSendRecv";
+    case RtpTransceiverDirection::kSendOnly:
+      return "kSendOnly";
+    case RtpTransceiverDirection::kRecvOnly:
+      return "kRecvOnly";
+    case RtpTransceiverDirection::kInactive:
+      return "kInactive";
+    case RtpTransceiverDirection::kStopped:
+      return "kStopped";
+  }
+  RTC_DCHECK_NOTREACHED();
+  return "";
+}
+
+// Component-wise AND of two directions: the result sends only if both send,
+// and receives only if both receive.
+RtpTransceiverDirection RtpTransceiverDirectionIntersection(
+    RtpTransceiverDirection lhs,
+    RtpTransceiverDirection rhs) {
+  return RtpTransceiverDirectionFromSendRecv(
+      RtpTransceiverDirectionHasSend(lhs) &&
+          RtpTransceiverDirectionHasSend(rhs),
+      RtpTransceiverDirectionHasRecv(lhs) &&
+          RtpTransceiverDirectionHasRecv(rhs));
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/rtp_media_utils.h b/third_party/libwebrtc/pc/rtp_media_utils.h
new file mode 100644
index 0000000000..240274fe05
--- /dev/null
+++ b/third_party/libwebrtc/pc/rtp_media_utils.h
@@ -0,0 +1,64 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_RTP_MEDIA_UTILS_H_
+#define PC_RTP_MEDIA_UTILS_H_
+
+#include <ostream> // no-presubmit-check TODO(webrtc:8982)
+
+#include "api/rtp_transceiver_direction.h"
+
+namespace webrtc {
+
+// Returns the RtpTransceiverDirection that satisfies specified send and receive
+// conditions.
+RtpTransceiverDirection RtpTransceiverDirectionFromSendRecv(bool send,
+ bool recv);
+
+// Returns true only if the direction will send media.
+bool RtpTransceiverDirectionHasSend(RtpTransceiverDirection direction);
+
+// Returns true only if the direction will receive media.
+bool RtpTransceiverDirectionHasRecv(RtpTransceiverDirection direction);
+
+// Returns the RtpTransceiverDirection which is the reverse of the given
+// direction.
+RtpTransceiverDirection RtpTransceiverDirectionReversed(
+ RtpTransceiverDirection direction);
+
+// Returns the RtpTransceiverDirection with its send component set to `send`.
+RtpTransceiverDirection RtpTransceiverDirectionWithSendSet(
+ RtpTransceiverDirection direction,
+ bool send = true);
+
+// Returns the RtpTransceiverDirection with its recv component set to `recv`.
+RtpTransceiverDirection RtpTransceiverDirectionWithRecvSet(
+ RtpTransceiverDirection direction,
+ bool recv = true);
+
+// Returns an unspecified string representation of the given direction.
+const char* RtpTransceiverDirectionToString(RtpTransceiverDirection direction);
+
+// Returns the intersection of the directions of two transceivers.
+RtpTransceiverDirection RtpTransceiverDirectionIntersection(
+ RtpTransceiverDirection lhs,
+ RtpTransceiverDirection rhs);
+
+#ifdef WEBRTC_UNIT_TEST
+// Stream insertion for gtest failure messages; only compiled into unit
+// tests to keep <ostream> out of production translation units.
+inline std::ostream& operator<<(  // no-presubmit-check TODO(webrtc:8982)
+    std::ostream& os,             // no-presubmit-check TODO(webrtc:8982)
+    RtpTransceiverDirection direction) {
+  return os << RtpTransceiverDirectionToString(direction);
+}
+#endif  // WEBRTC_UNIT_TEST
+
+} // namespace webrtc
+
+#endif // PC_RTP_MEDIA_UTILS_H_
diff --git a/third_party/libwebrtc/pc/rtp_media_utils_unittest.cc b/third_party/libwebrtc/pc/rtp_media_utils_unittest.cc
new file mode 100644
index 0000000000..5ee49e356d
--- /dev/null
+++ b/third_party/libwebrtc/pc/rtp_media_utils_unittest.cc
@@ -0,0 +1,97 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/rtp_media_utils.h"
+
+#include <tuple>
+
+#include "test/gtest.h"
+
+namespace webrtc {
+
+using ::testing::Bool;
+using ::testing::Combine;
+using ::testing::Values;
+using ::testing::ValuesIn;
+
+// Directions covered by the parameterized tests below. Note that kStopped
+// is not listed -- presumably excluded intentionally since it has no
+// send/recv decomposition; TODO(review): confirm.
+RtpTransceiverDirection kAllDirections[] = {
+    RtpTransceiverDirection::kSendRecv, RtpTransceiverDirection::kSendOnly,
+    RtpTransceiverDirection::kRecvOnly, RtpTransceiverDirection::kInactive};
+
+class EnumerateAllDirectionsTest
+ : public ::testing::TestWithParam<RtpTransceiverDirection> {};
+
+// Test that converting the direction to send/recv and back again results in the
+// same direction (i.e. FromSendRecv is the inverse of HasSend/HasRecv).
+TEST_P(EnumerateAllDirectionsTest, TestIdentity) {
+  RtpTransceiverDirection direction = GetParam();
+
+  bool send = RtpTransceiverDirectionHasSend(direction);
+  bool recv = RtpTransceiverDirectionHasRecv(direction);
+
+  EXPECT_EQ(direction, RtpTransceiverDirectionFromSendRecv(send, recv));
+}
+
+// Test that reversing the direction is equivalent to swapping send/recv:
+// Reversed(d) == FromSendRecv(HasRecv(d), HasSend(d)).
+TEST_P(EnumerateAllDirectionsTest, TestReversedSwapped) {
+  RtpTransceiverDirection direction = GetParam();
+
+  bool send = RtpTransceiverDirectionHasSend(direction);
+  bool recv = RtpTransceiverDirectionHasRecv(direction);
+
+  // Note the swapped argument order: (recv, send).
+  EXPECT_EQ(RtpTransceiverDirectionFromSendRecv(recv, send),
+            RtpTransceiverDirectionReversed(direction));
+}
+
+// Test that reversing the direction twice results in the same direction
+// (Reversed is an involution).
+TEST_P(EnumerateAllDirectionsTest, TestReversedIdentity) {
+  RtpTransceiverDirection direction = GetParam();
+
+  EXPECT_EQ(direction, RtpTransceiverDirectionReversed(
+                           RtpTransceiverDirectionReversed(direction)));
+}
+
+INSTANTIATE_TEST_SUITE_P(RtpTransceiverDirectionTest,
+ EnumerateAllDirectionsTest,
+ ValuesIn(kAllDirections));
+
+class EnumerateAllDirectionsAndBool
+ : public ::testing::TestWithParam<
+ std::tuple<RtpTransceiverDirection, bool>> {};
+
+// Test that WithSendSet forces the send component to the given value while
+// leaving the receive component untouched.
+TEST_P(EnumerateAllDirectionsAndBool, TestWithSendSet) {
+  RtpTransceiverDirection direction = std::get<0>(GetParam());
+  bool send = std::get<1>(GetParam());
+
+  RtpTransceiverDirection result =
+      RtpTransceiverDirectionWithSendSet(direction, send);
+
+  // Send component is exactly `send`; recv component is preserved.
+  EXPECT_EQ(send, RtpTransceiverDirectionHasSend(result));
+  EXPECT_EQ(RtpTransceiverDirectionHasRecv(direction),
+            RtpTransceiverDirectionHasRecv(result));
+}
+
+// Test that WithRecvSet forces the receive component to the given value
+// while leaving the send component untouched.
+TEST_P(EnumerateAllDirectionsAndBool, TestWithRecvSet) {
+  RtpTransceiverDirection direction = std::get<0>(GetParam());
+  bool recv = std::get<1>(GetParam());
+
+  RtpTransceiverDirection result =
+      RtpTransceiverDirectionWithRecvSet(direction, recv);
+
+  // Send component is preserved; recv component is exactly `recv`.
+  EXPECT_EQ(RtpTransceiverDirectionHasSend(direction),
+            RtpTransceiverDirectionHasSend(result));
+  EXPECT_EQ(recv, RtpTransceiverDirectionHasRecv(result));
+}
+
+INSTANTIATE_TEST_SUITE_P(RtpTransceiverDirectionTest,
+ EnumerateAllDirectionsAndBool,
+ Combine(ValuesIn(kAllDirections), Bool()));
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/rtp_parameters_conversion.cc b/third_party/libwebrtc/pc/rtp_parameters_conversion.cc
new file mode 100644
index 0000000000..2463cefe58
--- /dev/null
+++ b/third_party/libwebrtc/pc/rtp_parameters_conversion.cc
@@ -0,0 +1,378 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/rtp_parameters_conversion.h"
+
+#include <cstdint>
+#include <set>
+#include <string>
+#include <type_traits>
+#include <utility>
+
+#include "api/array_view.h"
+#include "api/media_types.h"
+#include "api/rtc_error.h"
+#include "media/base/codec.h"
+#include "media/base/media_constants.h"
+#include "media/base/rtp_utils.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/strings/string_builder.h"
+
+namespace webrtc {
+
+// Converts a webrtc::RtcpFeedback to a cricket::FeedbackParam, validating
+// that the message type is present/absent/valid as required by each
+// feedback type. Returns INVALID_PARAMETER on any mismatch.
+RTCErrorOr<cricket::FeedbackParam> ToCricketFeedbackParam(
+    const RtcpFeedback& feedback) {
+  switch (feedback.type) {
+    case RtcpFeedbackType::CCM:
+      // CCM requires a message type, and FIR is the only one supported.
+      if (!feedback.message_type) {
+        LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
+                             "Missing message type in CCM RtcpFeedback.");
+      } else if (*feedback.message_type != RtcpFeedbackMessageType::FIR) {
+        LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
+                             "Invalid message type in CCM RtcpFeedback.");
+      }
+      return cricket::FeedbackParam(cricket::kRtcpFbParamCcm,
+                                    cricket::kRtcpFbCcmParamFir);
+    case RtcpFeedbackType::LNTF:
+      // LNTF takes no message type.
+      if (feedback.message_type) {
+        LOG_AND_RETURN_ERROR(
+            RTCErrorType::INVALID_PARAMETER,
+            "Didn't expect message type in LNTF RtcpFeedback.");
+      }
+      return cricket::FeedbackParam(cricket::kRtcpFbParamLntf);
+    case RtcpFeedbackType::NACK:
+      // NACK requires a message type: GENERIC_NACK or PLI.
+      if (!feedback.message_type) {
+        LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
+                             "Missing message type in NACK RtcpFeedback.");
+      }
+      switch (*feedback.message_type) {
+        case RtcpFeedbackMessageType::GENERIC_NACK:
+          return cricket::FeedbackParam(cricket::kRtcpFbParamNack);
+        case RtcpFeedbackMessageType::PLI:
+          return cricket::FeedbackParam(cricket::kRtcpFbParamNack,
+                                        cricket::kRtcpFbNackParamPli);
+        default:
+          LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
+                               "Invalid message type in NACK RtcpFeedback.");
+      }
+    case RtcpFeedbackType::REMB:
+      // REMB takes no message type.
+      if (feedback.message_type) {
+        LOG_AND_RETURN_ERROR(
+            RTCErrorType::INVALID_PARAMETER,
+            "Didn't expect message type in REMB RtcpFeedback.");
+      }
+      return cricket::FeedbackParam(cricket::kRtcpFbParamRemb);
+    case RtcpFeedbackType::TRANSPORT_CC:
+      // transport-cc takes no message type.
+      if (feedback.message_type) {
+        LOG_AND_RETURN_ERROR(
+            RTCErrorType::INVALID_PARAMETER,
+            "Didn't expect message type in transport-cc RtcpFeedback.");
+      }
+      return cricket::FeedbackParam(cricket::kRtcpFbParamTransportCc);
+  }
+  RTC_CHECK_NOTREACHED();
+}
+
+// Converts an RtpCodecParameters to a cricket::Codec, validating that the
+// kind-specific mandatory fields (clock rate, channel count) are present
+// and within range, that the payload type is RTP-valid, and that every
+// RTCP feedback entry converts cleanly. Returns the first validation error
+// encountered.
+RTCErrorOr<cricket::Codec> ToCricketCodec(const RtpCodecParameters& codec) {
+  switch (codec.kind) {
+    // Note: the per-case `codec.kind != ...` re-checks that used to follow
+    // each label were tautologically false inside this switch (dead code
+    // left over from an older templated version) and have been removed.
+    case cricket::MEDIA_TYPE_AUDIO:
+      if (!codec.num_channels) {
+        LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
+                             "Missing number of channels for audio codec.");
+      }
+      if (*codec.num_channels <= 0) {
+        LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_RANGE,
+                             "Number of channels must be positive.");
+      }
+      if (!codec.clock_rate) {
+        LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
+                             "Missing codec clock rate.");
+      }
+      if (*codec.clock_rate <= 0) {
+        LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_RANGE,
+                             "Clock rate must be positive.");
+      }
+      break;
+    case cricket::MEDIA_TYPE_VIDEO:
+      if (codec.num_channels) {
+        LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
+                             "Video codec shouldn't have num_channels.");
+      }
+      if (!codec.clock_rate) {
+        LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
+                             "Missing codec clock rate.");
+      }
+      if (*codec.clock_rate != cricket::kVideoCodecClockrate) {
+        LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
+                             "Video clock rate must be 90000.");
+      }
+      break;
+    default:
+      LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
+                           "Unknown codec type");
+  }
+
+  // Payload type must be in the valid RTP range.
+  if (!cricket::IsValidRtpPayloadType(codec.payload_type)) {
+    char buf[40];
+    rtc::SimpleStringBuilder sb(buf);
+    sb << "Invalid payload type: " << codec.payload_type;
+    LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_RANGE, sb.str());
+  }
+
+  // Construct the kind-appropriate cricket codec. The optionals were
+  // verified above, so dereferencing here is safe.
+  cricket::Codec cricket_codec = [&]() {
+    if (codec.kind == cricket::MEDIA_TYPE_AUDIO) {
+      return cricket::CreateAudioCodec(codec.payload_type, codec.name,
+                                       *codec.clock_rate, *codec.num_channels);
+    }
+    RTC_DCHECK(codec.kind == cricket::MEDIA_TYPE_VIDEO);
+    return cricket::CreateVideoCodec(codec.payload_type, codec.name);
+  }();
+
+  // Convert RTCP feedback entries; any invalid entry fails the whole codec.
+  for (const RtcpFeedback& feedback : codec.rtcp_feedback) {
+    auto result = ToCricketFeedbackParam(feedback);
+    if (!result.ok()) {
+      return result.MoveError();
+    }
+    cricket_codec.AddFeedbackParam(result.MoveValue());
+  }
+  cricket_codec.params = codec.parameters;
+  return std::move(cricket_codec);
+}
+
+// Converts a list of RtpCodecParameters, validating each via ToCricketCodec
+// and additionally rejecting duplicate payload types. Fails fast on the
+// first invalid or duplicate codec.
+RTCErrorOr<std::vector<cricket::Codec>> ToCricketCodecs(
+    const std::vector<RtpCodecParameters>& codecs) {
+  std::vector<cricket::Codec> cricket_codecs;
+  std::set<int> seen_payload_types;
+  for (const RtpCodecParameters& codec : codecs) {
+    auto result = ToCricketCodec(codec);
+    if (!result.ok()) {
+      return result.MoveError();
+    }
+    // insert().second is false when the payload type was already present.
+    if (!seen_payload_types.insert(codec.payload_type).second) {
+      char buf[40];
+      rtc::SimpleStringBuilder sb(buf);
+      sb << "Duplicate payload type: " << codec.payload_type;
+      LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, sb.str());
+    }
+    cricket_codecs.push_back(result.MoveValue());
+  }
+  return std::move(cricket_codecs);
+}
+
+// Converts RtpEncodingParameters to a cricket::StreamParamsVec. At most one
+// encoding is supported (no simulcast/layering); an encoding without an
+// SSRC yields an empty vector (unsignaled receive case).
+RTCErrorOr<cricket::StreamParamsVec> ToCricketStreamParamsVec(
+    const std::vector<RtpEncodingParameters>& encodings) {
+  if (encodings.size() > 1u) {
+    LOG_AND_RETURN_ERROR(RTCErrorType::UNSUPPORTED_PARAMETER,
+                         "ORTC API implementation doesn't currently "
+                         "support simulcast or layered encodings.");
+  } else if (encodings.empty()) {
+    return cricket::StreamParamsVec();
+  }
+  cricket::StreamParamsVec cricket_streams;
+  const RtpEncodingParameters& encoding = encodings[0];
+  if (encoding.ssrc) {
+    cricket::StreamParams stream_params;
+    stream_params.add_ssrc(*encoding.ssrc);
+    cricket_streams.push_back(std::move(stream_params));
+  }
+  return std::move(cricket_streams);
+}
+
+// Converts a cricket::FeedbackParam back to a webrtc::RtcpFeedback.
+// Permissive counterpart of ToCricketFeedbackParam: unrecognized ids or
+// unsupported parameters are logged and mapped to nullopt rather than an
+// error, so callers simply drop them.
+absl::optional<RtcpFeedback> ToRtcpFeedback(
+    const cricket::FeedbackParam& cricket_feedback) {
+  if (cricket_feedback.id() == cricket::kRtcpFbParamCcm) {
+    if (cricket_feedback.param() == cricket::kRtcpFbCcmParamFir) {
+      return RtcpFeedback(RtcpFeedbackType::CCM, RtcpFeedbackMessageType::FIR);
+    } else {
+      RTC_LOG(LS_WARNING) << "Unsupported parameter for CCM RTCP feedback: "
+                          << cricket_feedback.param();
+      return absl::nullopt;
+    }
+  } else if (cricket_feedback.id() == cricket::kRtcpFbParamLntf) {
+    if (cricket_feedback.param().empty()) {
+      return RtcpFeedback(RtcpFeedbackType::LNTF);
+    } else {
+      RTC_LOG(LS_WARNING) << "Unsupported parameter for LNTF RTCP feedback: "
+                          << cricket_feedback.param();
+      return absl::nullopt;
+    }
+  } else if (cricket_feedback.id() == cricket::kRtcpFbParamNack) {
+    // An empty parameter means generic NACK; "pli" is the only other
+    // supported variant.
+    if (cricket_feedback.param().empty()) {
+      return RtcpFeedback(RtcpFeedbackType::NACK,
+                          RtcpFeedbackMessageType::GENERIC_NACK);
+    } else if (cricket_feedback.param() == cricket::kRtcpFbNackParamPli) {
+      return RtcpFeedback(RtcpFeedbackType::NACK, RtcpFeedbackMessageType::PLI);
+    } else {
+      RTC_LOG(LS_WARNING) << "Unsupported parameter for NACK RTCP feedback: "
+                          << cricket_feedback.param();
+      return absl::nullopt;
+    }
+  } else if (cricket_feedback.id() == cricket::kRtcpFbParamRemb) {
+    if (!cricket_feedback.param().empty()) {
+      RTC_LOG(LS_WARNING) << "Unsupported parameter for REMB RTCP feedback: "
+                          << cricket_feedback.param();
+      return absl::nullopt;
+    } else {
+      return RtcpFeedback(RtcpFeedbackType::REMB);
+    }
+  } else if (cricket_feedback.id() == cricket::kRtcpFbParamTransportCc) {
+    if (!cricket_feedback.param().empty()) {
+      RTC_LOG(LS_WARNING)
+          << "Unsupported parameter for transport-cc RTCP feedback: "
+          << cricket_feedback.param();
+      return absl::nullopt;
+    } else {
+      return RtcpFeedback(RtcpFeedbackType::TRANSPORT_CC);
+    }
+  }
+  RTC_LOG(LS_WARNING) << "Unsupported RTCP feedback type: "
+                      << cricket_feedback.id();
+  return absl::nullopt;
+}
+
+// Builds one RtpEncodingParameters per StreamParams, carrying over only the
+// primary SSRC of each stream.
+std::vector<RtpEncodingParameters> ToRtpEncodings(
+    const cricket::StreamParamsVec& stream_params) {
+  std::vector<RtpEncodingParameters> rtp_encodings;
+  for (const cricket::StreamParams& stream_param : stream_params) {
+    RtpEncodingParameters rtp_encoding;
+    rtp_encoding.ssrc.emplace(stream_param.first_ssrc());
+    rtp_encodings.push_back(std::move(rtp_encoding));
+  }
+  return rtp_encodings;
+}
+
+// Converts a cricket::Codec to an RtpCodecCapability. Permissive: feedback
+// params that don't convert are silently dropped (ToRtcpFeedback logs
+// them). Audio codecs get num_channels; video codecs get scalability_modes.
+RtpCodecCapability ToRtpCodecCapability(const cricket::Codec& cricket_codec) {
+  RtpCodecCapability codec;
+  codec.name = cricket_codec.name;
+  codec.kind = cricket_codec.type == cricket::Codec::Type::kAudio
+                   ? cricket::MEDIA_TYPE_AUDIO
+                   : cricket::MEDIA_TYPE_VIDEO;
+  codec.clock_rate.emplace(cricket_codec.clockrate);
+  codec.preferred_payload_type.emplace(cricket_codec.id);
+  for (const cricket::FeedbackParam& cricket_feedback :
+       cricket_codec.feedback_params.params()) {
+    absl::optional<RtcpFeedback> feedback = ToRtcpFeedback(cricket_feedback);
+    if (feedback) {
+      codec.rtcp_feedback.push_back(feedback.value());
+    }
+  }
+  switch (cricket_codec.type) {
+    case cricket::Codec::Type::kAudio:
+      codec.num_channels = static_cast<int>(cricket_codec.channels);
+      break;
+    case cricket::Codec::Type::kVideo:
+      codec.scalability_modes = cricket_codec.scalability_modes;
+      break;
+  }
+  codec.parameters.insert(cricket_codec.params.begin(),
+                          cricket_codec.params.end());
+  return codec;
+}
+
+// Converts a cricket::Codec to RtpCodecParameters. Mirrors
+// ToRtpCodecCapability, except that for video codecs no extra fields are
+// copied here (scalability_modes is capability-only in this path --
+// NOTE(review): presumably intentional, confirm against callers).
+RtpCodecParameters ToRtpCodecParameters(const cricket::Codec& cricket_codec) {
+  RtpCodecParameters codec_param;
+  codec_param.name = cricket_codec.name;
+  codec_param.kind = cricket_codec.type == cricket::Codec::Type::kAudio
+                         ? cricket::MEDIA_TYPE_AUDIO
+                         : cricket::MEDIA_TYPE_VIDEO;
+  codec_param.clock_rate.emplace(cricket_codec.clockrate);
+  codec_param.payload_type = cricket_codec.id;
+  for (const cricket::FeedbackParam& cricket_feedback :
+       cricket_codec.feedback_params.params()) {
+    absl::optional<RtcpFeedback> feedback = ToRtcpFeedback(cricket_feedback);
+    if (feedback) {
+      codec_param.rtcp_feedback.push_back(feedback.value());
+    }
+  }
+  switch (cricket_codec.type) {
+    case cricket::Codec::Type::kAudio:
+      codec_param.num_channels = static_cast<int>(cricket_codec.channels);
+      break;
+    case cricket::Codec::Type::kVideo:
+      // Nothing to do.
+      break;
+  }
+  codec_param.parameters = cricket_codec.params;
+  return codec_param;
+}
+
+// Aggregates codec and header-extension capabilities into RtpCapabilities.
+// Also derives the supported FEC mechanisms from the presence of the RED,
+// ULPFEC and FlexFEC codec names, and collapses duplicate RTX entries into
+// a single parameter-less RTX capability.
+RtpCapabilities ToRtpCapabilities(
+    const std::vector<cricket::Codec>& cricket_codecs,
+    const cricket::RtpHeaderExtensions& cricket_extensions) {
+  RtpCapabilities capabilities;
+  bool have_red = false;
+  bool have_ulpfec = false;
+  bool have_flexfec = false;
+  bool have_rtx = false;
+  for (const cricket::Codec& cricket_codec : cricket_codecs) {
+    if (cricket_codec.name == cricket::kRedCodecName) {
+      have_red = true;
+    } else if (cricket_codec.name == cricket::kUlpfecCodecName) {
+      have_ulpfec = true;
+    } else if (cricket_codec.name == cricket::kFlexfecCodecName) {
+      have_flexfec = true;
+    } else if (cricket_codec.name == cricket::kRtxCodecName) {
+      if (have_rtx) {
+        // There should only be one RTX codec entry
+        continue;
+      }
+      have_rtx = true;
+    }
+    auto codec_capability = ToRtpCodecCapability(cricket_codec);
+    if (cricket_codec.name == cricket::kRtxCodecName) {
+      // RTX codec should not have any parameter
+      codec_capability.parameters.clear();
+    }
+    capabilities.codecs.push_back(codec_capability);
+  }
+  for (const RtpExtension& cricket_extension : cricket_extensions) {
+    capabilities.header_extensions.emplace_back(cricket_extension.uri,
+                                                cricket_extension.id);
+  }
+  // FEC mechanism list: RED alone, RED+ULPFEC (only when both present),
+  // and FlexFEC are reported independently.
+  if (have_red) {
+    capabilities.fec.push_back(FecMechanism::RED);
+  }
+  if (have_red && have_ulpfec) {
+    capabilities.fec.push_back(FecMechanism::RED_AND_ULPFEC);
+  }
+  if (have_flexfec) {
+    capabilities.fec.push_back(FecMechanism::FLEXFEC);
+  }
+  return capabilities;
+}
+
+// Assembles a full RtpParameters from parsed cricket-level codecs, header
+// extensions and stream params; no validation is performed here.
+RtpParameters ToRtpParameters(
+    const std::vector<cricket::Codec>& cricket_codecs,
+    const cricket::RtpHeaderExtensions& cricket_extensions,
+    const cricket::StreamParamsVec& stream_params) {
+  RtpParameters rtp_parameters;
+  for (const cricket::Codec& cricket_codec : cricket_codecs) {
+    rtp_parameters.codecs.push_back(ToRtpCodecParameters(cricket_codec));
+  }
+  for (const RtpExtension& cricket_extension : cricket_extensions) {
+    rtp_parameters.header_extensions.emplace_back(cricket_extension.uri,
+                                                  cricket_extension.id);
+  }
+  rtp_parameters.encodings = ToRtpEncodings(stream_params);
+  return rtp_parameters;
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/rtp_parameters_conversion.h b/third_party/libwebrtc/pc/rtp_parameters_conversion.h
new file mode 100644
index 0000000000..2cc39dd0e6
--- /dev/null
+++ b/third_party/libwebrtc/pc/rtp_parameters_conversion.h
@@ -0,0 +1,97 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_RTP_PARAMETERS_CONVERSION_H_
+#define PC_RTP_PARAMETERS_CONVERSION_H_
+
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/rtc_error.h"
+#include "api/rtp_parameters.h"
+#include "media/base/codec.h"
+#include "media/base/stream_params.h"
+#include "pc/session_description.h"
+
+namespace webrtc {
+
+// NOTE: Some functions are templated for convenience, such that template-based
+// code dealing with AudioContentDescription and VideoContentDescription can
+// use this easily. Such methods are usable with cricket::AudioCodec and
+// cricket::VideoCodec.
+
+//***************************************************************************
+// Functions for converting from new webrtc:: structures to old cricket::
+// structures.
+//
+// As the return values imply, all of these functions do validation of the
+// parameters and return an error if they're invalid. It's expected that any
+// default values (such as video clock rate of 90000) have been filled by the
+// time the webrtc:: structure is being converted to the cricket:: one.
+//
+// These are expected to be used when parameters are passed into an RtpSender
+// or RtpReceiver, and need to be validated and converted so they can be
+// applied to the media engine level.
+//***************************************************************************
+
+// Returns error on invalid input. Certain message types are only valid for
+// certain feedback types.
+RTCErrorOr<cricket::FeedbackParam> ToCricketFeedbackParam(
+ const RtcpFeedback& feedback);
+
+// Verifies that the codec kind is correct, and it has mandatory parameters
+// filled, with values in valid ranges.
+RTCErrorOr<cricket::Codec> ToCricketCodec(const RtpCodecParameters& codec);
+
+// Verifies that payload types aren't duplicated, in addition to normal
+// validation.
+RTCErrorOr<std::vector<cricket::Codec>> ToCricketCodecs(
+ const std::vector<RtpCodecParameters>& codecs);
+
+// SSRCs are allowed to be ommitted. This may be used for receive parameters
+// where SSRCs are unsignaled.
+RTCErrorOr<cricket::StreamParamsVec> ToCricketStreamParamsVec(
+ const std::vector<RtpEncodingParameters>& encodings);
+
+//*****************************************************************************
+// Functions for converting from old cricket:: structures to new webrtc::
+// structures. Unlike the above functions, these are permissive with regards to
+// input validation; it's assumed that any necessary validation already
+// occurred.
+//
+// These are expected to be used either to convert from audio/video engine
+// capabilities to RtpCapabilities, or to convert from already-parsed SDP
+// (in the form of cricket:: structures) to webrtc:: structures. The latter
+// functionality is not yet implemented.
+//*****************************************************************************
+
+// Returns empty value if `cricket_feedback` is a feedback type not
+// supported/recognized.
+absl::optional<RtcpFeedback> ToRtcpFeedback(
+ const cricket::FeedbackParam& cricket_feedback);
+
+std::vector<RtpEncodingParameters> ToRtpEncodings(
+ const cricket::StreamParamsVec& stream_params);
+
+RtpCodecParameters ToRtpCodecParameters(const cricket::Codec& cricket_codec);
+RtpCodecCapability ToRtpCodecCapability(const cricket::Codec& cricket_codec);
+
+RtpCapabilities ToRtpCapabilities(
+ const std::vector<cricket::Codec>& cricket_codecs,
+ const cricket::RtpHeaderExtensions& cricket_extensions);
+
+RtpParameters ToRtpParameters(
+ const std::vector<cricket::Codec>& cricket_codecs,
+ const cricket::RtpHeaderExtensions& cricket_extensions,
+ const cricket::StreamParamsVec& stream_params);
+
+} // namespace webrtc
+
+#endif // PC_RTP_PARAMETERS_CONVERSION_H_
diff --git a/third_party/libwebrtc/pc/rtp_parameters_conversion_unittest.cc b/third_party/libwebrtc/pc/rtp_parameters_conversion_unittest.cc
new file mode 100644
index 0000000000..9b48bfacc9
--- /dev/null
+++ b/third_party/libwebrtc/pc/rtp_parameters_conversion_unittest.cc
@@ -0,0 +1,602 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/rtp_parameters_conversion.h"
+
+#include <cstdint>
+#include <map>
+#include <string>
+
+#include "api/media_types.h"
+#include "media/base/codec.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+using ::testing::UnorderedElementsAre;
+
+namespace webrtc {
+
+// Verifies that each supported (RtcpFeedbackType, RtcpFeedbackMessageType)
+// combination converts to the expected cricket::FeedbackParam (id, param).
+TEST(RtpParametersConversionTest, ToCricketFeedbackParam) {
+ auto result = ToCricketFeedbackParam(
+ {RtcpFeedbackType::CCM, RtcpFeedbackMessageType::FIR});
+ EXPECT_EQ(cricket::FeedbackParam("ccm", "fir"), result.value());
+
+ result = ToCricketFeedbackParam(RtcpFeedback(RtcpFeedbackType::LNTF));
+ EXPECT_EQ(cricket::FeedbackParam("goog-lntf"), result.value());
+
+ // GENERIC_NACK maps to a bare "nack" param with no message-type string.
+ result = ToCricketFeedbackParam(
+ {RtcpFeedbackType::NACK, RtcpFeedbackMessageType::GENERIC_NACK});
+ EXPECT_EQ(cricket::FeedbackParam("nack"), result.value());
+
+ result = ToCricketFeedbackParam(
+ {RtcpFeedbackType::NACK, RtcpFeedbackMessageType::PLI});
+ EXPECT_EQ(cricket::FeedbackParam("nack", "pli"), result.value());
+
+ result = ToCricketFeedbackParam(RtcpFeedback(RtcpFeedbackType::REMB));
+ EXPECT_EQ(cricket::FeedbackParam("goog-remb"), result.value());
+
+ result = ToCricketFeedbackParam(RtcpFeedback(RtcpFeedbackType::TRANSPORT_CC));
+ EXPECT_EQ(cricket::FeedbackParam("transport-cc"), result.value());
+}
+
+// Verifies that invalid (type, message type) combinations are rejected with
+// RTCErrorType::INVALID_PARAMETER rather than silently converted.
+TEST(RtpParametersConversionTest, ToCricketFeedbackParamErrors) {
+ // CCM with missing or invalid message type.
+ auto result = ToCricketFeedbackParam(RtcpFeedback(RtcpFeedbackType::CCM));
+ EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, result.error().type());
+
+ result = ToCricketFeedbackParam(
+ {RtcpFeedbackType::CCM, RtcpFeedbackMessageType::PLI});
+ EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, result.error().type());
+
+ // LNTF with message type (should be left empty).
+ result = ToCricketFeedbackParam(
+ {RtcpFeedbackType::LNTF, RtcpFeedbackMessageType::GENERIC_NACK});
+ EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, result.error().type());
+
+ // NACK with missing or invalid message type.
+ result = ToCricketFeedbackParam(RtcpFeedback(RtcpFeedbackType::NACK));
+ EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, result.error().type());
+
+ result = ToCricketFeedbackParam(
+ {RtcpFeedbackType::NACK, RtcpFeedbackMessageType::FIR});
+ EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, result.error().type());
+
+ // REMB with message type (should be left empty).
+ result = ToCricketFeedbackParam(
+ {RtcpFeedbackType::REMB, RtcpFeedbackMessageType::GENERIC_NACK});
+ EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, result.error().type());
+
+ // TRANSPORT_CC with message type (should be left empty).
+ result = ToCricketFeedbackParam(
+ {RtcpFeedbackType::TRANSPORT_CC, RtcpFeedbackMessageType::FIR});
+ EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, result.error().type());
+}
+
+// Verifies a full audio RtpCodecParameters -> cricket::Codec conversion:
+// name (case preserved), payload type, clock rate, channels, fmtp-style
+// parameters and RTCP feedback are all carried over.
+TEST(RtpParametersConversionTest, ToAudioCodec) {
+ RtpCodecParameters codec;
+ codec.name = "AuDiO";
+ codec.kind = cricket::MEDIA_TYPE_AUDIO;
+ codec.payload_type = 120;
+ codec.clock_rate.emplace(36000);
+ codec.num_channels.emplace(6);
+ codec.parameters["foo"] = "bar";
+ codec.rtcp_feedback.emplace_back(RtcpFeedbackType::TRANSPORT_CC);
+ auto result = ToCricketCodec(codec);
+ ASSERT_TRUE(result.ok());
+
+ EXPECT_EQ("AuDiO", result.value().name);
+ EXPECT_EQ(120, result.value().id);
+ EXPECT_EQ(36000, result.value().clockrate);
+ EXPECT_EQ(6u, result.value().channels);
+ ASSERT_EQ(1u, result.value().params.size());
+ EXPECT_EQ("bar", result.value().params["foo"]);
+ EXPECT_EQ(1u, result.value().feedback_params.params().size());
+ EXPECT_TRUE(result.value().feedback_params.Has(
+ cricket::FeedbackParam("transport-cc")));
+}
+
+// Verifies a full video RtpCodecParameters -> cricket::Codec conversion,
+// including multiple parameters and multiple RTCP feedback entries.
+TEST(RtpParametersConversionTest, ToVideoCodec) {
+ RtpCodecParameters codec;
+ codec.name = "coolcodec";
+ codec.kind = cricket::MEDIA_TYPE_VIDEO;
+ codec.payload_type = 101;
+ codec.clock_rate.emplace(90000);
+ codec.parameters["foo"] = "bar";
+ codec.parameters["PING"] = "PONG";
+ codec.rtcp_feedback.emplace_back(RtcpFeedbackType::LNTF);
+ codec.rtcp_feedback.emplace_back(RtcpFeedbackType::TRANSPORT_CC);
+ codec.rtcp_feedback.emplace_back(RtcpFeedbackType::NACK,
+ RtcpFeedbackMessageType::PLI);
+ auto result = ToCricketCodec(codec);
+ ASSERT_TRUE(result.ok());
+
+ EXPECT_EQ("coolcodec", result.value().name);
+ EXPECT_EQ(101, result.value().id);
+ EXPECT_EQ(90000, result.value().clockrate);
+ ASSERT_EQ(2u, result.value().params.size());
+ EXPECT_EQ("bar", result.value().params["foo"]);
+ EXPECT_EQ("PONG", result.value().params["PING"]);
+ EXPECT_EQ(3u, result.value().feedback_params.params().size());
+ EXPECT_TRUE(
+ result.value().feedback_params.Has(cricket::FeedbackParam("goog-lntf")));
+ EXPECT_TRUE(result.value().feedback_params.Has(
+ cricket::FeedbackParam("transport-cc")));
+ EXPECT_TRUE(result.value().feedback_params.Has(
+ cricket::FeedbackParam("nack", "pli")));
+}
+
+// Trying to convert to an AudioCodec if the kind is "video" should fail.
+// Conversion with a mismatched media kind (audio codec tagged video, and
+// vice versa) must yield INVALID_PARAMETER; fixing the kind must succeed.
+TEST(RtpParametersConversionTest, ToCricketCodecInvalidKind) {
+ RtpCodecParameters audio_codec;
+ audio_codec.name = "opus";
+ audio_codec.kind = cricket::MEDIA_TYPE_VIDEO;
+ audio_codec.payload_type = 111;
+ audio_codec.clock_rate.emplace(48000);
+ audio_codec.num_channels.emplace(2);
+
+ RtpCodecParameters video_codec;
+ video_codec.name = "VP8";
+ video_codec.kind = cricket::MEDIA_TYPE_AUDIO;
+ video_codec.payload_type = 102;
+ video_codec.clock_rate.emplace(90000);
+
+ auto audio_result = ToCricketCodec(audio_codec);
+ EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, audio_result.error().type());
+
+ auto video_result = ToCricketCodec(video_codec);
+ EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, video_result.error().type());
+
+ // Sanity check that if the kind is correct, the conversion succeeds.
+ audio_codec.kind = cricket::MEDIA_TYPE_AUDIO;
+ video_codec.kind = cricket::MEDIA_TYPE_VIDEO;
+ audio_result = ToCricketCodec(audio_codec);
+ EXPECT_TRUE(audio_result.ok());
+ video_result = ToCricketCodec(video_codec);
+ EXPECT_TRUE(video_result.ok());
+}
+
+// Audio codecs require a positive channel count and clock rate: a missing
+// value is INVALID_PARAMETER, a negative one is INVALID_RANGE.
+TEST(RtpParametersConversionTest, ToAudioCodecInvalidParameters) {
+ // Missing channels.
+ RtpCodecParameters codec;
+ codec.name = "opus";
+ codec.kind = cricket::MEDIA_TYPE_AUDIO;
+ codec.payload_type = 111;
+ codec.clock_rate.emplace(48000);
+ auto result = ToCricketCodec(codec);
+ EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, result.error().type());
+
+ // Negative number of channels.
+ codec.num_channels.emplace(-1);
+ result = ToCricketCodec(codec);
+ EXPECT_EQ(RTCErrorType::INVALID_RANGE, result.error().type());
+
+ // Missing clock rate.
+ codec.num_channels.emplace(2);
+ codec.clock_rate.reset();
+ result = ToCricketCodec(codec);
+ EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, result.error().type());
+
+ // Negative clock rate.
+ codec.clock_rate.emplace(-48000);
+ result = ToCricketCodec(codec);
+ EXPECT_EQ(RTCErrorType::INVALID_RANGE, result.error().type());
+
+ // Sanity check that conversion succeeds if these errors are fixed.
+ codec.clock_rate.emplace(48000);
+ result = ToCricketCodec(codec);
+ EXPECT_TRUE(result.ok());
+}
+
+// Video codecs require a 90000 Hz clock rate and must not carry a channel
+// count; anything else is INVALID_PARAMETER.
+TEST(RtpParametersConversionTest, ToVideoCodecInvalidParameters) {
+ // Missing clock rate.
+ RtpCodecParameters codec;
+ codec.name = "VP8";
+ codec.kind = cricket::MEDIA_TYPE_VIDEO;
+ codec.payload_type = 102;
+ auto result = ToCricketCodec(codec);
+ EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, result.error().type());
+
+ // Invalid clock rate (48000 is rejected for video).
+ codec.clock_rate.emplace(48000);
+ result = ToCricketCodec(codec);
+ EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, result.error().type());
+
+ // Channels set (should be unset).
+ codec.clock_rate.emplace(90000);
+ codec.num_channels.emplace(2);
+ result = ToCricketCodec(codec);
+ EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, result.error().type());
+
+ // Sanity check that conversion succeeds if these errors are fixed.
+ codec.num_channels.reset();
+ result = ToCricketCodec(codec);
+ EXPECT_TRUE(result.ok());
+}
+
+// Payload types outside [0, 127] must be rejected with INVALID_RANGE; 127
+// (the RTP maximum) must be accepted.
+TEST(RtpParametersConversionTest, ToCricketCodecInvalidPayloadType) {
+ RtpCodecParameters codec;
+ codec.name = "VP8";
+ codec.kind = cricket::MEDIA_TYPE_VIDEO;
+ codec.clock_rate.emplace(90000);
+
+ codec.payload_type = -1000;
+ auto result = ToCricketCodec(codec);
+ EXPECT_EQ(RTCErrorType::INVALID_RANGE, result.error().type());
+
+ // Max payload type is 127.
+ codec.payload_type = 128;
+ result = ToCricketCodec(codec);
+ EXPECT_EQ(RTCErrorType::INVALID_RANGE, result.error().type());
+
+ // Sanity check that conversion succeeds with a valid payload type.
+ codec.payload_type = 127;
+ result = ToCricketCodec(codec);
+ EXPECT_TRUE(result.ok());
+}
+
+// There are already tests for ToCricketFeedbackParam, but ensure that those
+// errors are propagated from ToCricketCodec.
+TEST(RtpParametersConversionTest, ToCricketCodecInvalidRtcpFeedback) {
+ RtpCodecParameters codec;
+ codec.name = "VP8";
+ codec.kind = cricket::MEDIA_TYPE_VIDEO;
+ codec.clock_rate.emplace(90000);
+ codec.payload_type = 99;
+ // CCM+PLI is an invalid combination (see ToCricketFeedbackParamErrors).
+ codec.rtcp_feedback.emplace_back(RtcpFeedbackType::CCM,
+ RtcpFeedbackMessageType::PLI);
+
+ auto result = ToCricketCodec(codec);
+ EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, result.error().type());
+
+ // Sanity check that conversion succeeds without invalid feedback.
+ codec.rtcp_feedback.clear();
+ result = ToCricketCodec(codec);
+ EXPECT_TRUE(result.ok());
+}
+
+// Converting a list of codecs preserves order and per-codec fields.
+TEST(RtpParametersConversionTest, ToCricketCodecs) {
+ std::vector<RtpCodecParameters> codecs;
+ RtpCodecParameters codec;
+ codec.name = "VP8";
+ codec.kind = cricket::MEDIA_TYPE_VIDEO;
+ codec.clock_rate.emplace(90000);
+ codec.payload_type = 99;
+ codecs.push_back(codec);
+
+ // Reuse the struct; only name and payload type differ for the second codec.
+ codec.name = "VP9";
+ codec.payload_type = 100;
+ codecs.push_back(codec);
+
+ auto result = ToCricketCodecs(codecs);
+ ASSERT_TRUE(result.ok());
+ ASSERT_EQ(2u, result.value().size());
+ EXPECT_EQ("VP8", result.value()[0].name);
+ EXPECT_EQ(99, result.value()[0].id);
+ EXPECT_EQ("VP9", result.value()[1].name);
+ EXPECT_EQ(100, result.value()[1].id);
+}
+
+// Two codecs sharing a payload type must be rejected as INVALID_PARAMETER.
+TEST(RtpParametersConversionTest, ToCricketCodecsDuplicatePayloadType) {
+ std::vector<RtpCodecParameters> codecs;
+ RtpCodecParameters codec;
+ codec.name = "VP8";
+ codec.kind = cricket::MEDIA_TYPE_VIDEO;
+ codec.clock_rate.emplace(90000);
+ codec.payload_type = 99;
+ codecs.push_back(codec);
+
+ // Second codec deliberately reuses payload type 99.
+ codec.name = "VP9";
+ codec.payload_type = 99;
+ codecs.push_back(codec);
+
+ auto result = ToCricketCodecs(codecs);
+ EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, result.error().type());
+
+ // Sanity check that this succeeds without the duplicate payload type.
+ codecs[1].payload_type = 120;
+ result = ToCricketCodecs(codecs);
+ EXPECT_TRUE(result.ok());
+}
+
+// A single encoding with an SSRC converts to one StreamParams carrying
+// exactly that SSRC.
+TEST(RtpParametersConversionTest, ToCricketStreamParamsVecSimple) {
+ std::vector<RtpEncodingParameters> encodings;
+ RtpEncodingParameters encoding;
+ encoding.ssrc.emplace(0xbaadf00d);
+ encodings.push_back(encoding);
+ auto result = ToCricketStreamParamsVec(encodings);
+ ASSERT_TRUE(result.ok());
+ ASSERT_EQ(1u, result.value().size());
+ EXPECT_EQ(1u, result.value()[0].ssrcs.size());
+ EXPECT_EQ(0xbaadf00d, result.value()[0].first_ssrc());
+}
+
+// An empty list of encodings should be accepted; an endpoint may want to
+// prepare a decoder/encoder without having something to receive/send yet.
+TEST(RtpParametersConversionTest, ToCricketStreamParamsVecNoEncodings) {
+ std::vector<RtpEncodingParameters> encodings;
+ auto result = ToCricketStreamParamsVec(encodings);
+ ASSERT_TRUE(result.ok());
+ EXPECT_EQ(0u, result.value().size());
+}
+
+// An encoding without SSRCs should be accepted. This could be the case when
+// SSRCs aren't signaled and payload-type based demuxing is used.
+TEST(RtpParametersConversionTest, ToCricketStreamParamsVecMissingSsrcs) {
+ std::vector<RtpEncodingParameters> encodings = {{}};
+ // The single default-constructed encoding has no SSRC; no StreamParams is
+ // produced for it.
+ auto result = ToCricketStreamParamsVec(encodings);
+ ASSERT_TRUE(result.ok());
+ EXPECT_EQ(0u, result.value().size());
+}
+
+// TODO(deadbeef): Update this test when we support multiple encodings.
+// More than one encoding is currently rejected as UNSUPPORTED_PARAMETER.
+TEST(RtpParametersConversionTest, ToCricketStreamParamsVecMultipleEncodings) {
+ std::vector<RtpEncodingParameters> encodings = {{}, {}};
+ auto result = ToCricketStreamParamsVec(encodings);
+ EXPECT_EQ(RTCErrorType::UNSUPPORTED_PARAMETER, result.error().type());
+}
+
+// Reverse direction of ToCricketFeedbackParam: each known cricket feedback
+// param converts to the matching RtcpFeedback value.
+TEST(RtpParametersConversionTest, ToRtcpFeedback) {
+ absl::optional<RtcpFeedback> result = ToRtcpFeedback({"ccm", "fir"});
+ EXPECT_EQ(RtcpFeedback(RtcpFeedbackType::CCM, RtcpFeedbackMessageType::FIR),
+ *result);
+
+ result = ToRtcpFeedback(cricket::FeedbackParam("goog-lntf"));
+ EXPECT_EQ(RtcpFeedback(RtcpFeedbackType::LNTF), *result);
+
+ // Bare "nack" maps back to NACK/GENERIC_NACK.
+ result = ToRtcpFeedback(cricket::FeedbackParam("nack"));
+ EXPECT_EQ(RtcpFeedback(RtcpFeedbackType::NACK,
+ RtcpFeedbackMessageType::GENERIC_NACK),
+ *result);
+
+ result = ToRtcpFeedback({"nack", "pli"});
+ EXPECT_EQ(RtcpFeedback(RtcpFeedbackType::NACK, RtcpFeedbackMessageType::PLI),
+ *result);
+
+ result = ToRtcpFeedback(cricket::FeedbackParam("goog-remb"));
+ EXPECT_EQ(RtcpFeedback(RtcpFeedbackType::REMB), *result);
+
+ result = ToRtcpFeedback(cricket::FeedbackParam("transport-cc"));
+ EXPECT_EQ(RtcpFeedback(RtcpFeedbackType::TRANSPORT_CC), *result);
+}
+
+// Unlike ToCricketFeedbackParam (which returns an error), ToRtcpFeedback is
+// permissive and simply returns an empty optional for unrecognized or
+// malformed feedback params.
+TEST(RtpParametersConversionTest, ToRtcpFeedbackErrors) {
+ // CCM with missing or invalid message type.
+ absl::optional<RtcpFeedback> result = ToRtcpFeedback({"ccm", "pli"});
+ EXPECT_FALSE(result);
+
+ result = ToRtcpFeedback(cricket::FeedbackParam("ccm"));
+ EXPECT_FALSE(result);
+
+ // LNTF with message type (should be left empty).
+ result = ToRtcpFeedback({"goog-lntf", "pli"});
+ EXPECT_FALSE(result);
+
+ // NACK with missing or invalid message type.
+ result = ToRtcpFeedback({"nack", "fir"});
+ EXPECT_FALSE(result);
+
+ // REMB with message type (should be left empty).
+ result = ToRtcpFeedback({"goog-remb", "pli"});
+ EXPECT_FALSE(result);
+
+ // TRANSPORT_CC with message type (should be left empty).
+ result = ToRtcpFeedback({"transport-cc", "fir"});
+ EXPECT_FALSE(result);
+
+ // Unknown message type.
+ result = ToRtcpFeedback(cricket::FeedbackParam("foo"));
+ EXPECT_FALSE(result);
+}
+
+// cricket::AudioCodec -> RtpCodecCapability: payload type becomes the
+// preferred payload type; params and feedback are carried over.
+TEST(RtpParametersConversionTest, ToAudioRtpCodecCapability) {
+ cricket::AudioCodec cricket_codec =
+ cricket::CreateAudioCodec(50, "foo", 22222, 4);
+ cricket_codec.params["foo"] = "bar";
+ cricket_codec.feedback_params.Add(cricket::FeedbackParam("transport-cc"));
+ RtpCodecCapability codec = ToRtpCodecCapability(cricket_codec);
+
+ EXPECT_EQ("foo", codec.name);
+ EXPECT_EQ(cricket::MEDIA_TYPE_AUDIO, codec.kind);
+ EXPECT_EQ(50, codec.preferred_payload_type);
+ EXPECT_EQ(22222, codec.clock_rate);
+ EXPECT_EQ(4, codec.num_channels);
+ ASSERT_EQ(1u, codec.parameters.size());
+ EXPECT_EQ("bar", codec.parameters["foo"]);
+ EXPECT_EQ(1u, codec.rtcp_feedback.size());
+ EXPECT_EQ(RtcpFeedback(RtcpFeedbackType::TRANSPORT_CC),
+ codec.rtcp_feedback[0]);
+}
+
+// cricket::VideoCodec -> RtpCodecCapability with multiple params and
+// feedback entries; feedback order is preserved.
+TEST(RtpParametersConversionTest, ToVideoRtpCodecCapability) {
+ cricket::VideoCodec cricket_codec = cricket::CreateVideoCodec(101, "VID");
+ cricket_codec.clockrate = 80000;
+ cricket_codec.params["foo"] = "bar";
+ cricket_codec.params["ANOTHER"] = "param";
+ cricket_codec.feedback_params.Add(cricket::FeedbackParam("transport-cc"));
+ cricket_codec.feedback_params.Add(cricket::FeedbackParam("goog-lntf"));
+ cricket_codec.feedback_params.Add({"nack", "pli"});
+ RtpCodecCapability codec = ToRtpCodecCapability(cricket_codec);
+
+ EXPECT_EQ("VID", codec.name);
+ EXPECT_EQ(cricket::MEDIA_TYPE_VIDEO, codec.kind);
+ EXPECT_EQ(101, codec.preferred_payload_type);
+ EXPECT_EQ(80000, codec.clock_rate);
+ ASSERT_EQ(2u, codec.parameters.size());
+ EXPECT_EQ("bar", codec.parameters["foo"]);
+ EXPECT_EQ("param", codec.parameters["ANOTHER"]);
+ EXPECT_EQ(3u, codec.rtcp_feedback.size());
+ EXPECT_EQ(RtcpFeedback(RtcpFeedbackType::TRANSPORT_CC),
+ codec.rtcp_feedback[0]);
+ EXPECT_EQ(RtcpFeedback(RtcpFeedbackType::LNTF), codec.rtcp_feedback[1]);
+ EXPECT_EQ(RtcpFeedback(RtcpFeedbackType::NACK, RtcpFeedbackMessageType::PLI),
+ codec.rtcp_feedback[2]);
+}
+
+// An empty StreamParamsVec converts to an empty encodings list.
+TEST(RtpParametersConversionTest, ToRtpEncodingsWithEmptyStreamParamsVec) {
+ cricket::StreamParamsVec streams;
+ auto rtp_encodings = ToRtpEncodings(streams);
+ ASSERT_EQ(0u, rtp_encodings.size());
+}
+
+// Each StreamParams becomes one RtpEncodingParameters, in order, with its
+// first SSRC carried over.
+TEST(RtpParametersConversionTest, ToRtpEncodingsWithMultipleStreamParams) {
+ cricket::StreamParamsVec streams;
+ cricket::StreamParams stream1;
+ stream1.ssrcs.push_back(1111u);
+
+ cricket::StreamParams stream2;
+ stream2.ssrcs.push_back(2222u);
+
+ streams.push_back(stream1);
+ streams.push_back(stream2);
+
+ auto rtp_encodings = ToRtpEncodings(streams);
+ ASSERT_EQ(2u, rtp_encodings.size());
+ EXPECT_EQ(1111u, rtp_encodings[0].ssrc);
+ EXPECT_EQ(2222u, rtp_encodings[1].ssrc);
+}
+
+// cricket::AudioCodec -> RtpCodecParameters: same fields as the capability
+// conversion, but the id maps to payload_type (not preferred_payload_type).
+TEST(RtpParametersConversionTest, ToAudioRtpCodecParameters) {
+ cricket::AudioCodec cricket_codec =
+ cricket::CreateAudioCodec(50, "foo", 22222, 4);
+ cricket_codec.params["foo"] = "bar";
+ cricket_codec.feedback_params.Add(cricket::FeedbackParam("transport-cc"));
+ RtpCodecParameters codec = ToRtpCodecParameters(cricket_codec);
+
+ EXPECT_EQ("foo", codec.name);
+ EXPECT_EQ(cricket::MEDIA_TYPE_AUDIO, codec.kind);
+ EXPECT_EQ(50, codec.payload_type);
+ EXPECT_EQ(22222, codec.clock_rate);
+ EXPECT_EQ(4, codec.num_channels);
+ ASSERT_EQ(1u, codec.parameters.size());
+ EXPECT_EQ("bar", codec.parameters["foo"]);
+ EXPECT_EQ(1u, codec.rtcp_feedback.size());
+ EXPECT_EQ(RtcpFeedback(RtcpFeedbackType::TRANSPORT_CC),
+ codec.rtcp_feedback[0]);
+}
+
+// cricket::VideoCodec -> RtpCodecParameters with multiple params and
+// feedback entries; feedback order is preserved.
+TEST(RtpParametersConversionTest, ToVideoRtpCodecParameters) {
+ cricket::VideoCodec cricket_codec = cricket::CreateVideoCodec(101, "VID");
+ cricket_codec.clockrate = 80000;
+ cricket_codec.params["foo"] = "bar";
+ cricket_codec.params["ANOTHER"] = "param";
+ cricket_codec.feedback_params.Add(cricket::FeedbackParam("transport-cc"));
+ cricket_codec.feedback_params.Add(cricket::FeedbackParam("goog-lntf"));
+ cricket_codec.feedback_params.Add({"nack", "pli"});
+ RtpCodecParameters codec = ToRtpCodecParameters(cricket_codec);
+
+ EXPECT_EQ("VID", codec.name);
+ EXPECT_EQ(cricket::MEDIA_TYPE_VIDEO, codec.kind);
+ EXPECT_EQ(101, codec.payload_type);
+ EXPECT_EQ(80000, codec.clock_rate);
+ ASSERT_EQ(2u, codec.parameters.size());
+ EXPECT_EQ("bar", codec.parameters["foo"]);
+ EXPECT_EQ("param", codec.parameters["ANOTHER"]);
+ EXPECT_EQ(3u, codec.rtcp_feedback.size());
+ EXPECT_EQ(RtcpFeedback(RtcpFeedbackType::TRANSPORT_CC),
+ codec.rtcp_feedback[0]);
+ EXPECT_EQ(RtcpFeedback(RtcpFeedbackType::LNTF), codec.rtcp_feedback[1]);
+ EXPECT_EQ(RtcpFeedback(RtcpFeedbackType::NACK, RtcpFeedbackMessageType::PLI),
+ codec.rtcp_feedback[2]);
+}
+
+// An unknown feedback param should just be ignored: only the recognized
+// "transport-cc" entry survives the conversion.
+TEST(RtpParametersConversionTest, ToRtpCodecCapabilityUnknownFeedbackParam) {
+ cricket::AudioCodec cricket_codec =
+ cricket::CreateAudioCodec(50, "foo", 22222, 4);
+ cricket_codec.params["foo"] = "bar";
+ cricket_codec.feedback_params.Add({"unknown", "param"});
+ cricket_codec.feedback_params.Add(cricket::FeedbackParam("transport-cc"));
+ RtpCodecCapability codec = ToRtpCodecCapability(cricket_codec);
+
+ ASSERT_EQ(1u, codec.rtcp_feedback.size());
+ EXPECT_EQ(RtcpFeedback(RtcpFeedbackType::TRANSPORT_CC),
+ codec.rtcp_feedback[0]);
+}
+
+// Most of ToRtpCapabilities is tested by ToRtpCodecCapability, but we need to
+// test that the result of ToRtpCodecCapability ends up in the result, and that
+// the "fec" list is assembled correctly.
+TEST(RtpParametersConversionTest, ToRtpCapabilities) {
+ cricket::VideoCodec vp8 = cricket::CreateVideoCodec(101, "VP8");
+ vp8.clockrate = 90000;
+
+ cricket::VideoCodec red = cricket::CreateVideoCodec(102, "red");
+ red.clockrate = 90000;
+
+ cricket::VideoCodec ulpfec = cricket::CreateVideoCodec(103, "ulpfec");
+ ulpfec.clockrate = 90000;
+
+ // NOTE(review): flexfec reuses payload type 102 (same as red); presumably
+ // fine because ToRtpCapabilities performs no duplicate-payload validation.
+ cricket::VideoCodec flexfec = cricket::CreateVideoCodec(102, "flexfec-03");
+ flexfec.clockrate = 90000;
+
+ // RTX associated with VP8 (apt=101). The payload type was previously the
+ // octal literal `014` (== decimal 12), almost certainly a typo for 104;
+ // no assertion below depends on this id, so a decimal literal is used to
+ // avoid the octal footgun.
+ cricket::VideoCodec rtx = cricket::CreateVideoRtxCodec(104, 101);
+
+ // RTX whose associated payload type (109) matches no codec in the list;
+ // it is dropped from the capabilities (only 3 codecs asserted below).
+ cricket::VideoCodec rtx2 = cricket::CreateVideoRtxCodec(105, 109);
+
+ RtpCapabilities capabilities =
+ ToRtpCapabilities({vp8, ulpfec, rtx, rtx2}, {{"uri", 1}, {"uri2", 3}});
+ ASSERT_EQ(3u, capabilities.codecs.size());
+ EXPECT_EQ("VP8", capabilities.codecs[0].name);
+ EXPECT_EQ("ulpfec", capabilities.codecs[1].name);
+ EXPECT_EQ("rtx", capabilities.codecs[2].name);
+ EXPECT_EQ(0u, capabilities.codecs[2].parameters.size());
+ ASSERT_EQ(2u, capabilities.header_extensions.size());
+ EXPECT_EQ("uri", capabilities.header_extensions[0].uri);
+ EXPECT_EQ(1, capabilities.header_extensions[0].preferred_id);
+ EXPECT_EQ("uri2", capabilities.header_extensions[1].uri);
+ EXPECT_EQ(3, capabilities.header_extensions[1].preferred_id);
+ EXPECT_EQ(0u, capabilities.fec.size());
+
+ // red + ulpfec together yield both RED and RED_AND_ULPFEC mechanisms.
+ capabilities = ToRtpCapabilities({vp8, red, ulpfec, rtx},
+ cricket::RtpHeaderExtensions());
+ EXPECT_EQ(4u, capabilities.codecs.size());
+ EXPECT_THAT(
+ capabilities.fec,
+ UnorderedElementsAre(FecMechanism::RED, FecMechanism::RED_AND_ULPFEC));
+
+ // red + flexfec-03 yield RED and FLEXFEC mechanisms.
+ capabilities =
+ ToRtpCapabilities({vp8, red, flexfec}, cricket::RtpHeaderExtensions());
+ EXPECT_EQ(3u, capabilities.codecs.size());
+ EXPECT_THAT(capabilities.fec,
+ UnorderedElementsAre(FecMechanism::RED, FecMechanism::FLEXFEC));
+}
+
+// End-to-end cricket -> RtpParameters conversion: codecs, header extensions
+// (with id, not preferred_id) and encodings all appear in the result.
+TEST(RtpParametersConversionTest, ToRtpParameters) {
+ cricket::VideoCodec vp8 = cricket::CreateVideoCodec(101, "VP8");
+ vp8.clockrate = 90000;
+
+ cricket::VideoCodec red = cricket::CreateVideoCodec(102, "red");
+ red.clockrate = 90000;
+
+ cricket::VideoCodec ulpfec = cricket::CreateVideoCodec(103, "ulpfec");
+ ulpfec.clockrate = 90000;
+
+ cricket::StreamParamsVec streams;
+ cricket::StreamParams stream;
+ stream.ssrcs.push_back(1234u);
+ streams.push_back(stream);
+
+ RtpParameters rtp_parameters =
+ ToRtpParameters({vp8, red, ulpfec}, {{"uri", 1}, {"uri2", 3}}, streams);
+ ASSERT_EQ(3u, rtp_parameters.codecs.size());
+ EXPECT_EQ("VP8", rtp_parameters.codecs[0].name);
+ EXPECT_EQ("red", rtp_parameters.codecs[1].name);
+ EXPECT_EQ("ulpfec", rtp_parameters.codecs[2].name);
+ ASSERT_EQ(2u, rtp_parameters.header_extensions.size());
+ EXPECT_EQ("uri", rtp_parameters.header_extensions[0].uri);
+ EXPECT_EQ(1, rtp_parameters.header_extensions[0].id);
+ EXPECT_EQ("uri2", rtp_parameters.header_extensions[1].uri);
+ EXPECT_EQ(3, rtp_parameters.header_extensions[1].id);
+ ASSERT_EQ(1u, rtp_parameters.encodings.size());
+ EXPECT_EQ(1234u, rtp_parameters.encodings[0].ssrc);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/rtp_receiver.cc b/third_party/libwebrtc/pc/rtp_receiver.cc
new file mode 100644
index 0000000000..a2b3353c0e
--- /dev/null
+++ b/third_party/libwebrtc/pc/rtp_receiver.cc
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/rtp_receiver.h"
+
+#include <stddef.h>
+
+#include <utility>
+#include <vector>
+
+#include "pc/media_stream.h"
+#include "pc/media_stream_proxy.h"
+#include "rtc_base/thread.h"
+
+namespace webrtc {
+
+// This function is only expected to be called on the signaling thread.
+// The increment of the static counter is not atomic, so thread confinement
+// is what makes this safe. Ids start at 1; 0 is never returned.
+int RtpReceiverInternal::GenerateUniqueId() {
+ static int g_unique_id = 0;
+
+ return ++g_unique_id;
+}
+
+// Creates one proxied MediaStream per stream id, in the same order as the
+// input. Each stream is wrapped in a MediaStreamProxy bound to the current
+// thread (the thread this is invoked on).
+std::vector<rtc::scoped_refptr<MediaStreamInterface>>
+RtpReceiverInternal::CreateStreamsFromIds(std::vector<std::string> stream_ids) {
+ std::vector<rtc::scoped_refptr<MediaStreamInterface>> streams(
+ stream_ids.size());
+ for (size_t i = 0; i < stream_ids.size(); ++i) {
+ streams[i] = MediaStreamProxy::Create(
+ rtc::Thread::Current(), MediaStream::Create(std::move(stream_ids[i])));
+ }
+ return streams;
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/rtp_receiver.h b/third_party/libwebrtc/pc/rtp_receiver.h
new file mode 100644
index 0000000000..16ab011f14
--- /dev/null
+++ b/third_party/libwebrtc/pc/rtp_receiver.h
@@ -0,0 +1,101 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file contains classes that implement RtpReceiverInterface.
+// An RtpReceiver associates a MediaStreamTrackInterface with an underlying
+// transport (provided by cricket::VoiceChannel/cricket::VideoChannel)
+
+#ifndef PC_RTP_RECEIVER_H_
+#define PC_RTP_RECEIVER_H_
+
+#include <stdint.h>
+
+#include <string>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/crypto/frame_decryptor_interface.h"
+#include "api/dtls_transport_interface.h"
+#include "api/media_stream_interface.h"
+#include "api/media_types.h"
+#include "api/rtp_parameters.h"
+#include "api/rtp_receiver_interface.h"
+#include "api/scoped_refptr.h"
+#include "api/video/video_frame.h"
+#include "api/video/video_sink_interface.h"
+#include "api/video/video_source_interface.h"
+#include "media/base/media_channel.h"
+#include "media/base/video_broadcaster.h"
+#include "pc/video_track_source.h"
+#include "rtc_base/thread.h"
+
+namespace webrtc {
+
+// Internal class used by PeerConnection.
+// Extends the public RtpReceiverInterface with lifecycle and wiring methods
+// that the PeerConnection machinery needs but that are not exposed to users.
+class RtpReceiverInternal : public RtpReceiverInterface {
+ public:
+ // Call on the signaling thread, to let the receiver know that the
+ // embedded source object should enter a stopped/ended state and the track's
+ // state set to `kEnded`, a final state that cannot be reversed.
+ virtual void Stop() = 0;
+
+ // Sets the underlying MediaEngine channel associated with this RtpSender.
+ // A VoiceMediaChannel should be used for audio RtpSenders and
+ // a VideoMediaChannel should be used for video RtpSenders.
+ // NOTE:
+ // * SetMediaChannel(nullptr) must be called before the media channel is
+ // destroyed.
+ // * This method must be invoked on the worker thread.
+ virtual void SetMediaChannel(
+ cricket::MediaReceiveChannelInterface* media_channel) = 0;
+
+ // Configures the RtpReceiver with the underlying media channel, with the
+ // given SSRC as the stream identifier.
+ virtual void SetupMediaChannel(uint32_t ssrc) = 0;
+
+ // Configures the RtpReceiver with the underlying media channel to receive an
+ // unsignaled receive stream.
+ virtual void SetupUnsignaledMediaChannel() = 0;
+
+ // Sets the DTLS transport this receiver's media is carried over.
+ virtual void set_transport(
+ rtc::scoped_refptr<DtlsTransportInterface> dtls_transport) = 0;
+ // This SSRC is used as an identifier for the receiver between the API layer
+ // and the WebRtcVideoEngine, WebRtcVoiceEngine layer.
+ virtual absl::optional<uint32_t> ssrc() const = 0;
+
+ // Call this to notify the RtpReceiver when the first packet has been received
+ // on the corresponding channel.
+ virtual void NotifyFirstPacketReceived() = 0;
+
+ // Set the associated remote media streams for this receiver. The remote track
+ // will be removed from any streams that are no longer present and added to
+ // any new streams.
+ virtual void set_stream_ids(std::vector<std::string> stream_ids) = 0;
+ // TODO(https://crbug.com/webrtc/9480): Remove SetStreams() in favor of
+ // set_stream_ids() as soon as downstream projects are no longer dependent on
+ // stream objects.
+ virtual void SetStreams(
+ const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& streams) = 0;
+
+ // Returns an ID that changes if the attached track changes, but
+ // otherwise remains constant. Used to generate IDs for stats.
+ // The special value zero means that no track is attached.
+ virtual int AttachmentId() const = 0;
+
+ protected:
+ // Returns a process-wide monotonically increasing id (never zero).
+ static int GenerateUniqueId();
+
+ // Builds proxied MediaStream objects, one per id, on the calling thread.
+ static std::vector<rtc::scoped_refptr<MediaStreamInterface>>
+ CreateStreamsFromIds(std::vector<std::string> stream_ids);
+};
+
+} // namespace webrtc
+
+#endif // PC_RTP_RECEIVER_H_
diff --git a/third_party/libwebrtc/pc/rtp_receiver_proxy.h b/third_party/libwebrtc/pc/rtp_receiver_proxy.h
new file mode 100644
index 0000000000..d4114e0f0b
--- /dev/null
+++ b/third_party/libwebrtc/pc/rtp_receiver_proxy.h
@@ -0,0 +1,54 @@
+/*
+ * Copyright 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_RTP_RECEIVER_PROXY_H_
+#define PC_RTP_RECEIVER_PROXY_H_
+
+#include <string>
+#include <vector>
+
+#include "api/rtp_receiver_interface.h"
+#include "pc/proxy.h"
+
+namespace webrtc {
+
+// Define proxy for RtpReceiverInterface.
+// Each PROXY_* macro forwards the named method to a specific thread;
+// BYPASS_* methods are invoked directly on the calling thread.
+// NOTE(review): which concrete threads "primary"/"secondary" denote is fixed
+// at proxy creation time — presumably signaling/worker; confirm at call site.
+// TODO(deadbeef): Move this to .cc file. What threads methods are called on is
+// an implementation detail.
+BEGIN_PROXY_MAP(RtpReceiver)
+PROXY_PRIMARY_THREAD_DESTRUCTOR()
+BYPASS_PROXY_CONSTMETHOD0(rtc::scoped_refptr<MediaStreamTrackInterface>, track)
+PROXY_CONSTMETHOD0(rtc::scoped_refptr<DtlsTransportInterface>, dtls_transport)
+PROXY_CONSTMETHOD0(std::vector<std::string>, stream_ids)
+PROXY_CONSTMETHOD0(std::vector<rtc::scoped_refptr<MediaStreamInterface>>,
+ streams)
+BYPASS_PROXY_CONSTMETHOD0(cricket::MediaType, media_type)
+BYPASS_PROXY_CONSTMETHOD0(std::string, id)
+PROXY_SECONDARY_CONSTMETHOD0(RtpParameters, GetParameters)
+PROXY_METHOD1(void, SetObserver, RtpReceiverObserverInterface*)
+PROXY_SECONDARY_METHOD1(void,
+ SetJitterBufferMinimumDelay,
+ absl::optional<double>)
+PROXY_SECONDARY_CONSTMETHOD0(std::vector<RtpSource>, GetSources)
+// TODO(bugs.webrtc.org/12772): Remove.
+PROXY_SECONDARY_METHOD1(void,
+ SetFrameDecryptor,
+ rtc::scoped_refptr<FrameDecryptorInterface>)
+// TODO(bugs.webrtc.org/12772): Remove.
+PROXY_SECONDARY_CONSTMETHOD0(rtc::scoped_refptr<FrameDecryptorInterface>,
+ GetFrameDecryptor)
+PROXY_SECONDARY_METHOD1(void,
+ SetDepacketizerToDecoderFrameTransformer,
+ rtc::scoped_refptr<FrameTransformerInterface>)
+END_PROXY_MAP(RtpReceiver)
+
+} // namespace webrtc
+
+#endif // PC_RTP_RECEIVER_PROXY_H_
diff --git a/third_party/libwebrtc/pc/rtp_sender.cc b/third_party/libwebrtc/pc/rtp_sender.cc
new file mode 100644
index 0000000000..cdae1595b3
--- /dev/null
+++ b/third_party/libwebrtc/pc/rtp_sender.cc
@@ -0,0 +1,899 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/rtp_sender.h"
+
+#include <algorithm>
+#include <atomic>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "absl/algorithm/container.h"
+#include "api/audio_options.h"
+#include "api/media_stream_interface.h"
+#include "api/priority.h"
+#include "api/rtc_error.h"
+#include "media/base/media_engine.h"
+#include "pc/legacy_stats_collector_interface.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/helpers.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/trace_event.h"
+
+namespace webrtc {
+
+namespace {
+
+// This function is only expected to be called on the signaling thread.
+// On the other hand, some test or even production setups may use
+// several signaling threads.
+// Ids start at 1 and are process-wide unique (atomic, monotonically
+// increasing); 0 is reserved to mean "no attachment".
+int GenerateUniqueId() {
+  static std::atomic<int> g_unique_id{0};
+
+  return ++g_unique_id;
+}
+
+// Returns true if a "per-sender" encoding parameter contains a value that isn't
+// its default. Currently max_bitrate_bps and bitrate_priority both are
+// implemented "per-sender," meaning that these encoding parameters
+// are used for the RtpSender as a whole, not for a specific encoding layer.
+// This is done by setting these encoding parameters at index 0 of
+// RtpParameters.encodings. This function can be used to check if these
+// parameters are set at any index other than 0 of RtpParameters.encodings,
+// because they are currently unimplemented to be used for a specific encoding
+// layer.
+bool PerSenderRtpEncodingParameterHasValue(
+    const RtpEncodingParameters& encoding_params) {
+  if (encoding_params.bitrate_priority != kDefaultBitratePriority ||
+      encoding_params.network_priority != Priority::kLow) {
+    return true;
+  }
+  return false;
+}
+
+// In-place removes from `encodings` every layer whose rid appears in `rids`.
+void RemoveEncodingLayers(const std::vector<std::string>& rids,
+                          std::vector<RtpEncodingParameters>* encodings) {
+  RTC_DCHECK(encodings);
+  encodings->erase(
+      std::remove_if(encodings->begin(), encodings->end(),
+                     [&rids](const RtpEncodingParameters& encoding) {
+                       return absl::c_linear_search(rids, encoding.rid);
+                     }),
+      encodings->end());
+}
+
+// Rebuilds a full encoding list by interleaving the layers in `parameters`
+// (which excludes the removed rids) with the layers from `all_layers` whose
+// rid is in `removed_rids`, preserving the order of `all_layers`.
+// CHECKs that the sizes are consistent.
+RtpParameters RestoreEncodingLayers(
+    const RtpParameters& parameters,
+    const std::vector<std::string>& removed_rids,
+    const std::vector<RtpEncodingParameters>& all_layers) {
+  RTC_CHECK_EQ(parameters.encodings.size() + removed_rids.size(),
+               all_layers.size());
+  RtpParameters result(parameters);
+  result.encodings.clear();
+  size_t index = 0;
+  for (const RtpEncodingParameters& encoding : all_layers) {
+    if (absl::c_linear_search(removed_rids, encoding.rid)) {
+      result.encodings.push_back(encoding);
+      continue;
+    }
+    result.encodings.push_back(parameters.encodings[index++]);
+  }
+  return result;
+}
+
+// Move-only wrapper that guarantees a SetParametersCallback is resolved on the
+// signaling thread, re-posting if invoked from another thread. Destroying an
+// instance whose callback was never invoked is a programming error: the
+// destructor resolves with INTERNAL_ERROR and then crashes
+// (RTC_CHECK_NOTREACHED).
+class SignalingThreadCallback {
+ public:
+  SignalingThreadCallback(rtc::Thread* signaling_thread,
+                          SetParametersCallback callback)
+      : signaling_thread_(signaling_thread), callback_(std::move(callback)) {}
+  SignalingThreadCallback(SignalingThreadCallback&& other)
+      : signaling_thread_(other.signaling_thread_),
+        callback_(std::move(other.callback_)) {
+    other.callback_ = nullptr;
+  }
+
+  ~SignalingThreadCallback() {
+    if (callback_) {
+      Resolve(RTCError(RTCErrorType::INTERNAL_ERROR));
+
+      RTC_CHECK_NOTREACHED();
+    }
+  }
+
+  void operator()(const RTCError& error) { Resolve(error); }
+
+ private:
+  // Invokes the callback on the signaling thread; posts a task if called from
+  // any other thread. Clears callback_ in both paths so the destructor check
+  // passes.
+  void Resolve(const RTCError& error) {
+    if (!signaling_thread_->IsCurrent()) {
+      signaling_thread_->PostTask(
+          [callback = std::move(callback_), error]() mutable {
+            webrtc::InvokeSetParametersCallback(callback, error);
+          });
+      callback_ = nullptr;
+      return;
+    }
+
+    webrtc::InvokeSetParametersCallback(callback_, error);
+    callback_ = nullptr;
+  }
+
+  rtc::Thread* signaling_thread_;
+  SetParametersCallback callback_;
+};
+
+}  // namespace
+
+// Returns true if any RtpParameters member that isn't implemented contains a
+// value. This covers a non-empty `mid` and any per-sender encoding parameter
+// (bitrate_priority / network_priority) set at an index other than 0.
+bool UnimplementedRtpParameterHasValue(const RtpParameters& parameters) {
+  if (!parameters.mid.empty()) {
+    return true;
+  }
+  for (size_t i = 0; i < parameters.encodings.size(); ++i) {
+    // Encoding parameters that are per-sender should only contain value at
+    // index 0.
+    if (i != 0 &&
+        PerSenderRtpEncodingParameterHasValue(parameters.encodings[i])) {
+      return true;
+    }
+  }
+  return false;
+}
+
+// Must be constructed on the signaling thread (captured via
+// rtc::Thread::Current()). `set_streams_observer` may be null; it is notified
+// from SetStreams(). Seeds init_parameters_ with one default encoding layer.
+RtpSenderBase::RtpSenderBase(rtc::Thread* worker_thread,
+                             const std::string& id,
+                             SetStreamsObserver* set_streams_observer)
+    : signaling_thread_(rtc::Thread::Current()),
+      worker_thread_(worker_thread),
+      id_(id),
+      set_streams_observer_(set_streams_observer) {
+  RTC_DCHECK(worker_thread);
+  init_parameters_.encodings.emplace_back();
+}
+
+// Stores the frame encryptor and, if a media channel and SSRC are already
+// bound and the sender is not stopped, pushes it to the channel on the worker
+// thread (blocking).
+void RtpSenderBase::SetFrameEncryptor(
+    rtc::scoped_refptr<FrameEncryptorInterface> frame_encryptor) {
+  RTC_DCHECK_RUN_ON(signaling_thread_);
+  frame_encryptor_ = std::move(frame_encryptor);
+  // Special Case: Set the frame encryptor to any value on any existing channel.
+  if (media_channel_ && ssrc_ && !stopped_) {
+    worker_thread_->BlockingCall(
+        [&] { media_channel_->SetFrameEncryptor(ssrc_, frame_encryptor_); });
+  }
+}
+
+// Takes ownership of the encoder selector and applies it to the current
+// channel (if any) via SetEncoderSelectorOnChannel().
+void RtpSenderBase::SetEncoderSelector(
+    std::unique_ptr<VideoEncoderFactory::EncoderSelectorInterface>
+        encoder_selector) {
+  RTC_DCHECK_RUN_ON(signaling_thread_);
+  encoder_selector_ = std::move(encoder_selector);
+  SetEncoderSelectorOnChannel();
+}
+
+// Pushes the stored encoder selector (possibly null) to the media channel on
+// the worker thread, but only when a channel and SSRC are bound and the
+// sender is not stopped.
+void RtpSenderBase::SetEncoderSelectorOnChannel() {
+  RTC_DCHECK_RUN_ON(signaling_thread_);
+  if (media_channel_ && ssrc_ && !stopped_) {
+    worker_thread_->BlockingCall([&] {
+      media_channel_->SetEncoderSelector(ssrc_, encoder_selector_.get());
+    });
+  }
+}
+
+// Attaches (or detaches, with nullptr) the underlying send channel. The
+// channel's media type must match this sender's media type.
+void RtpSenderBase::SetMediaChannel(
+    cricket::MediaSendChannelInterface* media_channel) {
+  RTC_DCHECK(media_channel == nullptr ||
+             media_channel->media_type() == media_type());
+  media_channel_ = media_channel;
+}
+
+// Returns the current send parameters with any disabled (simulcast) layers
+// filtered out. Empty parameters when stopped; init_parameters_ before a
+// channel/SSRC is bound; otherwise a blocking read from the worker thread.
+RtpParameters RtpSenderBase::GetParametersInternal() const {
+  RTC_DCHECK_RUN_ON(signaling_thread_);
+  if (stopped_) {
+    return RtpParameters();
+  }
+  if (!media_channel_ || !ssrc_) {
+    return init_parameters_;
+  }
+  return worker_thread_->BlockingCall([&] {
+    RtpParameters result = media_channel_->GetRtpSendParameters(ssrc_);
+    RemoveEncodingLayers(disabled_rids_, &result.encodings);
+    return result;
+  });
+}
+
+// Like GetParametersInternal() but keeps the disabled layers, so the result
+// can be passed back to SetParametersInternalWithAllLayers().
+RtpParameters RtpSenderBase::GetParametersInternalWithAllLayers() const {
+  RTC_DCHECK_RUN_ON(signaling_thread_);
+  if (stopped_) {
+    return RtpParameters();
+  }
+  if (!media_channel_ || !ssrc_) {
+    return init_parameters_;
+  }
+  return worker_thread_->BlockingCall([&] {
+    RtpParameters result = media_channel_->GetRtpSendParameters(ssrc_);
+    return result;
+  });
+}
+
+// Public getter: also mints a fresh transaction id, which a subsequent
+// SetParameters() call must echo back (W3C getParameters/setParameters
+// pairing).
+RtpParameters RtpSenderBase::GetParameters() const {
+  RTC_DCHECK_RUN_ON(signaling_thread_);
+  RtpParameters result = GetParametersInternal();
+  last_transaction_id_ = rtc::CreateRandomUuid();
+  result.transaction_id = last_transaction_id_.value();
+  return result;
+}
+
+// Validates and applies `parameters`. Before a channel/SSRC is bound the
+// values are staged in init_parameters_; afterwards the work runs on the
+// worker thread, either synchronously (`blocking`) or as a posted task.
+// `callback` is always resolved exactly once with the outcome.
+void RtpSenderBase::SetParametersInternal(const RtpParameters& parameters,
+                                          SetParametersCallback callback,
+                                          bool blocking) {
+  RTC_DCHECK_RUN_ON(signaling_thread_);
+  RTC_DCHECK(!stopped_);
+
+  if (UnimplementedRtpParameterHasValue(parameters)) {
+    RTCError error(
+        RTCErrorType::UNSUPPORTED_PARAMETER,
+        "Attempted to set an unimplemented parameter of RtpParameters.");
+    RTC_LOG(LS_ERROR) << error.message() << " ("
+                      << ::webrtc::ToString(error.type()) << ")";
+    webrtc::InvokeSetParametersCallback(callback, error);
+    return;
+  }
+  if (!media_channel_ || !ssrc_) {
+    auto result = cricket::CheckRtpParametersInvalidModificationAndValues(
+        init_parameters_, parameters, codec_preferences_, absl::nullopt);
+    if (result.ok()) {
+      init_parameters_ = parameters;
+    }
+    webrtc::InvokeSetParametersCallback(callback, result);
+    return;
+  }
+  // NOTE(review): `parameters` is a const&, so `std::move(parameters)` here
+  // actually copies into the capture. Also, in the non-blocking path this
+  // lambda is posted while capturing `this` members by reference (`&`) —
+  // presumably safe because the sender outlives the posted task; confirm.
+  auto task = [&, callback = std::move(callback),
+               parameters = std::move(parameters)]() mutable {
+    RtpParameters rtp_parameters = parameters;
+    RtpParameters old_parameters = media_channel_->GetRtpSendParameters(ssrc_);
+    if (!disabled_rids_.empty()) {
+      // Need to add the inactive layers.
+      rtp_parameters = RestoreEncodingLayers(parameters, disabled_rids_,
+                                             old_parameters.encodings);
+    }
+
+    RTCError result = cricket::CheckRtpParametersInvalidModificationAndValues(
+        old_parameters, rtp_parameters);
+    if (!result.ok()) {
+      webrtc::InvokeSetParametersCallback(callback, result);
+      return;
+    }
+
+    result = CheckCodecParameters(rtp_parameters);
+    if (!result.ok()) {
+      webrtc::InvokeSetParametersCallback(callback, result);
+      return;
+    }
+
+    media_channel_->SetRtpSendParameters(ssrc_, rtp_parameters,
+                                         std::move(callback));
+  };
+  if (blocking)
+    worker_thread_->BlockingCall(task);
+  else
+    worker_thread_->PostTask(std::move(task));
+}
+
+// Synchronous variant used by DisableEncodingLayers(): applies `parameters`
+// including disabled layers (no rid restore step) via a blocking worker-thread
+// call. Stages into init_parameters_ when no channel/SSRC is bound yet.
+RTCError RtpSenderBase::SetParametersInternalWithAllLayers(
+    const RtpParameters& parameters) {
+  RTC_DCHECK_RUN_ON(signaling_thread_);
+  RTC_DCHECK(!stopped_);
+
+  if (UnimplementedRtpParameterHasValue(parameters)) {
+    LOG_AND_RETURN_ERROR(
+        RTCErrorType::UNSUPPORTED_PARAMETER,
+        "Attempted to set an unimplemented parameter of RtpParameters.");
+  }
+  if (!media_channel_ || !ssrc_) {
+    auto result = cricket::CheckRtpParametersInvalidModificationAndValues(
+        init_parameters_, parameters, codec_preferences_, absl::nullopt);
+    if (result.ok()) {
+      init_parameters_ = parameters;
+    }
+    return result;
+  }
+  return worker_thread_->BlockingCall([&] {
+    RtpParameters rtp_parameters = parameters;
+    return media_channel_->SetRtpSendParameters(ssrc_, rtp_parameters, nullptr);
+  });
+}
+
+// Precondition checks shared by SetParameters/SetParametersAsync: the sender
+// and its transceiver must not be stopped, GetParameters() must have been
+// called, and the supplied transaction_id must match the one it issued.
+RTCError RtpSenderBase::CheckSetParameters(const RtpParameters& parameters) {
+  RTC_DCHECK_RUN_ON(signaling_thread_);
+  if (is_transceiver_stopped_) {
+    LOG_AND_RETURN_ERROR(
+        RTCErrorType::INVALID_STATE,
+        "Cannot set parameters on sender of a stopped transceiver.");
+  }
+  if (stopped_) {
+    LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_STATE,
+                         "Cannot set parameters on a stopped sender.");
+  }
+  if (!last_transaction_id_) {
+    LOG_AND_RETURN_ERROR(
+        RTCErrorType::INVALID_STATE,
+        "Failed to set parameters since getParameters() has never been called"
+        " on this sender");
+  }
+  if (last_transaction_id_ != parameters.transaction_id) {
+    LOG_AND_RETURN_ERROR(
+        RTCErrorType::INVALID_MODIFICATION,
+        "Failed to set parameters since the transaction_id doesn't match"
+        " the last value returned from getParameters()");
+  }
+
+  return RTCError::OK();
+}
+
+// Validates scalability-mode values against the codec preferences. For video,
+// matches the current send codec against the preferences to pick up its SVC
+// capabilities before checking. Runs on whatever thread calls it; reads
+// media_channel_ directly.
+RTCError RtpSenderBase::CheckCodecParameters(const RtpParameters& parameters) {
+  absl::optional<cricket::Codec> send_codec = media_channel_->GetSendCodec();
+
+  // Match the currently used codec against the codec preferences to gather
+  // the SVC capabilities.
+  absl::optional<cricket::Codec> send_codec_with_svc_info;
+  if (send_codec && send_codec->type == cricket::Codec::Type::kVideo) {
+    auto codec_match =
+        absl::c_find_if(codec_preferences_, [&](auto& codec_preference) {
+          return send_codec->Matches(codec_preference);
+        });
+    if (codec_match != codec_preferences_.end()) {
+      send_codec_with_svc_info = *codec_match;
+    }
+  }
+
+  return cricket::CheckScalabilityModeValues(parameters, codec_preferences_,
+                                             send_codec_with_svc_info);
+}
+
+// Synchronous public setter: validates, applies via a blocking internal call,
+// then waits on an event until the (possibly cross-thread) callback fires.
+// Consumes the transaction id on completion, success or failure.
+RTCError RtpSenderBase::SetParameters(const RtpParameters& parameters) {
+  RTC_DCHECK_RUN_ON(signaling_thread_);
+  TRACE_EVENT0("webrtc", "RtpSenderBase::SetParameters");
+  RTCError result = CheckSetParameters(parameters);
+  if (!result.ok())
+    return result;
+
+  // Some tests rely on working in single thread mode without a run loop and a
+  // blocking call is required to keep them working. The encoder configuration
+  // also involves another thread with an asynchronous task, thus we still do
+  // need to wait for the callback to be resolved this way.
+  std::unique_ptr<rtc::Event> done_event = std::make_unique<rtc::Event>();
+  SetParametersInternal(
+      parameters,
+      [done = done_event.get(), &result](RTCError error) {
+        result = error;
+        done->Set();
+      },
+      true);
+  done_event->Wait(rtc::Event::kForever);
+  last_transaction_id_.reset();
+  return result;
+}
+
+// Asynchronous public setter: validates up front, then delegates to the
+// non-blocking internal path. SignalingThreadCallback guarantees `callback`
+// resolves on the signaling thread, where the transaction id is consumed.
+void RtpSenderBase::SetParametersAsync(const RtpParameters& parameters,
+                                       SetParametersCallback callback) {
+  RTC_DCHECK_RUN_ON(signaling_thread_);
+  RTC_DCHECK(callback);
+  TRACE_EVENT0("webrtc", "RtpSenderBase::SetParametersAsync");
+  RTCError result = CheckSetParameters(parameters);
+  if (!result.ok()) {
+    webrtc::InvokeSetParametersCallback(callback, result);
+    return;
+  }
+
+  SetParametersInternal(
+      parameters,
+      SignalingThreadCallback(
+          signaling_thread_,
+          [this, callback = std::move(callback)](RTCError error) mutable {
+            last_transaction_id_.reset();
+            webrtc::InvokeSetParametersCallback(callback, error);
+          }),
+      false);
+}
+
+// Replaces stream_ids_ with `stream_ids`, dropping duplicates while keeping
+// first-occurrence order (the predicate checks against the ids copied so far).
+void RtpSenderBase::set_stream_ids(const std::vector<std::string>& stream_ids) {
+  stream_ids_.clear();
+  absl::c_copy_if(stream_ids, std::back_inserter(stream_ids_),
+                  [this](const std::string& stream_id) {
+                    return !absl::c_linear_search(stream_ids_, stream_id);
+                  });
+}
+
+// Public setter: updates the ids and notifies the observer (if still set)
+// so the PeerConnection can renegotiate.
+void RtpSenderBase::SetStreams(const std::vector<std::string>& stream_ids) {
+  set_stream_ids(stream_ids);
+  if (set_streams_observer_)
+    set_streams_observer_->OnSetStreams();
+}
+
+// Swaps the attached track. Rejects calls on a stopped sender or with a track
+// of the wrong kind. Detaches/attaches observers and stats, and starts or
+// clears sending depending on whether the sender can now send. Returns false
+// on rejection, true otherwise.
+bool RtpSenderBase::SetTrack(MediaStreamTrackInterface* track) {
+  RTC_DCHECK_RUN_ON(signaling_thread_);
+  TRACE_EVENT0("webrtc", "RtpSenderBase::SetTrack");
+  if (stopped_) {
+    RTC_LOG(LS_ERROR) << "SetTrack can't be called on a stopped RtpSender.";
+    return false;
+  }
+  if (track && track->kind() != track_kind()) {
+    RTC_LOG(LS_ERROR) << "SetTrack with " << track->kind()
+                      << " called on RtpSender with " << track_kind()
+                      << " track.";
+    return false;
+  }
+
+  // Detach from old track.
+  if (track_) {
+    DetachTrack();
+    track_->UnregisterObserver(this);
+    RemoveTrackFromStats();
+  }
+
+  // Attach to new track.
+  bool prev_can_send_track = can_send_track();
+  // Keep a reference to the old track to keep it alive until we call SetSend.
+  rtc::scoped_refptr<MediaStreamTrackInterface> old_track = track_;
+  track_ = track;
+  if (track_) {
+    track_->RegisterObserver(this);
+    AttachTrack();
+  }
+
+  // Update channel.
+  if (can_send_track()) {
+    SetSend();
+    AddTrackToStats();
+  } else if (prev_can_send_track) {
+    ClearSend();
+  }
+  // 0 means "no track attached"; otherwise a fresh process-unique id.
+  attachment_id_ = (track_ ? GenerateUniqueId() : 0);
+  return true;
+}
+
+// Binds this sender to `ssrc` (set when a local description is applied).
+// Tears down sending under the old SSRC, rebinds, flushes any parameters
+// staged in init_parameters_ into the channel, and re-applies the stored
+// frame encryptor / frame transformer / encoder selector.
+void RtpSenderBase::SetSsrc(uint32_t ssrc) {
+  RTC_DCHECK_RUN_ON(signaling_thread_);
+  TRACE_EVENT0("webrtc", "RtpSenderBase::SetSsrc");
+  if (stopped_ || ssrc == ssrc_) {
+    return;
+  }
+  // If we are already sending with a particular SSRC, stop sending.
+  if (can_send_track()) {
+    ClearSend();
+    RemoveTrackFromStats();
+  }
+  ssrc_ = ssrc;
+  if (can_send_track()) {
+    SetSend();
+    AddTrackToStats();
+  }
+  if (!init_parameters_.encodings.empty() ||
+      init_parameters_.degradation_preference.has_value()) {
+    worker_thread_->BlockingCall([&] {
+      RTC_DCHECK(media_channel_);
+      // Get the current parameters, which are constructed from the SDP.
+      // The number of layers in the SDP is currently authoritative to support
+      // SDP munging for Plan-B simulcast with "a=ssrc-group:SIM <ssrc-id>..."
+      // lines as described in RFC 5576.
+      // All fields should be default constructed and the SSRC field set, which
+      // we need to copy.
+      RtpParameters current_parameters =
+          media_channel_->GetRtpSendParameters(ssrc_);
+      RTC_CHECK_GE(current_parameters.encodings.size(),
+                   init_parameters_.encodings.size());
+      for (size_t i = 0; i < init_parameters_.encodings.size(); ++i) {
+        // Keep the SDP-derived ssrc/rid, overlay everything else from the
+        // staged parameters.
+        init_parameters_.encodings[i].ssrc =
+            current_parameters.encodings[i].ssrc;
+        init_parameters_.encodings[i].rid = current_parameters.encodings[i].rid;
+        current_parameters.encodings[i] = init_parameters_.encodings[i];
+      }
+      current_parameters.degradation_preference =
+          init_parameters_.degradation_preference;
+      media_channel_->SetRtpSendParameters(ssrc_, current_parameters, nullptr);
+      // Staged parameters have been consumed.
+      init_parameters_.encodings.clear();
+      init_parameters_.degradation_preference = absl::nullopt;
+    });
+  }
+  // Attempt to attach the frame decryptor to the current media channel.
+  if (frame_encryptor_) {
+    SetFrameEncryptor(frame_encryptor_);
+  }
+  if (frame_transformer_) {
+    SetEncoderToPacketizerFrameTransformer(frame_transformer_);
+  }
+  if (encoder_selector_) {
+    SetEncoderSelectorOnChannel();
+  }
+}
+
+// Permanently stops the sender: detaches the track observer, clears sending,
+// drops the channel and observer pointers, and marks stopped_. Idempotent.
+void RtpSenderBase::Stop() {
+  RTC_DCHECK_RUN_ON(signaling_thread_);
+  TRACE_EVENT0("webrtc", "RtpSenderBase::Stop");
+  // TODO(deadbeef): Need to do more here to fully stop sending packets.
+  if (stopped_) {
+    return;
+  }
+  if (track_) {
+    DetachTrack();
+    track_->UnregisterObserver(this);
+  }
+  if (can_send_track()) {
+    ClearSend();
+    RemoveTrackFromStats();
+  }
+  media_channel_ = nullptr;
+  set_streams_observer_ = nullptr;
+  stopped_ = true;
+}
+
+// Deactivates and hides the simulcast layers named in `rids`. Every rid must
+// refer to an existing layer. Before a channel/SSRC is bound the layers are
+// simply removed from init_parameters_; afterwards they are marked inactive
+// in the channel and tracked in disabled_rids_ so GetParameters() filters
+// them out. Any outstanding setParameters transaction is invalidated.
+RTCError RtpSenderBase::DisableEncodingLayers(
+    const std::vector<std::string>& rids) {
+  RTC_DCHECK_RUN_ON(signaling_thread_);
+  if (stopped_) {
+    LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_STATE,
+                         "Cannot disable encodings on a stopped sender.");
+  }
+
+  if (rids.empty()) {
+    return RTCError::OK();
+  }
+
+  // Check that all the specified layers exist and disable them in the channel.
+  RtpParameters parameters = GetParametersInternalWithAllLayers();
+  for (const std::string& rid : rids) {
+    if (absl::c_none_of(parameters.encodings,
+                        [&rid](const RtpEncodingParameters& encoding) {
+                          return encoding.rid == rid;
+                        })) {
+      LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
+                           "RID: " + rid + " does not refer to a valid layer.");
+    }
+  }
+
+  if (!media_channel_ || !ssrc_) {
+    RemoveEncodingLayers(rids, &init_parameters_.encodings);
+    // Invalidate any transaction upon success.
+    last_transaction_id_.reset();
+    return RTCError::OK();
+  }
+
+  for (RtpEncodingParameters& encoding : parameters.encodings) {
+    // Remain active if not in the disable list.
+    encoding.active &= absl::c_none_of(
+        rids,
+        [&encoding](const std::string& rid) { return encoding.rid == rid; });
+  }
+
+  RTCError result = SetParametersInternalWithAllLayers(parameters);
+  if (result.ok()) {
+    disabled_rids_.insert(disabled_rids_.end(), rids.begin(), rids.end());
+    // Invalidate any transaction upon success.
+    last_transaction_id_.reset();
+  }
+  return result;
+}
+
+// Stores the frame transformer and, if a channel and SSRC are bound and the
+// sender is running, pushes it to the channel on the worker thread (blocking).
+void RtpSenderBase::SetEncoderToPacketizerFrameTransformer(
+    rtc::scoped_refptr<FrameTransformerInterface> frame_transformer) {
+  RTC_DCHECK_RUN_ON(signaling_thread_);
+  frame_transformer_ = std::move(frame_transformer);
+  if (media_channel_ && ssrc_ && !stopped_) {
+    worker_thread_->BlockingCall([&] {
+      media_channel_->SetEncoderToPacketizerFrameTransformer(
+          ssrc_, frame_transformer_);
+    });
+  }
+}
+
+// Adapter that forwards audio from an AudioTrackSinkInterface to a
+// cricket::AudioSource::Sink. All sink_ access is guarded by lock_ since
+// OnData arrives on the audio capture path while SetSink runs elsewhere.
+LocalAudioSinkAdapter::LocalAudioSinkAdapter() : sink_(nullptr) {}
+
+LocalAudioSinkAdapter::~LocalAudioSinkAdapter() {
+  MutexLock lock(&lock_);
+  // Notify a still-attached sink that the source is going away.
+  if (sink_)
+    sink_->OnClose();
+}
+
+// Forwards one chunk of PCM audio to the attached sink (if any) and caches
+// the sink's preferred channel count.
+void LocalAudioSinkAdapter::OnData(
+    const void* audio_data,
+    int bits_per_sample,
+    int sample_rate,
+    size_t number_of_channels,
+    size_t number_of_frames,
+    absl::optional<int64_t> absolute_capture_timestamp_ms) {
+  TRACE_EVENT2("webrtc", "LocalAudioSinkAdapter::OnData", "sample_rate",
+               sample_rate, "number_of_frames", number_of_frames);
+  MutexLock lock(&lock_);
+  if (sink_) {
+    sink_->OnData(audio_data, bits_per_sample, sample_rate, number_of_channels,
+                  number_of_frames, absolute_capture_timestamp_ms);
+    num_preferred_channels_ = sink_->NumPreferredChannels();
+  }
+}
+
+// Attaches or detaches the downstream sink. DCHECKs that a non-null sink is
+// only installed when none is currently set (attach/detach must alternate).
+void LocalAudioSinkAdapter::SetSink(cricket::AudioSource::Sink* sink) {
+  MutexLock lock(&lock_);
+  RTC_DCHECK(!sink || !sink_);
+  sink_ = sink;
+}
+
+// Factory: creates a ref-counted AudioRtpSender. `stats` may be null.
+rtc::scoped_refptr<AudioRtpSender> AudioRtpSender::Create(
+    rtc::Thread* worker_thread,
+    const std::string& id,
+    LegacyStatsCollectorInterface* stats,
+    SetStreamsObserver* set_streams_observer) {
+  return rtc::make_ref_counted<AudioRtpSender>(worker_thread, id, stats,
+                                               set_streams_observer);
+}
+
+// Constructed on the signaling thread (rtc::Thread::Current()); also creates
+// the DTMF sender, its proxy, and the local audio sink adapter.
+AudioRtpSender::AudioRtpSender(rtc::Thread* worker_thread,
+                               const std::string& id,
+                               LegacyStatsCollectorInterface* legacy_stats,
+                               SetStreamsObserver* set_streams_observer)
+    : RtpSenderBase(worker_thread, id, set_streams_observer),
+      legacy_stats_(legacy_stats),
+      dtmf_sender_(DtmfSender::Create(rtc::Thread::Current(), this)),
+      dtmf_sender_proxy_(
+          DtmfSenderProxy::Create(rtc::Thread::Current(), dtmf_sender_)),
+      sink_adapter_(new LocalAudioSinkAdapter()) {}
+
+AudioRtpSender::~AudioRtpSender() {
+  // Detach the DTMF sender from this provider before tearing down.
+  dtmf_sender_->OnDtmfProviderDestroyed();
+  Stop();
+}
+
+// Returns true if DTMF can be sent: requires an audio channel, a bound SSRC,
+// and channel-level support (queried on the worker thread, blocking).
+bool AudioRtpSender::CanInsertDtmf() {
+  if (!media_channel_) {
+    RTC_LOG(LS_ERROR) << "CanInsertDtmf: No audio channel exists.";
+    return false;
+  }
+  // Check that this RTP sender is active (description has been applied that
+  // matches an SSRC to its ID).
+  if (!ssrc_) {
+    RTC_LOG(LS_ERROR) << "CanInsertDtmf: Sender does not have SSRC.";
+    return false;
+  }
+  return worker_thread_->BlockingCall(
+      [&] { return voice_media_channel()->CanInsertDtmf(); });
+}
+
+// Queues one DTMF event (`code` for `duration` ms) on the channel via a
+// blocking worker-thread call. Returns false and logs on any failure.
+bool AudioRtpSender::InsertDtmf(int code, int duration) {
+  if (!media_channel_) {
+    RTC_LOG(LS_ERROR) << "InsertDtmf: No audio channel exists.";
+    return false;
+  }
+  if (!ssrc_) {
+    RTC_LOG(LS_ERROR) << "InsertDtmf: Sender does not have SSRC.";
+    return false;
+  }
+  bool success = worker_thread_->BlockingCall(
+      [&] { return voice_media_channel()->InsertDtmf(ssrc_, code, duration); });
+  if (!success) {
+    RTC_LOG(LS_ERROR) << "Failed to insert DTMF to channel.";
+  }
+  return success;
+}
+
+// Track observer callback: re-applies send state when the track's enabled
+// flag flips (the cache avoids redundant SetSend calls).
+void AudioRtpSender::OnChanged() {
+  RTC_DCHECK_RUN_ON(signaling_thread_);
+  TRACE_EVENT0("webrtc", "AudioRtpSender::OnChanged");
+  RTC_DCHECK(!stopped_);
+  if (cached_track_enabled_ != track_->enabled()) {
+    cached_track_enabled_ = track_->enabled();
+    if (can_send_track()) {
+      SetSend();
+    }
+  }
+}
+
+// Stops receiving audio from the track into our sink adapter.
+void AudioRtpSender::DetachTrack() {
+  RTC_DCHECK(track_);
+  audio_track()->RemoveSink(sink_adapter_.get());
+}
+
+// Starts receiving audio from the track and snapshots its enabled state.
+void AudioRtpSender::AttachTrack() {
+  RTC_DCHECK(track_);
+  cached_track_enabled_ = track_->enabled();
+  audio_track()->AddSink(sink_adapter_.get());
+}
+
+// Registers the track/SSRC pair with the legacy stats collector (if present).
+void AudioRtpSender::AddTrackToStats() {
+  if (can_send_track() && legacy_stats_) {
+    legacy_stats_->AddLocalAudioTrack(audio_track().get(), ssrc_);
+  }
+}
+
+void AudioRtpSender::RemoveTrackFromStats() {
+  if (can_send_track() && legacy_stats_) {
+    legacy_stats_->RemoveLocalAudioTrack(audio_track().get(), ssrc_);
+  }
+}
+
+// Returns the thread-safe proxy for the DTMF sender.
+rtc::scoped_refptr<DtmfSenderInterface> AudioRtpSender::GetDtmfSender() const {
+  RTC_DCHECK_RUN_ON(signaling_thread_);
+  return dtmf_sender_proxy_;
+}
+
+// Key frames are a video concept; always fails for audio.
+RTCError AudioRtpSender::GenerateKeyFrame(
+    const std::vector<std::string>& rids) {
+  RTC_DCHECK_RUN_ON(signaling_thread_);
+  RTC_DLOG(LS_ERROR) << "Tried to get generate a key frame for audio.";
+  return RTCError(RTCErrorType::UNSUPPORTED_OPERATION,
+                  "Generating key frames for audio is not supported.");
+}
+
+// Starts (or updates) sending audio: gathers local source options, then
+// applies track-enabled state, options, and the sink adapter to the channel
+// via a blocking worker-thread call.
+void AudioRtpSender::SetSend() {
+  RTC_DCHECK_RUN_ON(signaling_thread_);
+  RTC_DCHECK(!stopped_);
+  RTC_DCHECK(can_send_track());
+  if (!media_channel_) {
+    RTC_LOG(LS_ERROR) << "SetAudioSend: No audio channel exists.";
+    return;
+  }
+  cricket::AudioOptions options;
+#if !defined(WEBRTC_CHROMIUM_BUILD) && !defined(WEBRTC_WEBKIT_BUILD)
+  // TODO(tommi): Remove this hack when we move CreateAudioSource out of
+  // PeerConnection. This is a bit of a strange way to apply local audio
+  // options since it is also applied to all streams/channels, local or remote.
+  if (track_->enabled() && audio_track()->GetSource() &&
+      !audio_track()->GetSource()->remote()) {
+    options = audio_track()->GetSource()->options();
+  }
+#endif
+
+  // `track_->enabled()` hops to the signaling thread, so call it before we hop
+  // to the worker thread or else it will deadlock.
+  bool track_enabled = track_->enabled();
+  bool success = worker_thread_->BlockingCall([&] {
+    return voice_media_channel()->SetAudioSend(ssrc_, track_enabled, &options,
+                                               sink_adapter_.get());
+  });
+  if (!success) {
+    RTC_LOG(LS_ERROR) << "SetAudioSend: ssrc is incorrect: " << ssrc_;
+  }
+}
+
+// Stops sending audio under the current SSRC: disables send with default
+// options and detaches the source (blocking worker-thread call).
+void AudioRtpSender::ClearSend() {
+  RTC_DCHECK_RUN_ON(signaling_thread_);
+  RTC_DCHECK(ssrc_ != 0);
+  RTC_DCHECK(!stopped_);
+  if (!media_channel_) {
+    RTC_LOG(LS_WARNING) << "ClearAudioSend: No audio channel exists.";
+    return;
+  }
+  cricket::AudioOptions options;
+  bool success = worker_thread_->BlockingCall([&] {
+    return voice_media_channel()->SetAudioSend(ssrc_, false, &options, nullptr);
+  });
+  if (!success) {
+    RTC_LOG(LS_WARNING) << "ClearAudioSend: ssrc is incorrect: " << ssrc_;
+  }
+}
+
+// Factory: creates a ref-counted VideoRtpSender.
+rtc::scoped_refptr<VideoRtpSender> VideoRtpSender::Create(
+    rtc::Thread* worker_thread,
+    const std::string& id,
+    SetStreamsObserver* set_streams_observer) {
+  return rtc::make_ref_counted<VideoRtpSender>(worker_thread, id,
+                                               set_streams_observer);
+}
+
+VideoRtpSender::VideoRtpSender(rtc::Thread* worker_thread,
+                               const std::string& id,
+                               SetStreamsObserver* set_streams_observer)
+    : RtpSenderBase(worker_thread, id, set_streams_observer) {}
+
+VideoRtpSender::~VideoRtpSender() {
+  Stop();
+}
+
+// Track observer callback: re-applies send options when the track's content
+// hint changes (the cache avoids redundant SetSend calls).
+void VideoRtpSender::OnChanged() {
+  RTC_DCHECK_RUN_ON(signaling_thread_);
+  TRACE_EVENT0("webrtc", "VideoRtpSender::OnChanged");
+  RTC_DCHECK(!stopped_);
+
+  auto content_hint = video_track()->content_hint();
+  if (cached_track_content_hint_ != content_hint) {
+    cached_track_content_hint_ = content_hint;
+    if (can_send_track()) {
+      SetSend();
+    }
+  }
+}
+
+// Snapshots the track's content hint; the actual source hookup happens in
+// SetSend() (video has no sink adapter, unlike audio).
+void VideoRtpSender::AttachTrack() {
+  RTC_DCHECK(track_);
+  cached_track_content_hint_ = video_track()->content_hint();
+}
+
+// DTMF is an audio-only feature; always returns null for video.
+rtc::scoped_refptr<DtmfSenderInterface> VideoRtpSender::GetDtmfSender() const {
+  RTC_DCHECK_RUN_ON(signaling_thread_);
+  RTC_DLOG(LS_ERROR) << "Tried to get DTMF sender from video sender.";
+  return nullptr;
+}
+
+// Requests a key frame for the given simulcast layers (or all layers if
+// `rids` is empty). Validates every rid against the current encodings, then
+// posts the request to the worker thread. A stopped/unbound sender is a
+// logged no-op that still returns OK.
+RTCError VideoRtpSender::GenerateKeyFrame(
+    const std::vector<std::string>& rids) {
+  RTC_DCHECK_RUN_ON(signaling_thread_);
+  if (video_media_channel() && ssrc_ && !stopped_) {
+    const auto parameters = GetParametersInternal();
+    for (const auto& rid : rids) {
+      if (rid.empty()) {
+        LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
+                             "Attempted to specify an empty rid.");
+      }
+      if (!absl::c_any_of(parameters.encodings,
+                          [&rid](const RtpEncodingParameters& parameters) {
+                            return parameters.rid == rid;
+                          })) {
+        LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
+                             "Attempted to specify a rid not configured.");
+      }
+    }
+    // NOTE(review): `[&, rids]` copies rids but captures `this` members by
+    // reference in a *posted* task — presumably safe because the sender
+    // outlives pending worker tasks; confirm the lifetime guarantee.
+    worker_thread_->PostTask([&, rids] {
+      video_media_channel()->GenerateSendKeyFrame(ssrc_, rids);
+    });
+  } else {
+    RTC_LOG(LS_WARNING) << "Tried to generate key frame for sender that is "
+                           "stopped or has no media channel.";
+  }
+  return RTCError::OK();
+}
+
+// Starts (or updates) sending video: derives screencast/denoising options
+// from the source, lets the content hint override the screencast flag, and
+// attaches the track as the frame source via a blocking worker-thread call.
+void VideoRtpSender::SetSend() {
+  RTC_DCHECK_RUN_ON(signaling_thread_);
+  RTC_DCHECK(!stopped_);
+  RTC_DCHECK(can_send_track());
+  if (!media_channel_) {
+    RTC_LOG(LS_ERROR) << "SetVideoSend: No video channel exists.";
+    return;
+  }
+  cricket::VideoOptions options;
+  VideoTrackSourceInterface* source = video_track()->GetSource();
+  if (source) {
+    options.is_screencast = source->is_screencast();
+    options.video_noise_reduction = source->needs_denoising();
+  }
+  options.content_hint = cached_track_content_hint_;
+  // The content hint overrides the source's screencast classification:
+  // kFluid forces camera-style treatment, kDetailed/kText force screencast.
+  switch (cached_track_content_hint_) {
+    case VideoTrackInterface::ContentHint::kNone:
+      break;
+    case VideoTrackInterface::ContentHint::kFluid:
+      options.is_screencast = false;
+      break;
+    case VideoTrackInterface::ContentHint::kDetailed:
+    case VideoTrackInterface::ContentHint::kText:
+      options.is_screencast = true;
+      break;
+  }
+  bool success = worker_thread_->BlockingCall([&] {
+    return video_media_channel()->SetVideoSend(ssrc_, &options,
+                                               video_track().get());
+  });
+  RTC_DCHECK(success);
+}
+
+// Stops sending video under the current SSRC by detaching options and source
+// (blocking worker-thread call). Failure is tolerated by design — see below.
+void VideoRtpSender::ClearSend() {
+  RTC_DCHECK_RUN_ON(signaling_thread_);
+  RTC_DCHECK(ssrc_ != 0);
+  RTC_DCHECK(!stopped_);
+  if (!media_channel_) {
+    RTC_LOG(LS_WARNING) << "SetVideoSend: No video channel exists.";
+    return;
+  }
+  // Allow SetVideoSend to fail since `enable` is false and `source` is null.
+  // This the normal case when the underlying media channel has already been
+  // deleted.
+  worker_thread_->BlockingCall(
+      [&] { video_media_channel()->SetVideoSend(ssrc_, nullptr, nullptr); });
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/rtp_sender.h b/third_party/libwebrtc/pc/rtp_sender.h
new file mode 100644
index 0000000000..d29c3760e6
--- /dev/null
+++ b/third_party/libwebrtc/pc/rtp_sender.h
@@ -0,0 +1,454 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file contains classes that implement RtpSenderInterface.
+// An RtpSender associates a MediaStreamTrackInterface with an underlying
+// transport (provided by AudioProviderInterface/VideoProviderInterface)
+
+#ifndef PC_RTP_SENDER_H_
+#define PC_RTP_SENDER_H_
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/crypto/frame_encryptor_interface.h"
+#include "api/dtls_transport_interface.h"
+#include "api/dtmf_sender_interface.h"
+#include "api/frame_transformer_interface.h"
+#include "api/media_stream_interface.h"
+#include "api/media_types.h"
+#include "api/rtc_error.h"
+#include "api/rtp_parameters.h"
+#include "api/rtp_sender_interface.h"
+#include "api/scoped_refptr.h"
+#include "api/sequence_checker.h"
+#include "media/base/audio_source.h"
+#include "media/base/media_channel.h"
+#include "pc/dtmf_sender.h"
+#include "pc/legacy_stats_collector_interface.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/thread_annotations.h"
+
+namespace webrtc {
+
+bool UnimplementedRtpParameterHasValue(const RtpParameters& parameters);
+
+// Internal interface used by PeerConnection.
+class RtpSenderInternal : public RtpSenderInterface {
+ public:
+ // Sets the underlying MediaEngine channel associated with this RtpSender.
+ // A VoiceMediaChannel should be used for audio RtpSenders and
+ // a VideoMediaChannel should be used for video RtpSenders.
+ // Must call SetMediaChannel(nullptr) before the media channel is destroyed.
+ virtual void SetMediaChannel(
+ cricket::MediaSendChannelInterface* media_channel) = 0;
+
+ // Used to set the SSRC of the sender, once a local description has been set.
+  // If `ssrc` is 0, this indicates that the sender should disconnect from the
+ // underlying transport (this occurs if the sender isn't seen in a local
+ // description).
+ virtual void SetSsrc(uint32_t ssrc) = 0;
+
+ virtual void set_stream_ids(const std::vector<std::string>& stream_ids) = 0;
+ virtual void set_init_send_encodings(
+ const std::vector<RtpEncodingParameters>& init_send_encodings) = 0;
+ virtual void set_transport(
+ rtc::scoped_refptr<DtlsTransportInterface> dtls_transport) = 0;
+
+ virtual void Stop() = 0;
+
+ // `GetParameters` and `SetParameters` operate with a transactional model.
+ // Allow access to get/set parameters without invalidating transaction id.
+ virtual RtpParameters GetParametersInternal() const = 0;
+ virtual void SetParametersInternal(const RtpParameters& parameters,
+ SetParametersCallback,
+ bool blocking) = 0;
+
+ // GetParameters and SetParameters will remove deactivated simulcast layers
+ // and restore them on SetParameters. This is probably a Bad Idea, but we
+ // do not know who depends on this behavior
+ virtual RtpParameters GetParametersInternalWithAllLayers() const = 0;
+ virtual RTCError SetParametersInternalWithAllLayers(
+ const RtpParameters& parameters) = 0;
+
+ // Additional checks that are specific to the current codec settings
+ virtual RTCError CheckCodecParameters(const RtpParameters& parameters) {
+ return webrtc::RTCError::OK();
+ }
+
+ // Returns an ID that changes every time SetTrack() is called, but
+ // otherwise remains constant. Used to generate IDs for stats.
+ // The special value zero means that no track is attached.
+ virtual int AttachmentId() const = 0;
+
+ // Disables the layers identified by the specified RIDs.
+ // If the specified list is empty, this is a no-op.
+ virtual RTCError DisableEncodingLayers(
+ const std::vector<std::string>& rid) = 0;
+
+ virtual void SetTransceiverAsStopped() = 0;
+
+ // Used by the owning transceiver to inform the sender on the currently
+ // selected codecs.
+ virtual void SetCodecPreferences(
+ std::vector<cricket::Codec> codec_preferences) = 0;
+};
+
+// Shared implementation for RtpSenderInternal interface.
+class RtpSenderBase : public RtpSenderInternal, public ObserverInterface {
+ public:
+ class SetStreamsObserver {
+ public:
+ virtual ~SetStreamsObserver() = default;
+ virtual void OnSetStreams() = 0;
+ };
+
+ // Sets the underlying MediaEngine channel associated with this RtpSender.
+ // A VoiceMediaChannel should be used for audio RtpSenders and
+ // a VideoMediaChannel should be used for video RtpSenders.
+ // Must call SetMediaChannel(nullptr) before the media channel is destroyed.
+ void SetMediaChannel(
+ cricket::MediaSendChannelInterface* media_channel) override;
+
+ bool SetTrack(MediaStreamTrackInterface* track) override;
+ rtc::scoped_refptr<MediaStreamTrackInterface> track() const override {
+ // This method is currently called from the worker thread by
+ // RTCStatsCollector::PrepareTransceiverStatsInfosAndCallStats_s_w_n.
+ // RTC_DCHECK_RUN_ON(signaling_thread_);
+ return track_;
+ }
+
+ RtpParameters GetParameters() const override;
+ RTCError SetParameters(const RtpParameters& parameters) override;
+ void SetParametersAsync(const RtpParameters& parameters,
+ SetParametersCallback callback) override;
+
+ // `GetParameters` and `SetParameters` operate with a transactional model.
+ // Allow access to get/set parameters without invalidating transaction id.
+ RtpParameters GetParametersInternal() const override;
+ void SetParametersInternal(const RtpParameters& parameters,
+ SetParametersCallback callback = nullptr,
+ bool blocking = true) override;
+ RTCError CheckSetParameters(const RtpParameters& parameters);
+ RTCError CheckCodecParameters(const RtpParameters& parameters) override;
+ RtpParameters GetParametersInternalWithAllLayers() const override;
+ RTCError SetParametersInternalWithAllLayers(
+ const RtpParameters& parameters) override;
+
+ // Used to set the SSRC of the sender, once a local description has been set.
+  // If `ssrc` is 0, this indicates that the sender should disconnect from the
+ // underlying transport (this occurs if the sender isn't seen in a local
+ // description).
+ void SetSsrc(uint32_t ssrc) override;
+ uint32_t ssrc() const override {
+ // This method is currently called from the worker thread by
+ // RTCStatsCollector::PrepareTransceiverStatsInfosAndCallStats_s_w_n.
+ // RTC_DCHECK_RUN_ON(signaling_thread_);
+ return ssrc_;
+ }
+
+ std::vector<std::string> stream_ids() const override {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ return stream_ids_;
+ }
+
+ // Set stream ids, eliminating duplicates in the process.
+ void set_stream_ids(const std::vector<std::string>& stream_ids) override;
+ void SetStreams(const std::vector<std::string>& stream_ids) override;
+
+ std::string id() const override { return id_; }
+
+ void set_init_send_encodings(
+ const std::vector<RtpEncodingParameters>& init_send_encodings) override {
+ init_parameters_.encodings = init_send_encodings;
+ }
+ std::vector<RtpEncodingParameters> init_send_encodings() const override {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ return init_parameters_.encodings;
+ }
+
+ void set_transport(
+ rtc::scoped_refptr<DtlsTransportInterface> dtls_transport) override {
+ dtls_transport_ = dtls_transport;
+ }
+ rtc::scoped_refptr<DtlsTransportInterface> dtls_transport() const override {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ return dtls_transport_;
+ }
+
+ void SetFrameEncryptor(
+ rtc::scoped_refptr<FrameEncryptorInterface> frame_encryptor) override;
+
+ rtc::scoped_refptr<FrameEncryptorInterface> GetFrameEncryptor()
+ const override {
+ return frame_encryptor_;
+ }
+
+ void Stop() override;
+
+ // Returns an ID that changes every time SetTrack() is called, but
+ // otherwise remains constant. Used to generate IDs for stats.
+ // The special value zero means that no track is attached.
+ int AttachmentId() const override { return attachment_id_; }
+
+ // Disables the layers identified by the specified RIDs.
+ // If the specified list is empty, this is a no-op.
+ RTCError DisableEncodingLayers(const std::vector<std::string>& rid) override;
+
+ void SetEncoderToPacketizerFrameTransformer(
+ rtc::scoped_refptr<FrameTransformerInterface> frame_transformer) override;
+
+ void SetEncoderSelector(
+ std::unique_ptr<VideoEncoderFactory::EncoderSelectorInterface>
+ encoder_selector) override;
+
+ void SetEncoderSelectorOnChannel();
+
+ void SetTransceiverAsStopped() override {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ is_transceiver_stopped_ = true;
+ }
+
+ void SetCodecPreferences(
+ std::vector<cricket::Codec> codec_preferences) override {
+ codec_preferences_ = codec_preferences;
+ }
+
+ protected:
+ // If `set_streams_observer` is not null, it is invoked when SetStreams()
+ // is called. `set_streams_observer` is not owned by this object. If not
+ // null, it must be valid at least until this sender becomes stopped.
+ RtpSenderBase(rtc::Thread* worker_thread,
+ const std::string& id,
+ SetStreamsObserver* set_streams_observer);
+ // TODO(bugs.webrtc.org/8694): Since SSRC == 0 is technically valid, figure
+ // out some other way to test if we have a valid SSRC.
+ bool can_send_track() const { return track_ && ssrc_; }
+
+ virtual std::string track_kind() const = 0;
+
+ // Enable sending on the media channel.
+ virtual void SetSend() = 0;
+ // Disable sending on the media channel.
+ virtual void ClearSend() = 0;
+
+ // Template method pattern to allow subclasses to add custom behavior for
+ // when tracks are attached, detached, and for adding tracks to statistics.
+ virtual void AttachTrack() {}
+ virtual void DetachTrack() {}
+ virtual void AddTrackToStats() {}
+ virtual void RemoveTrackFromStats() {}
+
+ rtc::Thread* const signaling_thread_;
+ rtc::Thread* const worker_thread_;
+ uint32_t ssrc_ = 0;
+ bool stopped_ RTC_GUARDED_BY(signaling_thread_) = false;
+ bool is_transceiver_stopped_ RTC_GUARDED_BY(signaling_thread_) = false;
+ int attachment_id_ = 0;
+ const std::string id_;
+
+ std::vector<std::string> stream_ids_;
+ RtpParameters init_parameters_;
+ std::vector<cricket::Codec> codec_preferences_;
+
+ // TODO(tommi): `media_channel_` and several other member variables in this
+ // class (ssrc_, stopped_, etc) are accessed from more than one thread without
+ // a guard or lock. Internally there are also several Invoke()s that we could
+ // remove since the upstream code may already be performing several operations
+ // on the worker thread.
+ cricket::MediaSendChannelInterface* media_channel_ = nullptr;
+ rtc::scoped_refptr<MediaStreamTrackInterface> track_;
+
+ rtc::scoped_refptr<DtlsTransportInterface> dtls_transport_;
+ rtc::scoped_refptr<FrameEncryptorInterface> frame_encryptor_;
+ // `last_transaction_id_` is used to verify that `SetParameters` is receiving
+ // the parameters object that was last returned from `GetParameters`.
+ // As such, it is used for internal verification and is not observable by the
+ // the client. It is marked as mutable to enable `GetParameters` to be a
+ // const method.
+ mutable absl::optional<std::string> last_transaction_id_;
+ std::vector<std::string> disabled_rids_;
+
+ SetStreamsObserver* set_streams_observer_ = nullptr;
+
+ rtc::scoped_refptr<FrameTransformerInterface> frame_transformer_;
+ std::unique_ptr<VideoEncoderFactory::EncoderSelectorInterface>
+ encoder_selector_;
+
+ virtual RTCError GenerateKeyFrame(const std::vector<std::string>& rids) = 0;
+};
+
+// LocalAudioSinkAdapter receives data callback as a sink to the local
+// AudioTrack, and passes the data to the sink of AudioSource.
+class LocalAudioSinkAdapter : public AudioTrackSinkInterface,
+ public cricket::AudioSource {
+ public:
+ LocalAudioSinkAdapter();
+ virtual ~LocalAudioSinkAdapter();
+
+ private:
+ // AudioSinkInterface implementation.
+ void OnData(const void* audio_data,
+ int bits_per_sample,
+ int sample_rate,
+ size_t number_of_channels,
+ size_t number_of_frames,
+ absl::optional<int64_t> absolute_capture_timestamp_ms) override;
+
+ // AudioSinkInterface implementation.
+ void OnData(const void* audio_data,
+ int bits_per_sample,
+ int sample_rate,
+ size_t number_of_channels,
+ size_t number_of_frames) override {
+ OnData(audio_data, bits_per_sample, sample_rate, number_of_channels,
+ number_of_frames,
+ /*absolute_capture_timestamp_ms=*/absl::nullopt);
+ }
+
+ // AudioSinkInterface implementation.
+ int NumPreferredChannels() const override { return num_preferred_channels_; }
+
+ // cricket::AudioSource implementation.
+ void SetSink(cricket::AudioSource::Sink* sink) override;
+
+ cricket::AudioSource::Sink* sink_;
+ // Critical section protecting `sink_`.
+ Mutex lock_;
+ int num_preferred_channels_ = -1;
+};
+
+class AudioRtpSender : public DtmfProviderInterface, public RtpSenderBase {
+ public:
+ // Construct an RtpSender for audio with the given sender ID.
+ // The sender is initialized with no track to send and no associated streams.
+ // StatsCollector provided so that Add/RemoveLocalAudioTrack can be called
+ // at the appropriate times.
+ // If `set_streams_observer` is not null, it is invoked when SetStreams()
+ // is called. `set_streams_observer` is not owned by this object. If not
+ // null, it must be valid at least until this sender becomes stopped.
+ static rtc::scoped_refptr<AudioRtpSender> Create(
+ rtc::Thread* worker_thread,
+ const std::string& id,
+ LegacyStatsCollectorInterface* stats,
+ SetStreamsObserver* set_streams_observer);
+ virtual ~AudioRtpSender();
+
+ // DtmfSenderProvider implementation.
+ bool CanInsertDtmf() override;
+ bool InsertDtmf(int code, int duration) override;
+
+ // ObserverInterface implementation.
+ void OnChanged() override;
+
+ cricket::MediaType media_type() const override {
+ return cricket::MEDIA_TYPE_AUDIO;
+ }
+ std::string track_kind() const override {
+ return MediaStreamTrackInterface::kAudioKind;
+ }
+
+ rtc::scoped_refptr<DtmfSenderInterface> GetDtmfSender() const override;
+ RTCError GenerateKeyFrame(const std::vector<std::string>& rids) override;
+
+ protected:
+ AudioRtpSender(rtc::Thread* worker_thread,
+ const std::string& id,
+ LegacyStatsCollectorInterface* legacy_stats,
+ SetStreamsObserver* set_streams_observer);
+
+ void SetSend() override;
+ void ClearSend() override;
+
+ // Hooks to allow custom logic when tracks are attached and detached.
+ void AttachTrack() override;
+ void DetachTrack() override;
+ void AddTrackToStats() override;
+ void RemoveTrackFromStats() override;
+
+ private:
+ cricket::VoiceMediaSendChannelInterface* voice_media_channel() {
+ return media_channel_->AsVoiceSendChannel();
+ }
+ rtc::scoped_refptr<AudioTrackInterface> audio_track() const {
+ return rtc::scoped_refptr<AudioTrackInterface>(
+ static_cast<AudioTrackInterface*>(track_.get()));
+ }
+
+ LegacyStatsCollectorInterface* legacy_stats_ = nullptr;
+ rtc::scoped_refptr<DtmfSender> dtmf_sender_;
+ rtc::scoped_refptr<DtmfSenderInterface> dtmf_sender_proxy_;
+ bool cached_track_enabled_ = false;
+
+ // Used to pass the data callback from the `track_` to the other end of
+ // cricket::AudioSource.
+ std::unique_ptr<LocalAudioSinkAdapter> sink_adapter_;
+};
+
+class VideoRtpSender : public RtpSenderBase {
+ public:
+ // Construct an RtpSender for video with the given sender ID.
+ // The sender is initialized with no track to send and no associated streams.
+ // If `set_streams_observer` is not null, it is invoked when SetStreams()
+ // is called. `set_streams_observer` is not owned by this object. If not
+ // null, it must be valid at least until this sender becomes stopped.
+ static rtc::scoped_refptr<VideoRtpSender> Create(
+ rtc::Thread* worker_thread,
+ const std::string& id,
+ SetStreamsObserver* set_streams_observer);
+ virtual ~VideoRtpSender();
+
+ // ObserverInterface implementation
+ void OnChanged() override;
+
+ cricket::MediaType media_type() const override {
+ return cricket::MEDIA_TYPE_VIDEO;
+ }
+ std::string track_kind() const override {
+ return MediaStreamTrackInterface::kVideoKind;
+ }
+
+ rtc::scoped_refptr<DtmfSenderInterface> GetDtmfSender() const override;
+ RTCError GenerateKeyFrame(const std::vector<std::string>& rids) override;
+
+ protected:
+ VideoRtpSender(rtc::Thread* worker_thread,
+ const std::string& id,
+ SetStreamsObserver* set_streams_observer);
+
+ void SetSend() override;
+ void ClearSend() override;
+
+ // Hook to allow custom logic when tracks are attached.
+ void AttachTrack() override;
+
+ private:
+ cricket::VideoMediaSendChannelInterface* video_media_channel() {
+ return media_channel_->AsVideoSendChannel();
+ }
+ rtc::scoped_refptr<VideoTrackInterface> video_track() const {
+ return rtc::scoped_refptr<VideoTrackInterface>(
+ static_cast<VideoTrackInterface*>(track_.get()));
+ }
+
+ VideoTrackInterface::ContentHint cached_track_content_hint_ =
+ VideoTrackInterface::ContentHint::kNone;
+};
+
+} // namespace webrtc
+
+#endif // PC_RTP_SENDER_H_
diff --git a/third_party/libwebrtc/pc/rtp_sender_proxy.h b/third_party/libwebrtc/pc/rtp_sender_proxy.h
new file mode 100644
index 0000000000..39862eb133
--- /dev/null
+++ b/third_party/libwebrtc/pc/rtp_sender_proxy.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_RTP_SENDER_PROXY_H_
+#define PC_RTP_SENDER_PROXY_H_
+
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "api/rtp_sender_interface.h"
+#include "pc/proxy.h"
+
+namespace webrtc {
+
+// Define proxy for RtpSenderInterface.
+// TODO(deadbeef): Move this to .cc file. What threads methods are called on is
+// an implementation detail.
+BEGIN_PRIMARY_PROXY_MAP(RtpSender)
+PROXY_PRIMARY_THREAD_DESTRUCTOR()
+PROXY_METHOD1(bool, SetTrack, MediaStreamTrackInterface*)
+PROXY_CONSTMETHOD0(rtc::scoped_refptr<MediaStreamTrackInterface>, track)
+PROXY_CONSTMETHOD0(rtc::scoped_refptr<DtlsTransportInterface>, dtls_transport)
+PROXY_CONSTMETHOD0(uint32_t, ssrc)
+BYPASS_PROXY_CONSTMETHOD0(cricket::MediaType, media_type)
+BYPASS_PROXY_CONSTMETHOD0(std::string, id)
+PROXY_CONSTMETHOD0(std::vector<std::string>, stream_ids)
+PROXY_CONSTMETHOD0(std::vector<RtpEncodingParameters>, init_send_encodings)
+PROXY_CONSTMETHOD0(RtpParameters, GetParameters)
+PROXY_METHOD1(RTCError, SetParameters, const RtpParameters&)
+PROXY_METHOD2(void,
+ SetParametersAsync,
+ const RtpParameters&,
+ SetParametersCallback)
+PROXY_CONSTMETHOD0(rtc::scoped_refptr<DtmfSenderInterface>, GetDtmfSender)
+PROXY_METHOD1(void,
+ SetFrameEncryptor,
+ rtc::scoped_refptr<FrameEncryptorInterface>)
+PROXY_CONSTMETHOD0(rtc::scoped_refptr<FrameEncryptorInterface>,
+ GetFrameEncryptor)
+PROXY_METHOD1(void, SetStreams, const std::vector<std::string>&)
+PROXY_METHOD1(void,
+ SetEncoderToPacketizerFrameTransformer,
+ rtc::scoped_refptr<FrameTransformerInterface>)
+PROXY_METHOD1(void,
+ SetEncoderSelector,
+ std::unique_ptr<VideoEncoderFactory::EncoderSelectorInterface>)
+END_PROXY_MAP(RtpSender)
+
+} // namespace webrtc
+
+#endif // PC_RTP_SENDER_PROXY_H_
diff --git a/third_party/libwebrtc/pc/rtp_sender_receiver_unittest.cc b/third_party/libwebrtc/pc/rtp_sender_receiver_unittest.cc
new file mode 100644
index 0000000000..3092e53c2d
--- /dev/null
+++ b/third_party/libwebrtc/pc/rtp_sender_receiver_unittest.cc
@@ -0,0 +1,1954 @@
+/*
+ * Copyright 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stddef.h>
+
+#include <cstdint>
+#include <iterator>
+#include <memory>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "absl/algorithm/container.h"
+#include "absl/memory/memory.h"
+#include "absl/types/optional.h"
+#include "api/audio_options.h"
+#include "api/crypto/crypto_options.h"
+#include "api/crypto/frame_decryptor_interface.h"
+#include "api/crypto/frame_encryptor_interface.h"
+#include "api/dtmf_sender_interface.h"
+#include "api/media_stream_interface.h"
+#include "api/rtc_error.h"
+#include "api/rtc_event_log/rtc_event_log.h"
+#include "api/rtp_parameters.h"
+#include "api/rtp_receiver_interface.h"
+#include "api/scoped_refptr.h"
+#include "api/test/fake_frame_decryptor.h"
+#include "api/test/fake_frame_encryptor.h"
+#include "api/video/builtin_video_bitrate_allocator_factory.h"
+#include "api/video/video_bitrate_allocator_factory.h"
+#include "api/video/video_codec_constants.h"
+#include "media/base/codec.h"
+#include "media/base/fake_media_engine.h"
+#include "media/base/media_channel.h"
+#include "media/base/media_config.h"
+#include "media/base/media_engine.h"
+#include "media/base/rid_description.h"
+#include "media/base/stream_params.h"
+#include "media/base/test_utils.h"
+#include "media/engine/fake_webrtc_call.h"
+#include "p2p/base/dtls_transport_internal.h"
+#include "p2p/base/fake_dtls_transport.h"
+#include "p2p/base/p2p_constants.h"
+#include "pc/audio_rtp_receiver.h"
+#include "pc/audio_track.h"
+#include "pc/channel.h"
+#include "pc/dtls_srtp_transport.h"
+#include "pc/local_audio_source.h"
+#include "pc/media_stream.h"
+#include "pc/rtp_sender.h"
+#include "pc/rtp_transport_internal.h"
+#include "pc/test/fake_video_track_source.h"
+#include "pc/video_rtp_receiver.h"
+#include "pc/video_track.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/thread.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+#include "test/run_loop.h"
+#include "test/scoped_key_value_config.h"
+
+using ::testing::_;
+using ::testing::ContainerEq;
+using ::testing::Exactly;
+using ::testing::InvokeWithoutArgs;
+using ::testing::Return;
+using RidList = std::vector<std::string>;
+
+namespace {
+
+static const char kStreamId1[] = "local_stream_1";
+static const char kVideoTrackId[] = "video_1";
+static const char kAudioTrackId[] = "audio_1";
+static const uint32_t kVideoSsrc = 98;
+static const uint32_t kVideoSsrc2 = 100;
+static const uint32_t kAudioSsrc = 99;
+static const uint32_t kAudioSsrc2 = 101;
+static const uint32_t kVideoSsrcSimulcast = 102;
+static const uint32_t kVideoSimulcastLayerCount = 2;
+static const int kDefaultTimeout = 10000; // 10 seconds.
+
+class MockSetStreamsObserver
+ : public webrtc::RtpSenderBase::SetStreamsObserver {
+ public:
+ MOCK_METHOD(void, OnSetStreams, (), (override));
+};
+
+} // namespace
+
+namespace webrtc {
+
+class RtpSenderReceiverTest
+ : public ::testing::Test,
+ public ::testing::WithParamInterface<std::pair<RidList, RidList>> {
+ public:
+ RtpSenderReceiverTest()
+ : network_thread_(rtc::Thread::Current()),
+ worker_thread_(rtc::Thread::Current()),
+ video_bitrate_allocator_factory_(
+ webrtc::CreateBuiltinVideoBitrateAllocatorFactory()),
+ // Create fake media engine/etc. so we can create channels to use to
+ // test RtpSenders/RtpReceivers.
+ media_engine_(std::make_unique<cricket::FakeMediaEngine>()),
+ fake_call_(worker_thread_, network_thread_),
+ local_stream_(MediaStream::Create(kStreamId1)) {
+ rtp_dtls_transport_ = std::make_unique<cricket::FakeDtlsTransport>(
+ "fake_dtls_transport", cricket::ICE_CANDIDATE_COMPONENT_RTP);
+ rtp_transport_ = CreateDtlsSrtpTransport();
+
+ // Create the channels, discard the result; we get them later.
+ // Fake media channels are owned by the media engine.
+ voice_media_send_channel_ = media_engine_->voice().CreateSendChannel(
+ &fake_call_, cricket::MediaConfig(), cricket::AudioOptions(),
+ webrtc::CryptoOptions(), webrtc::AudioCodecPairId::Create());
+ video_media_send_channel_ = media_engine_->video().CreateSendChannel(
+ &fake_call_, cricket::MediaConfig(), cricket::VideoOptions(),
+ webrtc::CryptoOptions(), video_bitrate_allocator_factory_.get());
+ voice_media_receive_channel_ = media_engine_->voice().CreateReceiveChannel(
+ &fake_call_, cricket::MediaConfig(), cricket::AudioOptions(),
+ webrtc::CryptoOptions(), webrtc::AudioCodecPairId::Create());
+ video_media_receive_channel_ = media_engine_->video().CreateReceiveChannel(
+ &fake_call_, cricket::MediaConfig(), cricket::VideoOptions(),
+ webrtc::CryptoOptions());
+
+ // Create streams for predefined SSRCs. Streams need to exist in order
+    // for the senders and receivers to apply parameters to them.
+ // Normally these would be created by SetLocalDescription and
+ // SetRemoteDescription.
+ voice_media_send_channel_->AddSendStream(
+ cricket::StreamParams::CreateLegacy(kAudioSsrc));
+ voice_media_receive_channel_->AddRecvStream(
+ cricket::StreamParams::CreateLegacy(kAudioSsrc));
+ voice_media_send_channel_->AddSendStream(
+ cricket::StreamParams::CreateLegacy(kAudioSsrc2));
+ voice_media_receive_channel_->AddRecvStream(
+ cricket::StreamParams::CreateLegacy(kAudioSsrc2));
+ video_media_send_channel_->AddSendStream(
+ cricket::StreamParams::CreateLegacy(kVideoSsrc));
+ video_media_receive_channel_->AddRecvStream(
+ cricket::StreamParams::CreateLegacy(kVideoSsrc));
+ video_media_send_channel_->AddSendStream(
+ cricket::StreamParams::CreateLegacy(kVideoSsrc2));
+ video_media_receive_channel_->AddRecvStream(
+ cricket::StreamParams::CreateLegacy(kVideoSsrc2));
+ }
+
+ ~RtpSenderReceiverTest() {
+ audio_rtp_sender_ = nullptr;
+ video_rtp_sender_ = nullptr;
+ audio_rtp_receiver_ = nullptr;
+ video_rtp_receiver_ = nullptr;
+ local_stream_ = nullptr;
+ video_track_ = nullptr;
+ audio_track_ = nullptr;
+ }
+
+ std::unique_ptr<webrtc::RtpTransportInternal> CreateDtlsSrtpTransport() {
+ auto dtls_srtp_transport = std::make_unique<webrtc::DtlsSrtpTransport>(
+ /*rtcp_mux_required=*/true, field_trials_);
+ dtls_srtp_transport->SetDtlsTransports(rtp_dtls_transport_.get(),
+ /*rtcp_dtls_transport=*/nullptr);
+ return dtls_srtp_transport;
+ }
+
+ // Needed to use DTMF sender.
+ void AddDtmfCodec() {
+ cricket::AudioSenderParameter params;
+ const cricket::AudioCodec kTelephoneEventCodec =
+ cricket::CreateAudioCodec(106, "telephone-event", 8000, 1);
+ params.codecs.push_back(kTelephoneEventCodec);
+ voice_media_send_channel()->SetSenderParameters(params);
+ }
+
+ void AddVideoTrack() { AddVideoTrack(false); }
+
+ void AddVideoTrack(bool is_screencast) {
+ rtc::scoped_refptr<VideoTrackSourceInterface> source(
+ FakeVideoTrackSource::Create(is_screencast));
+ video_track_ =
+ VideoTrack::Create(kVideoTrackId, source, rtc::Thread::Current());
+ EXPECT_TRUE(local_stream_->AddTrack(video_track_));
+ }
+
+ void CreateAudioRtpSender() { CreateAudioRtpSender(nullptr); }
+
+ void CreateAudioRtpSender(
+ const rtc::scoped_refptr<LocalAudioSource>& source) {
+ audio_track_ = AudioTrack::Create(kAudioTrackId, source);
+ EXPECT_TRUE(local_stream_->AddTrack(audio_track_));
+ std::unique_ptr<MockSetStreamsObserver> set_streams_observer =
+ std::make_unique<MockSetStreamsObserver>();
+ audio_rtp_sender_ =
+ AudioRtpSender::Create(worker_thread_, audio_track_->id(), nullptr,
+ set_streams_observer.get());
+ ASSERT_TRUE(audio_rtp_sender_->SetTrack(audio_track_.get()));
+ EXPECT_CALL(*set_streams_observer, OnSetStreams());
+ audio_rtp_sender_->SetStreams({local_stream_->id()});
+ audio_rtp_sender_->SetMediaChannel(voice_media_send_channel_.get());
+ audio_rtp_sender_->SetSsrc(kAudioSsrc);
+ VerifyVoiceChannelInput();
+ }
+
+ void CreateAudioRtpSenderWithNoTrack() {
+ audio_rtp_sender_ =
+ AudioRtpSender::Create(worker_thread_, /*id=*/"", nullptr, nullptr);
+ audio_rtp_sender_->SetMediaChannel(voice_media_send_channel_.get());
+ }
+
+ void CreateVideoRtpSender(uint32_t ssrc) {
+ CreateVideoRtpSender(false, ssrc);
+ }
+
+ void CreateVideoRtpSender() { CreateVideoRtpSender(false); }
+
+ cricket::StreamParams CreateSimulcastStreamParams(int num_layers) {
+ std::vector<uint32_t> ssrcs;
+ ssrcs.reserve(num_layers);
+ for (int i = 0; i < num_layers; ++i) {
+ ssrcs.push_back(kVideoSsrcSimulcast + i);
+ }
+ return cricket::CreateSimStreamParams("cname", ssrcs);
+ }
+
+ uint32_t CreateVideoRtpSender(const cricket::StreamParams& stream_params) {
+ video_media_send_channel_->AddSendStream(stream_params);
+ uint32_t primary_ssrc = stream_params.first_ssrc();
+ CreateVideoRtpSender(primary_ssrc);
+ return primary_ssrc;
+ }
+
+ uint32_t CreateVideoRtpSenderWithSimulcast(
+ int num_layers = kVideoSimulcastLayerCount) {
+ return CreateVideoRtpSender(CreateSimulcastStreamParams(num_layers));
+ }
+
+ uint32_t CreateVideoRtpSenderWithSimulcast(
+ const std::vector<std::string>& rids) {
+ cricket::StreamParams stream_params =
+ CreateSimulcastStreamParams(rids.size());
+ std::vector<cricket::RidDescription> rid_descriptions;
+ absl::c_transform(
+ rids, std::back_inserter(rid_descriptions), [](const std::string& rid) {
+ return cricket::RidDescription(rid, cricket::RidDirection::kSend);
+ });
+ stream_params.set_rids(rid_descriptions);
+ return CreateVideoRtpSender(stream_params);
+ }
+
+ void CreateVideoRtpSender(bool is_screencast, uint32_t ssrc = kVideoSsrc) {
+ AddVideoTrack(is_screencast);
+ std::unique_ptr<MockSetStreamsObserver> set_streams_observer =
+ std::make_unique<MockSetStreamsObserver>();
+ video_rtp_sender_ = VideoRtpSender::Create(
+ worker_thread_, video_track_->id(), set_streams_observer.get());
+ ASSERT_TRUE(video_rtp_sender_->SetTrack(video_track_.get()));
+ EXPECT_CALL(*set_streams_observer, OnSetStreams());
+ video_rtp_sender_->SetStreams({local_stream_->id()});
+ video_rtp_sender_->SetMediaChannel(video_media_send_channel());
+ video_rtp_sender_->SetSsrc(ssrc);
+ VerifyVideoChannelInput(ssrc);
+ }
+ void CreateVideoRtpSenderWithNoTrack() {
+ video_rtp_sender_ =
+ VideoRtpSender::Create(worker_thread_, /*id=*/"", nullptr);
+ video_rtp_sender_->SetMediaChannel(video_media_send_channel());
+ }
+
+ void DestroyAudioRtpSender() {
+ audio_rtp_sender_ = nullptr;
+ VerifyVoiceChannelNoInput();
+ }
+
+ void DestroyVideoRtpSender() {
+ video_rtp_sender_ = nullptr;
+ VerifyVideoChannelNoInput();
+ }
+
+  // Creates an AudioRtpReceiver on the current thread, wires it to the fake
+  // voice receive channel for kAudioSsrc, caches its track, and verifies the
+  // channel gained an output (volume initialized to 1).
+  void CreateAudioRtpReceiver(
+      std::vector<rtc::scoped_refptr<MediaStreamInterface>> streams = {}) {
+    audio_rtp_receiver_ = rtc::make_ref_counted<AudioRtpReceiver>(
+        rtc::Thread::Current(), kAudioTrackId, streams,
+        /*is_unified_plan=*/true);
+    audio_rtp_receiver_->SetMediaChannel(voice_media_receive_channel());
+    audio_rtp_receiver_->SetupMediaChannel(kAudioSsrc);
+    audio_track_ = audio_rtp_receiver_->audio_track();
+    VerifyVoiceChannelOutput();
+  }
+
+  // Video counterpart of CreateAudioRtpReceiver(), bound to kVideoSsrc.
+  void CreateVideoRtpReceiver(
+      std::vector<rtc::scoped_refptr<MediaStreamInterface>> streams = {}) {
+    video_rtp_receiver_ = rtc::make_ref_counted<VideoRtpReceiver>(
+        rtc::Thread::Current(), kVideoTrackId, streams);
+    video_rtp_receiver_->SetMediaChannel(video_media_receive_channel());
+    video_rtp_receiver_->SetupMediaChannel(kVideoSsrc);
+    video_track_ = video_rtp_receiver_->video_track();
+    VerifyVideoChannelOutput();
+  }
+
+  // Creates a VideoRtpReceiver whose recv stream carries `num_layers`
+  // consecutive simulcast SSRCs starting at kVideoSsrcSimulcast; the receiver
+  // is set up on the primary (first) SSRC.
+  void CreateVideoRtpReceiverWithSimulcast(
+      std::vector<rtc::scoped_refptr<MediaStreamInterface>> streams = {},
+      int num_layers = kVideoSimulcastLayerCount) {
+    std::vector<uint32_t> ssrcs;
+    ssrcs.reserve(num_layers);
+    for (int i = 0; i < num_layers; ++i)
+      ssrcs.push_back(kVideoSsrcSimulcast + i);
+    cricket::StreamParams stream_params =
+        cricket::CreateSimStreamParams("cname", ssrcs);
+    video_media_receive_channel_->AddRecvStream(stream_params);
+    uint32_t primary_ssrc = stream_params.first_ssrc();
+
+    video_rtp_receiver_ = rtc::make_ref_counted<VideoRtpReceiver>(
+        rtc::Thread::Current(), kVideoTrackId, streams);
+    video_rtp_receiver_->SetMediaChannel(video_media_receive_channel());
+    video_rtp_receiver_->SetupMediaChannel(primary_ssrc);
+    video_track_ = video_rtp_receiver_->video_track();
+  }
+
+  // Detaches and releases the audio receiver; safe to call when no receiver
+  // exists (early return). Afterwards the channel volume must be reset to 0.
+  void DestroyAudioRtpReceiver() {
+    if (!audio_rtp_receiver_)
+      return;
+    audio_rtp_receiver_->SetMediaChannel(nullptr);
+    audio_rtp_receiver_ = nullptr;
+    VerifyVoiceChannelNoOutput();
+  }
+
+  // Stops, detaches and releases the video receiver; safe to call when no
+  // receiver exists. Afterwards the channel must have no sink.
+  void DestroyVideoRtpReceiver() {
+    if (!video_rtp_receiver_)
+      return;
+    video_rtp_receiver_->Stop();
+    video_rtp_receiver_->SetMediaChannel(nullptr);
+    video_rtp_receiver_ = nullptr;
+    VerifyVideoChannelNoOutput();
+  }
+
+  void VerifyVoiceChannelInput() { VerifyVoiceChannelInput(kAudioSsrc); }
+
+  void VerifyVoiceChannelInput(uint32_t ssrc) {
+    // Verify that the media channel has an audio source, and the stream isn't
+    // muted.
+    EXPECT_TRUE(voice_media_send_channel()->HasSource(ssrc));
+    EXPECT_FALSE(voice_media_send_channel()->IsStreamMuted(ssrc));
+  }
+
+  void VerifyVideoChannelInput() { VerifyVideoChannelInput(kVideoSsrc); }
+
+  void VerifyVideoChannelInput(uint32_t ssrc) {
+    // Verify that the media channel has a video source.
+    EXPECT_TRUE(video_media_send_channel()->HasSource(ssrc));
+  }
+
+  void VerifyVoiceChannelNoInput() { VerifyVoiceChannelNoInput(kAudioSsrc); }
+
+  void VerifyVoiceChannelNoInput(uint32_t ssrc) {
+    // Verify that the media channel's source is reset.
+    // NOTE(review): the positive check above queries the *send* channel but
+    // this negative check queries the *receive* channel — confirm the
+    // asymmetry is intentional and not a vacuously-true assertion.
+    EXPECT_FALSE(voice_media_receive_channel()->HasSource(ssrc));
+  }
+
+  void VerifyVideoChannelNoInput() { VerifyVideoChannelNoInput(kVideoSsrc); }
+
+  void VerifyVideoChannelNoInput(uint32_t ssrc) {
+    // Verify that the media channel's source is reset.
+    // NOTE(review): same send/receive-channel asymmetry as
+    // VerifyVoiceChannelNoInput() — confirm intentional.
+    EXPECT_FALSE(video_media_receive_channel()->HasSource(ssrc));
+  }
+
+  void VerifyVoiceChannelOutput() {
+    // Verify that the volume is initialized to 1.
+    double volume;
+    EXPECT_TRUE(
+        voice_media_receive_channel()->GetOutputVolume(kAudioSsrc, &volume));
+    EXPECT_EQ(1, volume);
+  }
+
+  void VerifyVideoChannelOutput() {
+    // Verify that the media channel has a sink.
+    EXPECT_TRUE(video_media_receive_channel()->HasSink(kVideoSsrc));
+  }
+
+  void VerifyVoiceChannelNoOutput() {
+    // Verify that the volume is reset to 0 once the receiver is gone.
+    double volume;
+    EXPECT_TRUE(
+        voice_media_receive_channel()->GetOutputVolume(kAudioSsrc, &volume));
+    EXPECT_EQ(0, volume);
+  }
+
+  void VerifyVideoChannelNoOutput() {
+    // Verify that the media channel's sink is reset.
+    EXPECT_FALSE(video_media_receive_channel()->HasSink(kVideoSsrc));
+  }
+
+  // Verifies that the encoding layers contain the specified RIDs.
+  // Returns true iff this check introduced no *new* gtest failure: the
+  // HasFailure() snapshot taken before the EXPECT lets callers distinguish
+  // pre-existing failures from ones caused by this verification.
+  bool VerifyEncodingLayers(const VideoRtpSender& sender,
+                            const std::vector<std::string>& rids) {
+    bool has_failure = HasFailure();
+    RtpParameters parameters = sender.GetParameters();
+    std::vector<std::string> encoding_rids;
+    absl::c_transform(
+        parameters.encodings, std::back_inserter(encoding_rids),
+        [](const RtpEncodingParameters& encoding) { return encoding.rid; });
+    EXPECT_THAT(rids, ContainerEq(encoding_rids));
+    return has_failure || !HasFailure();
+  }
+
+  // Runs a test for disabling the encoding layers on the specified sender:
+  // after DisableEncodingLayers(disabled_layers) the sender must report only
+  // the layers not in `disabled_layers`.
+  void RunDisableEncodingLayersTest(
+      const std::vector<std::string>& all_layers,
+      const std::vector<std::string>& disabled_layers,
+      VideoRtpSender* sender) {
+    std::vector<std::string> expected;
+    absl::c_copy_if(all_layers, std::back_inserter(expected),
+                    [&disabled_layers](const std::string& rid) {
+                      return !absl::c_linear_search(disabled_layers, rid);
+                    });
+
+    EXPECT_TRUE(VerifyEncodingLayers(*sender, all_layers));
+    sender->DisableEncodingLayers(disabled_layers);
+    EXPECT_TRUE(VerifyEncodingLayers(*sender, expected));
+  }
+
+  // Runs a test for setting an encoding layer as inactive.
+  // This test assumes that some layers have already been disabled.
+  // Flips `active` to false on the last remaining encoding via SetParameters
+  // and verifies the change round-trips through GetParameters.
+  void RunSetLastLayerAsInactiveTest(VideoRtpSender* sender) {
+    auto parameters = sender->GetParameters();
+    if (parameters.encodings.size() == 0) {
+      return;  // All layers disabled; nothing left to deactivate.
+    }
+
+    RtpEncodingParameters& encoding = parameters.encodings.back();
+    auto rid = encoding.rid;
+    EXPECT_TRUE(encoding.active);
+    encoding.active = false;
+    auto error = sender->SetParameters(parameters);
+    ASSERT_TRUE(error.ok());
+    parameters = sender->GetParameters();
+    RtpEncodingParameters& result_encoding = parameters.encodings.back();
+    EXPECT_EQ(rid, result_encoding.rid);
+    EXPECT_FALSE(result_encoding.active);
+  }
+
+  // Runs a test for disabling the encoding layers on a sender without a media
+  // channel (init_send_encodings only; nothing has been negotiated).
+  void RunDisableSimulcastLayersWithoutMediaEngineTest(
+      const std::vector<std::string>& all_layers,
+      const std::vector<std::string>& disabled_layers) {
+    auto sender = VideoRtpSender::Create(rtc::Thread::Current(), "1", nullptr);
+    RtpParameters parameters;
+    parameters.encodings.resize(all_layers.size());
+    for (size_t i = 0; i < all_layers.size(); ++i) {
+      parameters.encodings[i].rid = all_layers[i];
+    }
+    sender->set_init_send_encodings(parameters.encodings);
+    RunDisableEncodingLayersTest(all_layers, disabled_layers, sender.get());
+    RunSetLastLayerAsInactiveTest(sender.get());
+  }
+
+  // Runs a test for disabling the encoding layers on a sender with a media
+  // channel. Disabled layers must surface in the channel's send parameters
+  // as `active == false` while keeping their RIDs.
+  void RunDisableSimulcastLayersWithMediaEngineTest(
+      const std::vector<std::string>& all_layers,
+      const std::vector<std::string>& disabled_layers) {
+    uint32_t ssrc = CreateVideoRtpSenderWithSimulcast(all_layers);
+    RunDisableEncodingLayersTest(all_layers, disabled_layers,
+                                 video_rtp_sender_.get());
+
+    auto channel_parameters =
+        video_media_send_channel_->GetRtpSendParameters(ssrc);
+    ASSERT_EQ(channel_parameters.encodings.size(), all_layers.size());
+    for (size_t i = 0; i < all_layers.size(); ++i) {
+      EXPECT_EQ(all_layers[i], channel_parameters.encodings[i].rid);
+      bool is_active = !absl::c_linear_search(disabled_layers, all_layers[i]);
+      EXPECT_EQ(is_active, channel_parameters.encodings[i].active);
+    }
+
+    RunSetLastLayerAsInactiveTest(video_rtp_sender_.get());
+  }
+
+  // Check that minimum Jitter Buffer delay is propagated to the underlying
+  // `media_channel`. The receiver API takes seconds; the channel reports
+  // milliseconds, hence the /1000.0 conversion below.
+  void VerifyRtpReceiverDelayBehaviour(
+      cricket::MediaReceiveChannelInterface* media_channel,
+      RtpReceiverInterface* receiver,
+      uint32_t ssrc) {
+    receiver->SetJitterBufferMinimumDelay(/*delay_seconds=*/0.5);
+    absl::optional<int> delay_ms =
+        media_channel->GetBaseMinimumPlayoutDelayMs(ssrc);  // In milliseconds.
+    EXPECT_DOUBLE_EQ(0.5, delay_ms.value_or(0) / 1000.0);
+  }
+
+ protected:
+  // Typed accessors for the fake media channels. The static_casts are safe
+  // assuming the channels are created as Fake* implementations in the fixture
+  // setup (outside this view) — TODO confirm against SetUp().
+  cricket::FakeVideoMediaSendChannel* video_media_send_channel() {
+    return static_cast<cricket::FakeVideoMediaSendChannel*>(
+        video_media_send_channel_.get());
+  }
+  cricket::FakeVoiceMediaSendChannel* voice_media_send_channel() {
+    return static_cast<cricket::FakeVoiceMediaSendChannel*>(
+        voice_media_send_channel_.get());
+  }
+  cricket::FakeVideoMediaReceiveChannel* video_media_receive_channel() {
+    return static_cast<cricket::FakeVideoMediaReceiveChannel*>(
+        video_media_receive_channel_.get());
+  }
+  cricket::FakeVoiceMediaReceiveChannel* voice_media_receive_channel() {
+    return static_cast<cricket::FakeVoiceMediaReceiveChannel*>(
+        voice_media_receive_channel_.get());
+  }
+
+  test::RunLoop run_loop_;
+  rtc::Thread* const network_thread_;
+  rtc::Thread* const worker_thread_;
+  webrtc::RtcEventLogNull event_log_;
+  // The `rtp_dtls_transport_` and `rtp_transport_` should be destroyed after
+  // the `channel_manager`.
+  std::unique_ptr<cricket::DtlsTransportInternal> rtp_dtls_transport_;
+  std::unique_ptr<webrtc::RtpTransportInternal> rtp_transport_;
+  std::unique_ptr<webrtc::VideoBitrateAllocatorFactory>
+      video_bitrate_allocator_factory_;
+  std::unique_ptr<cricket::FakeMediaEngine> media_engine_;
+  rtc::UniqueRandomIdGenerator ssrc_generator_;
+  cricket::FakeCall fake_call_;
+  // Split send/receive channels, stored by interface; see accessors above.
+  std::unique_ptr<cricket::VoiceMediaSendChannelInterface>
+      voice_media_send_channel_;
+  std::unique_ptr<cricket::VideoMediaSendChannelInterface>
+      video_media_send_channel_;
+  std::unique_ptr<cricket::VoiceMediaReceiveChannelInterface>
+      voice_media_receive_channel_;
+  std::unique_ptr<cricket::VideoMediaReceiveChannelInterface>
+      video_media_receive_channel_;
+  // Senders/receivers and tracks under test, (re)created per test case.
+  rtc::scoped_refptr<AudioRtpSender> audio_rtp_sender_;
+  rtc::scoped_refptr<VideoRtpSender> video_rtp_sender_;
+  rtc::scoped_refptr<AudioRtpReceiver> audio_rtp_receiver_;
+  rtc::scoped_refptr<VideoRtpReceiver> video_rtp_receiver_;
+  rtc::scoped_refptr<MediaStreamInterface> local_stream_;
+  rtc::scoped_refptr<VideoTrackInterface> video_track_;
+  rtc::scoped_refptr<AudioTrackInterface> audio_track_;
+  webrtc::test::ScopedKeyValueConfig field_trials_;
+};
+
+// Test that `voice_channel_` is updated when an audio track is associated
+// and disassociated with an AudioRtpSender.
+// (The Create*/Destroy* fixture helpers carry the actual EXPECTs.)
+TEST_F(RtpSenderReceiverTest, AddAndDestroyAudioRtpSender) {
+  CreateAudioRtpSender();
+  DestroyAudioRtpSender();
+}
+
+// Test that `video_channel_` is updated when a video track is associated and
+// disassociated with a VideoRtpSender.
+TEST_F(RtpSenderReceiverTest, AddAndDestroyVideoRtpSender) {
+  CreateVideoRtpSender();
+  DestroyVideoRtpSender();
+}
+
+// Test that `voice_channel_` is updated when a remote audio track is
+// associated and disassociated with an AudioRtpReceiver.
+TEST_F(RtpSenderReceiverTest, AddAndDestroyAudioRtpReceiver) {
+  CreateAudioRtpReceiver();
+  DestroyAudioRtpReceiver();
+}
+
+// Test that `video_channel_` is updated when a remote video track is
+// associated and disassociated with a VideoRtpReceiver.
+TEST_F(RtpSenderReceiverTest, AddAndDestroyVideoRtpReceiver) {
+  CreateVideoRtpReceiver();
+  DestroyVideoRtpReceiver();
+}
+
+// Same as above, but with the receiver attached to a local media stream.
+TEST_F(RtpSenderReceiverTest, AddAndDestroyAudioRtpReceiverWithStreams) {
+  CreateAudioRtpReceiver({local_stream_});
+  DestroyAudioRtpReceiver();
+}
+
+// Same as above, but with the receiver attached to a local media stream.
+TEST_F(RtpSenderReceiverTest, AddAndDestroyVideoRtpReceiverWithStreams) {
+  CreateVideoRtpReceiver({local_stream_});
+  DestroyVideoRtpReceiver();
+}
+
+// Test that the AudioRtpSender applies options from the local audio source.
+TEST_F(RtpSenderReceiverTest, LocalAudioSourceOptionsApplied) {
+  cricket::AudioOptions options;
+  options.echo_cancellation = true;
+  auto source = LocalAudioSource::Create(&options);
+  CreateAudioRtpSender(source);
+
+  // The source's options must be forwarded to the fake send channel.
+  EXPECT_EQ(true, voice_media_send_channel()->options().echo_cancellation);
+
+  DestroyAudioRtpSender();
+}
+
+// Test that the stream is muted when the track is disabled, and unmuted when
+// the track is enabled.
+TEST_F(RtpSenderReceiverTest, LocalAudioTrackDisable) {
+  CreateAudioRtpSender();
+
+  audio_track_->set_enabled(false);
+  EXPECT_TRUE(voice_media_send_channel()->IsStreamMuted(kAudioSsrc));
+
+  audio_track_->set_enabled(true);
+  EXPECT_FALSE(voice_media_send_channel()->IsStreamMuted(kAudioSsrc));
+
+  DestroyAudioRtpSender();
+}
+
+// Test that the volume is set to 0 when the track is disabled, and back to
+// 1 when the track is enabled.
+TEST_F(RtpSenderReceiverTest, RemoteAudioTrackDisable) {
+  CreateAudioRtpReceiver();
+
+  double volume;
+  EXPECT_TRUE(
+      voice_media_receive_channel()->GetOutputVolume(kAudioSsrc, &volume));
+  EXPECT_EQ(1, volume);
+
+  // Handling of enable/disable is applied asynchronously, so drain the run
+  // loop before inspecting the channel.
+  audio_track_->set_enabled(false);
+  run_loop_.Flush();
+
+  EXPECT_TRUE(
+      voice_media_receive_channel()->GetOutputVolume(kAudioSsrc, &volume));
+  EXPECT_EQ(0, volume);
+
+  audio_track_->set_enabled(true);
+  run_loop_.Flush();
+  EXPECT_TRUE(
+      voice_media_receive_channel()->GetOutputVolume(kAudioSsrc, &volume));
+  EXPECT_EQ(1, volume);
+
+  DestroyAudioRtpReceiver();
+}
+
+// Currently no action is taken when a remote video track is disabled or
+// enabled, so there's nothing to test here, other than what is normally
+// verified in DestroyVideoRtpSender.
+TEST_F(RtpSenderReceiverTest, LocalVideoTrackDisable) {
+  CreateVideoRtpSender();
+
+  video_track_->set_enabled(false);
+  video_track_->set_enabled(true);
+
+  DestroyVideoRtpSender();
+}
+
+// Test that the state of the video track created by the VideoRtpReceiver is
+// updated when the receiver is destroyed.
+TEST_F(RtpSenderReceiverTest, RemoteVideoTrackState) {
+  CreateVideoRtpReceiver();
+
+  EXPECT_EQ(webrtc::MediaStreamTrackInterface::kLive, video_track_->state());
+  EXPECT_EQ(webrtc::MediaSourceInterface::kLive,
+            video_track_->GetSource()->state());
+
+  DestroyVideoRtpReceiver();
+
+  EXPECT_EQ(webrtc::MediaStreamTrackInterface::kEnded, video_track_->state());
+  EXPECT_EQ(webrtc::MediaSourceInterface::kEnded,
+            video_track_->GetSource()->state());
+  // Destroying a second time is a no-op: the helper early-returns when the
+  // receiver is already null.
+  DestroyVideoRtpReceiver();
+}
+
+// Currently no action is taken when a remote video track is disabled or
+// enabled, so there's nothing to test here, other than what is normally
+// verified in DestroyVideoRtpReceiver.
+TEST_F(RtpSenderReceiverTest, RemoteVideoTrackDisable) {
+  CreateVideoRtpReceiver();
+
+  video_track_->set_enabled(false);
+  video_track_->set_enabled(true);
+
+  DestroyVideoRtpReceiver();
+}
+
+// Test that the AudioRtpReceiver applies volume changes from the track source
+// to the media channel.
+TEST_F(RtpSenderReceiverTest, RemoteAudioTrackSetVolume) {
+  CreateAudioRtpReceiver();
+
+  double volume;
+  audio_track_->GetSource()->SetVolume(0.5);
+  run_loop_.Flush();
+  EXPECT_TRUE(
+      voice_media_receive_channel()->GetOutputVolume(kAudioSsrc, &volume));
+  EXPECT_EQ(0.5, volume);
+
+  // Disable the audio track, this should prevent setting the volume.
+  audio_track_->set_enabled(false);
+  RTC_DCHECK_EQ(worker_thread_, run_loop_.task_queue());
+  run_loop_.Flush();
+  audio_track_->GetSource()->SetVolume(0.8);
+  EXPECT_TRUE(
+      voice_media_receive_channel()->GetOutputVolume(kAudioSsrc, &volume));
+  EXPECT_EQ(0, volume);
+
+  // When the track is enabled, the previously set volume should take effect.
+  audio_track_->set_enabled(true);
+  run_loop_.Flush();
+  EXPECT_TRUE(
+      voice_media_receive_channel()->GetOutputVolume(kAudioSsrc, &volume));
+  EXPECT_EQ(0.8, volume);
+
+  // Try changing volume one more time.
+  audio_track_->GetSource()->SetVolume(0.9);
+  run_loop_.Flush();
+  EXPECT_TRUE(
+      voice_media_receive_channel()->GetOutputVolume(kAudioSsrc, &volume));
+  EXPECT_EQ(0.9, volume);
+
+  DestroyAudioRtpReceiver();
+}
+
+// Jitter-buffer minimum-delay propagation for the audio receiver.
+TEST_F(RtpSenderReceiverTest, AudioRtpReceiverDelay) {
+  CreateAudioRtpReceiver();
+  VerifyRtpReceiverDelayBehaviour(
+      voice_media_receive_channel()->AsVoiceReceiveChannel(),
+      audio_rtp_receiver_.get(), kAudioSsrc);
+  DestroyAudioRtpReceiver();
+}
+
+// Jitter-buffer minimum-delay propagation for the video receiver.
+TEST_F(RtpSenderReceiverTest, VideoRtpReceiverDelay) {
+  CreateVideoRtpReceiver();
+  VerifyRtpReceiverDelayBehaviour(
+      video_media_receive_channel()->AsVideoReceiveChannel(),
+      video_rtp_receiver_.get(), kVideoSsrc);
+  DestroyVideoRtpReceiver();
+}
+
+// Test that the media channel isn't enabled for sending if the audio sender
+// doesn't have both a track and SSRC.
+TEST_F(RtpSenderReceiverTest, AudioSenderWithoutTrackAndSsrc) {
+  CreateAudioRtpSenderWithNoTrack();
+  rtc::scoped_refptr<AudioTrackInterface> track =
+      AudioTrack::Create(kAudioTrackId, nullptr);
+
+  // Track but no SSRC.
+  EXPECT_TRUE(audio_rtp_sender_->SetTrack(track.get()));
+  VerifyVoiceChannelNoInput();
+
+  // SSRC but no track.
+  EXPECT_TRUE(audio_rtp_sender_->SetTrack(nullptr));
+  audio_rtp_sender_->SetSsrc(kAudioSsrc);
+  VerifyVoiceChannelNoInput();
+}
+
+// Test that the media channel isn't enabled for sending if the video sender
+// doesn't have both a track and SSRC.
+TEST_F(RtpSenderReceiverTest, VideoSenderWithoutTrackAndSsrc) {
+  CreateVideoRtpSenderWithNoTrack();
+
+  // Track but no SSRC.
+  EXPECT_TRUE(video_rtp_sender_->SetTrack(video_track_.get()));
+  VerifyVideoChannelNoInput();
+
+  // SSRC but no track.
+  EXPECT_TRUE(video_rtp_sender_->SetTrack(nullptr));
+  video_rtp_sender_->SetSsrc(kVideoSsrc);
+  VerifyVideoChannelNoInput();
+}
+
+// Test that the media channel is enabled for sending when the audio sender
+// has a track and SSRC, when the SSRC is set first ("early warmup").
+TEST_F(RtpSenderReceiverTest, AudioSenderEarlyWarmupSsrcThenTrack) {
+  CreateAudioRtpSenderWithNoTrack();
+  rtc::scoped_refptr<AudioTrackInterface> track =
+      AudioTrack::Create(kAudioTrackId, nullptr);
+  audio_rtp_sender_->SetSsrc(kAudioSsrc);
+  audio_rtp_sender_->SetTrack(track.get());
+  VerifyVoiceChannelInput();
+
+  DestroyAudioRtpSender();
+}
+
+// Test that the media channel is enabled for sending when the audio sender
+// has a track and SSRC, when the SSRC is set last.
+TEST_F(RtpSenderReceiverTest, AudioSenderEarlyWarmupTrackThenSsrc) {
+  CreateAudioRtpSenderWithNoTrack();
+  rtc::scoped_refptr<AudioTrackInterface> track =
+      AudioTrack::Create(kAudioTrackId, nullptr);
+  audio_rtp_sender_->SetTrack(track.get());
+  audio_rtp_sender_->SetSsrc(kAudioSsrc);
+  VerifyVoiceChannelInput();
+
+  DestroyAudioRtpSender();
+}
+
+// Test that the media channel is enabled for sending when the video sender
+// has a track and SSRC, when the SSRC is set first.
+TEST_F(RtpSenderReceiverTest, VideoSenderEarlyWarmupSsrcThenTrack) {
+  AddVideoTrack();
+  CreateVideoRtpSenderWithNoTrack();
+  video_rtp_sender_->SetSsrc(kVideoSsrc);
+  video_rtp_sender_->SetTrack(video_track_.get());
+  VerifyVideoChannelInput();
+
+  DestroyVideoRtpSender();
+}
+
+// Test that the media channel is enabled for sending when the video sender
+// has a track and SSRC, when the SSRC is set last.
+TEST_F(RtpSenderReceiverTest, VideoSenderEarlyWarmupTrackThenSsrc) {
+  AddVideoTrack();
+  CreateVideoRtpSenderWithNoTrack();
+  video_rtp_sender_->SetTrack(video_track_.get());
+  video_rtp_sender_->SetSsrc(kVideoSsrc);
+  VerifyVideoChannelInput();
+
+  DestroyVideoRtpSender();
+}
+
+// Test that the media channel stops sending when the audio sender's SSRC is
+// set to 0 (i.e. un-negotiated).
+TEST_F(RtpSenderReceiverTest, AudioSenderSsrcSetToZero) {
+  CreateAudioRtpSender();
+
+  audio_rtp_sender_->SetSsrc(0);
+  VerifyVoiceChannelNoInput();
+}
+
+// Test that the media channel stops sending when the video sender's SSRC is set
+// to 0.
+TEST_F(RtpSenderReceiverTest, VideoSenderSsrcSetToZero) {
+  // Bug fix: this test previously exercised the *audio* sender
+  // (CreateAudioRtpSender / audio_rtp_sender_), so VerifyVideoChannelNoInput()
+  // passed vacuously and the video path was never tested.
+  CreateVideoRtpSender();
+
+  video_rtp_sender_->SetSsrc(0);
+  VerifyVideoChannelNoInput();
+}
+
+// Test that the media channel stops sending when the audio sender's track is
+// set to null.
+TEST_F(RtpSenderReceiverTest, AudioSenderTrackSetToNull) {
+  CreateAudioRtpSender();
+
+  // SetTrack(nullptr) detaches the source; returns true on success.
+  EXPECT_TRUE(audio_rtp_sender_->SetTrack(nullptr));
+  VerifyVoiceChannelNoInput();
+}
+
+// Test that the media channel stops sending when the video sender's track is
+// set to null.
+TEST_F(RtpSenderReceiverTest, VideoSenderTrackSetToNull) {
+  CreateVideoRtpSender();
+
+  // Bug fix: the body previously called SetSsrc(0), which duplicates
+  // VideoSenderSsrcSetToZero and never exercises the track-removal path this
+  // test (per its name and comment) is meant to cover. Mirror the audio
+  // variant: clear the track and verify the send source is gone.
+  EXPECT_TRUE(video_rtp_sender_->SetTrack(nullptr));
+  VerifyVideoChannelNoInput();
+}
+
+// Test that when the audio sender's SSRC is changed, the media channel stops
+// sending with the old SSRC and starts sending with the new one.
+TEST_F(RtpSenderReceiverTest, AudioSenderSsrcChanged) {
+  CreateAudioRtpSender();
+
+  audio_rtp_sender_->SetSsrc(kAudioSsrc2);
+  VerifyVoiceChannelNoInput(kAudioSsrc);
+  VerifyVoiceChannelInput(kAudioSsrc2);
+
+  // Destroy manually (not via DestroyAudioRtpSender) so the no-input check
+  // runs against the *second* SSRC.
+  audio_rtp_sender_ = nullptr;
+  VerifyVoiceChannelNoInput(kAudioSsrc2);
+}
+
+// Test that when the video sender's SSRC is changed, the media channel stops
+// sending with the old SSRC and starts sending with the new one.
+TEST_F(RtpSenderReceiverTest, VideoSenderSsrcChanged) {
+  CreateVideoRtpSender();
+
+  video_rtp_sender_->SetSsrc(kVideoSsrc2);
+  VerifyVideoChannelNoInput(kVideoSsrc);
+  VerifyVideoChannelInput(kVideoSsrc2);
+
+  video_rtp_sender_ = nullptr;
+  VerifyVideoChannelNoInput(kVideoSsrc2);
+}
+
+// GetParameters/SetParameters round-trip on a fully negotiated audio sender.
+TEST_F(RtpSenderReceiverTest, AudioSenderCanSetParameters) {
+  CreateAudioRtpSender();
+
+  RtpParameters params = audio_rtp_sender_->GetParameters();
+  EXPECT_EQ(1u, params.encodings.size());
+  EXPECT_TRUE(audio_rtp_sender_->SetParameters(params).ok());
+
+  DestroyAudioRtpSender();
+}
+
+// Async variant: the completion callback fires after flushing the run loop.
+TEST_F(RtpSenderReceiverTest, AudioSenderCanSetParametersAsync) {
+  CreateAudioRtpSender();
+
+  RtpParameters params = audio_rtp_sender_->GetParameters();
+  EXPECT_EQ(1u, params.encodings.size());
+  absl::optional<webrtc::RTCError> result;
+  audio_rtp_sender_->SetParametersAsync(
+      params, [&result](webrtc::RTCError error) { result = error; });
+  run_loop_.Flush();
+  EXPECT_TRUE(result->ok());
+
+  DestroyAudioRtpSender();
+}
+
+// Parameters can be set before negotiation (sender has no media channel yet).
+TEST_F(RtpSenderReceiverTest, AudioSenderCanSetParametersBeforeNegotiation) {
+  audio_rtp_sender_ =
+      AudioRtpSender::Create(worker_thread_, /*id=*/"", nullptr, nullptr);
+
+  RtpParameters params = audio_rtp_sender_->GetParameters();
+  ASSERT_EQ(1u, params.encodings.size());
+  params.encodings[0].max_bitrate_bps = 90000;
+  EXPECT_TRUE(audio_rtp_sender_->SetParameters(params).ok());
+
+  params = audio_rtp_sender_->GetParameters();
+  EXPECT_EQ(params.encodings[0].max_bitrate_bps, 90000);
+  EXPECT_TRUE(audio_rtp_sender_->SetParameters(params).ok());
+
+  DestroyAudioRtpSender();
+}
+
+TEST_F(RtpSenderReceiverTest,
+       AudioSenderCanSetParametersAsyncBeforeNegotiation) {
+  audio_rtp_sender_ =
+      AudioRtpSender::Create(worker_thread_, /*id=*/"", nullptr, nullptr);
+
+  absl::optional<webrtc::RTCError> result;
+  RtpParameters params = audio_rtp_sender_->GetParameters();
+  ASSERT_EQ(1u, params.encodings.size());
+  params.encodings[0].max_bitrate_bps = 90000;
+
+  audio_rtp_sender_->SetParametersAsync(
+      params, [&result](webrtc::RTCError error) { result = error; });
+  run_loop_.Flush();
+  EXPECT_TRUE(result->ok());
+
+  params = audio_rtp_sender_->GetParameters();
+  EXPECT_EQ(params.encodings[0].max_bitrate_bps, 90000);
+
+  audio_rtp_sender_->SetParametersAsync(
+      params, [&result](webrtc::RTCError error) { result = error; });
+  run_loop_.Flush();
+  EXPECT_TRUE(result->ok());
+
+  DestroyAudioRtpSender();
+}
+
+// Init-send-encodings set before negotiation must survive (be "moved") when
+// the sender later gets a media channel and SSRC.
+TEST_F(RtpSenderReceiverTest, AudioSenderInitParametersMovedAfterNegotiation) {
+  audio_track_ = AudioTrack::Create(kAudioTrackId, nullptr);
+  EXPECT_TRUE(local_stream_->AddTrack(audio_track_));
+
+  std::unique_ptr<MockSetStreamsObserver> set_streams_observer =
+      std::make_unique<MockSetStreamsObserver>();
+  audio_rtp_sender_ = AudioRtpSender::Create(
+      worker_thread_, audio_track_->id(), nullptr, set_streams_observer.get());
+  ASSERT_TRUE(audio_rtp_sender_->SetTrack(audio_track_.get()));
+  EXPECT_CALL(*set_streams_observer, OnSetStreams());
+  audio_rtp_sender_->SetStreams({local_stream_->id()});
+
+  std::vector<RtpEncodingParameters> init_encodings(1);
+  init_encodings[0].max_bitrate_bps = 60000;
+  audio_rtp_sender_->set_init_send_encodings(init_encodings);
+
+  RtpParameters params = audio_rtp_sender_->GetParameters();
+  ASSERT_EQ(1u, params.encodings.size());
+  EXPECT_EQ(params.encodings[0].max_bitrate_bps, 60000);
+
+  // Simulate the setLocalDescription call
+  std::vector<uint32_t> ssrcs(1, 1);
+  cricket::StreamParams stream_params =
+      cricket::CreateSimStreamParams("cname", ssrcs);
+  voice_media_send_channel()->AddSendStream(stream_params);
+  audio_rtp_sender_->SetMediaChannel(
+      voice_media_send_channel()->AsVoiceSendChannel());
+  audio_rtp_sender_->SetSsrc(1);
+
+  // The pre-negotiation encodings must now be reflected by the channel-backed
+  // parameters.
+  params = audio_rtp_sender_->GetParameters();
+  ASSERT_EQ(1u, params.encodings.size());
+  EXPECT_EQ(params.encodings[0].max_bitrate_bps, 60000);
+
+  DestroyAudioRtpSender();
+}
+
+// SetParameters without a prior GetParameters must fail with INVALID_STATE
+// (no valid transaction id), both before and after negotiation.
+TEST_F(RtpSenderReceiverTest,
+       AudioSenderMustCallGetParametersBeforeSetParametersBeforeNegotiation) {
+  audio_rtp_sender_ =
+      AudioRtpSender::Create(worker_thread_, /*id=*/"", nullptr, nullptr);
+
+  RtpParameters params;
+  RTCError result = audio_rtp_sender_->SetParameters(params);
+  EXPECT_EQ(RTCErrorType::INVALID_STATE, result.type());
+  DestroyAudioRtpSender();
+}
+
+TEST_F(RtpSenderReceiverTest,
+       AudioSenderMustCallGetParametersBeforeSetParameters) {
+  CreateAudioRtpSender();
+
+  RtpParameters params;
+  RTCError result = audio_rtp_sender_->SetParameters(params);
+  EXPECT_EQ(RTCErrorType::INVALID_STATE, result.type());
+
+  DestroyAudioRtpSender();
+}
+
+// A successful SetParameters consumes the transaction id; reusing the same
+// (now stale) parameters must fail with INVALID_STATE.
+TEST_F(RtpSenderReceiverTest,
+       AudioSenderSetParametersInvalidatesTransactionId) {
+  CreateAudioRtpSender();
+
+  RtpParameters params = audio_rtp_sender_->GetParameters();
+  EXPECT_EQ(1u, params.encodings.size());
+  EXPECT_TRUE(audio_rtp_sender_->SetParameters(params).ok());
+  RTCError result = audio_rtp_sender_->SetParameters(params);
+  EXPECT_EQ(RTCErrorType::INVALID_STATE, result.type());
+
+  DestroyAudioRtpSender();
+}
+
+TEST_F(RtpSenderReceiverTest,
+       AudioSenderSetParametersAsyncInvalidatesTransactionId) {
+  CreateAudioRtpSender();
+
+  RtpParameters params = audio_rtp_sender_->GetParameters();
+  EXPECT_EQ(1u, params.encodings.size());
+  absl::optional<webrtc::RTCError> result;
+  audio_rtp_sender_->SetParametersAsync(
+      params, [&result](webrtc::RTCError error) { result = error; });
+  run_loop_.Flush();
+  EXPECT_TRUE(result->ok());
+  audio_rtp_sender_->SetParametersAsync(
+      params, [&result](webrtc::RTCError error) { result = error; });
+  run_loop_.Flush();
+  EXPECT_EQ(RTCErrorType::INVALID_STATE, result->type());
+
+  DestroyAudioRtpSender();
+}
+
+// Tampering with the transaction id must be rejected as INVALID_MODIFICATION.
+TEST_F(RtpSenderReceiverTest, AudioSenderDetectTransactionIdModification) {
+  CreateAudioRtpSender();
+
+  RtpParameters params = audio_rtp_sender_->GetParameters();
+  params.transaction_id = "";
+  RTCError result = audio_rtp_sender_->SetParameters(params);
+  EXPECT_EQ(RTCErrorType::INVALID_MODIFICATION, result.type());
+
+  DestroyAudioRtpSender();
+}
+
+// Each GetParameters call must mint a fresh, non-empty transaction id.
+TEST_F(RtpSenderReceiverTest, AudioSenderCheckTransactionIdRefresh) {
+  CreateAudioRtpSender();
+
+  RtpParameters params = audio_rtp_sender_->GetParameters();
+  EXPECT_NE(params.transaction_id.size(), 0U);
+  auto saved_transaction_id = params.transaction_id;
+  params = audio_rtp_sender_->GetParameters();
+  EXPECT_NE(saved_transaction_id, params.transaction_id);
+
+  DestroyAudioRtpSender();
+}
+
+// Setting parameters from an older GetParameters snapshot (superseded by a
+// newer one) must fail as INVALID_MODIFICATION.
+TEST_F(RtpSenderReceiverTest, AudioSenderSetParametersOldValueFail) {
+  CreateAudioRtpSender();
+
+  RtpParameters params = audio_rtp_sender_->GetParameters();
+  RtpParameters second_params = audio_rtp_sender_->GetParameters();
+
+  RTCError result = audio_rtp_sender_->SetParameters(params);
+  EXPECT_EQ(RTCErrorType::INVALID_MODIFICATION, result.type());
+  DestroyAudioRtpSender();
+}
+
+TEST_F(RtpSenderReceiverTest, AudioSenderCantSetUnimplementedRtpParameters) {
+  CreateAudioRtpSender();
+  RtpParameters params = audio_rtp_sender_->GetParameters();
+  EXPECT_EQ(1u, params.encodings.size());
+
+  // Unimplemented RtpParameters: mid
+  params.mid = "dummy_mid";
+  EXPECT_EQ(RTCErrorType::UNSUPPORTED_PARAMETER,
+            audio_rtp_sender_->SetParameters(params).type());
+  params = audio_rtp_sender_->GetParameters();
+
+  DestroyAudioRtpSender();
+}
+
+// Per-encoding max bitrate set on the sender must reach the media channel
+// without touching the channel's global bitrate cap.
+TEST_F(RtpSenderReceiverTest, SetAudioMaxSendBitrate) {
+  CreateAudioRtpSender();
+
+  EXPECT_EQ(-1, voice_media_send_channel()->max_bps());
+  webrtc::RtpParameters params = audio_rtp_sender_->GetParameters();
+  EXPECT_EQ(1U, params.encodings.size());
+  EXPECT_FALSE(params.encodings[0].max_bitrate_bps);
+  params.encodings[0].max_bitrate_bps = 1000;
+  EXPECT_TRUE(audio_rtp_sender_->SetParameters(params).ok());
+
+  // Read back the parameters and verify they have been changed.
+  params = audio_rtp_sender_->GetParameters();
+  EXPECT_EQ(1U, params.encodings.size());
+  EXPECT_EQ(1000, params.encodings[0].max_bitrate_bps);
+
+  // Verify that the audio channel received the new parameters.
+  params = voice_media_send_channel()->GetRtpSendParameters(kAudioSsrc);
+  EXPECT_EQ(1U, params.encodings.size());
+  EXPECT_EQ(1000, params.encodings[0].max_bitrate_bps);
+
+  // Verify that the global bitrate limit has not been changed.
+  EXPECT_EQ(-1, voice_media_send_channel()->max_bps());
+
+  DestroyAudioRtpSender();
+}
+
+// Bitrate priority set on the sender must round-trip and reach the channel.
+TEST_F(RtpSenderReceiverTest, SetAudioBitratePriority) {
+  CreateAudioRtpSender();
+
+  webrtc::RtpParameters params = audio_rtp_sender_->GetParameters();
+  EXPECT_EQ(1U, params.encodings.size());
+  EXPECT_EQ(webrtc::kDefaultBitratePriority,
+            params.encodings[0].bitrate_priority);
+  double new_bitrate_priority = 2.0;
+  params.encodings[0].bitrate_priority = new_bitrate_priority;
+  EXPECT_TRUE(audio_rtp_sender_->SetParameters(params).ok());
+
+  params = audio_rtp_sender_->GetParameters();
+  EXPECT_EQ(1U, params.encodings.size());
+  EXPECT_EQ(new_bitrate_priority, params.encodings[0].bitrate_priority);
+
+  params = voice_media_send_channel()->GetRtpSendParameters(kAudioSsrc);
+  EXPECT_EQ(1U, params.encodings.size());
+  EXPECT_EQ(new_bitrate_priority, params.encodings[0].bitrate_priority);
+
+  DestroyAudioRtpSender();
+}
+
+// GetParameters/SetParameters round-trip on a fully negotiated video sender.
+TEST_F(RtpSenderReceiverTest, VideoSenderCanSetParameters) {
+  CreateVideoRtpSender();
+
+  RtpParameters params = video_rtp_sender_->GetParameters();
+  EXPECT_EQ(1u, params.encodings.size());
+  EXPECT_TRUE(video_rtp_sender_->SetParameters(params).ok());
+
+  DestroyVideoRtpSender();
+}
+
+// Async variant: the completion callback fires after flushing the run loop.
+TEST_F(RtpSenderReceiverTest, VideoSenderCanSetParametersAsync) {
+  CreateVideoRtpSender();
+
+  RtpParameters params = video_rtp_sender_->GetParameters();
+  EXPECT_EQ(1u, params.encodings.size());
+  absl::optional<webrtc::RTCError> result;
+  video_rtp_sender_->SetParametersAsync(
+      params, [&result](webrtc::RTCError error) { result = error; });
+  run_loop_.Flush();
+  EXPECT_TRUE(result->ok());
+
+  DestroyVideoRtpSender();
+}
+
+// Parameters can be set before negotiation (sender has no media channel yet).
+TEST_F(RtpSenderReceiverTest, VideoSenderCanSetParametersBeforeNegotiation) {
+  video_rtp_sender_ =
+      VideoRtpSender::Create(worker_thread_, /*id=*/"", nullptr);
+
+  RtpParameters params = video_rtp_sender_->GetParameters();
+  ASSERT_EQ(1u, params.encodings.size());
+  params.encodings[0].max_bitrate_bps = 90000;
+  EXPECT_TRUE(video_rtp_sender_->SetParameters(params).ok());
+
+  params = video_rtp_sender_->GetParameters();
+  EXPECT_TRUE(video_rtp_sender_->SetParameters(params).ok());
+  EXPECT_EQ(params.encodings[0].max_bitrate_bps, 90000);
+
+  DestroyVideoRtpSender();
+}
+
+TEST_F(RtpSenderReceiverTest,
+       VideoSenderCanSetParametersAsyncBeforeNegotiation) {
+  video_rtp_sender_ =
+      VideoRtpSender::Create(worker_thread_, /*id=*/"", nullptr);
+
+  absl::optional<webrtc::RTCError> result;
+  RtpParameters params = video_rtp_sender_->GetParameters();
+  ASSERT_EQ(1u, params.encodings.size());
+  params.encodings[0].max_bitrate_bps = 90000;
+  video_rtp_sender_->SetParametersAsync(
+      params, [&result](webrtc::RTCError error) { result = error; });
+  run_loop_.Flush();
+  EXPECT_TRUE(result->ok());
+
+  params = video_rtp_sender_->GetParameters();
+  EXPECT_EQ(params.encodings[0].max_bitrate_bps, 90000);
+  video_rtp_sender_->SetParametersAsync(
+      params, [&result](webrtc::RTCError error) { result = error; });
+  run_loop_.Flush();
+  EXPECT_TRUE(result->ok());
+
+  DestroyVideoRtpSender();
+}
+
+// Two simulcast init-encodings set before negotiation must survive when the
+// sender later gets a media channel and the simulcast primary SSRC.
+TEST_F(RtpSenderReceiverTest, VideoSenderInitParametersMovedAfterNegotiation) {
+  AddVideoTrack(false);
+
+  std::unique_ptr<MockSetStreamsObserver> set_streams_observer =
+      std::make_unique<MockSetStreamsObserver>();
+  video_rtp_sender_ = VideoRtpSender::Create(worker_thread_, video_track_->id(),
+                                             set_streams_observer.get());
+  ASSERT_TRUE(video_rtp_sender_->SetTrack(video_track_.get()));
+  EXPECT_CALL(*set_streams_observer, OnSetStreams());
+  video_rtp_sender_->SetStreams({local_stream_->id()});
+
+  std::vector<RtpEncodingParameters> init_encodings(2);
+  init_encodings[0].max_bitrate_bps = 60000;
+  init_encodings[1].max_bitrate_bps = 900000;
+  video_rtp_sender_->set_init_send_encodings(init_encodings);
+
+  RtpParameters params = video_rtp_sender_->GetParameters();
+  ASSERT_EQ(2u, params.encodings.size());
+  EXPECT_EQ(params.encodings[0].max_bitrate_bps, 60000);
+  EXPECT_EQ(params.encodings[1].max_bitrate_bps, 900000);
+
+  // Simulate the setLocalDescription call
+  std::vector<uint32_t> ssrcs;
+  ssrcs.reserve(2);
+  for (int i = 0; i < 2; ++i)
+    ssrcs.push_back(kVideoSsrcSimulcast + i);
+  cricket::StreamParams stream_params =
+      cricket::CreateSimStreamParams("cname", ssrcs);
+  video_media_send_channel()->AddSendStream(stream_params);
+  video_rtp_sender_->SetMediaChannel(
+      video_media_send_channel()->AsVideoSendChannel());
+  video_rtp_sender_->SetSsrc(kVideoSsrcSimulcast);
+
+  params = video_rtp_sender_->GetParameters();
+  ASSERT_EQ(2u, params.encodings.size());
+  EXPECT_EQ(params.encodings[0].max_bitrate_bps, 60000);
+  EXPECT_EQ(params.encodings[1].max_bitrate_bps, 900000);
+
+  DestroyVideoRtpSender();
+}
+
+// One init-encoding plus SDP-munged two-layer simulcast: after negotiation the
+// sender reports two encodings, with the pre-set bitrate kept on the first.
+TEST_F(RtpSenderReceiverTest,
+       VideoSenderInitParametersMovedAfterManualSimulcastAndNegotiation) {
+  AddVideoTrack(false);
+
+  std::unique_ptr<MockSetStreamsObserver> set_streams_observer =
+      std::make_unique<MockSetStreamsObserver>();
+  video_rtp_sender_ = VideoRtpSender::Create(worker_thread_, video_track_->id(),
+                                             set_streams_observer.get());
+  ASSERT_TRUE(video_rtp_sender_->SetTrack(video_track_.get()));
+  EXPECT_CALL(*set_streams_observer, OnSetStreams());
+  video_rtp_sender_->SetStreams({local_stream_->id()});
+
+  std::vector<RtpEncodingParameters> init_encodings(1);
+  init_encodings[0].max_bitrate_bps = 60000;
+  video_rtp_sender_->set_init_send_encodings(init_encodings);
+
+  RtpParameters params = video_rtp_sender_->GetParameters();
+  ASSERT_EQ(1u, params.encodings.size());
+  EXPECT_EQ(params.encodings[0].max_bitrate_bps, 60000);
+
+  // Simulate the setLocalDescription call as if the user used SDP munging
+  // to enable simulcast
+  std::vector<uint32_t> ssrcs;
+  ssrcs.reserve(2);
+  for (int i = 0; i < 2; ++i)
+    ssrcs.push_back(kVideoSsrcSimulcast + i);
+  cricket::StreamParams stream_params =
+      cricket::CreateSimStreamParams("cname", ssrcs);
+  video_media_send_channel()->AddSendStream(stream_params);
+  video_rtp_sender_->SetMediaChannel(
+      video_media_send_channel()->AsVideoSendChannel());
+  video_rtp_sender_->SetSsrc(kVideoSsrcSimulcast);
+
+  params = video_rtp_sender_->GetParameters();
+  ASSERT_EQ(2u, params.encodings.size());
+  EXPECT_EQ(params.encodings[0].max_bitrate_bps, 60000);
+
+  DestroyVideoRtpSender();
+}
+
+#if GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
+using RtpSenderReceiverDeathTest = RtpSenderReceiverTest;
+
+TEST_F(RtpSenderReceiverDeathTest,
+ VideoSenderManualRemoveSimulcastFailsDeathTest) {
+ AddVideoTrack(false);
+
+ std::unique_ptr<MockSetStreamsObserver> set_streams_observer =
+ std::make_unique<MockSetStreamsObserver>();
+ video_rtp_sender_ = VideoRtpSender::Create(worker_thread_, video_track_->id(),
+ set_streams_observer.get());
+ ASSERT_TRUE(video_rtp_sender_->SetTrack(video_track_.get()));
+ EXPECT_CALL(*set_streams_observer, OnSetStreams());
+ video_rtp_sender_->SetStreams({local_stream_->id()});
+
+ std::vector<RtpEncodingParameters> init_encodings(2);
+ init_encodings[0].max_bitrate_bps = 60000;
+ init_encodings[1].max_bitrate_bps = 120000;
+ video_rtp_sender_->set_init_send_encodings(init_encodings);
+
+ RtpParameters params = video_rtp_sender_->GetParameters();
+ ASSERT_EQ(2u, params.encodings.size());
+ EXPECT_EQ(params.encodings[0].max_bitrate_bps, 60000);
+
+ // Simulate the setLocalDescription call as if the user used SDP munging
+ // to disable simulcast.
+ std::vector<uint32_t> ssrcs;
+ ssrcs.reserve(2);
+ for (int i = 0; i < 2; ++i)
+ ssrcs.push_back(kVideoSsrcSimulcast + i);
+ cricket::StreamParams stream_params =
+ cricket::StreamParams::CreateLegacy(kVideoSsrc);
+ video_media_send_channel()->AddSendStream(stream_params);
+ video_rtp_sender_->SetMediaChannel(
+ video_media_send_channel()->AsVideoSendChannel());
+ EXPECT_DEATH(video_rtp_sender_->SetSsrc(kVideoSsrcSimulcast), "");
+}
+#endif
+
+TEST_F(RtpSenderReceiverTest,
+ VideoSenderMustCallGetParametersBeforeSetParametersBeforeNegotiation) {
+ video_rtp_sender_ =
+ VideoRtpSender::Create(worker_thread_, /*id=*/"", nullptr);
+
+ RtpParameters params;
+ RTCError result = video_rtp_sender_->SetParameters(params);
+ EXPECT_EQ(RTCErrorType::INVALID_STATE, result.type());
+ DestroyVideoRtpSender();
+}
+
+TEST_F(RtpSenderReceiverTest,
+ VideoSenderMustCallGetParametersBeforeSetParameters) {
+ CreateVideoRtpSender();
+
+ RtpParameters params;
+ RTCError result = video_rtp_sender_->SetParameters(params);
+ EXPECT_EQ(RTCErrorType::INVALID_STATE, result.type());
+
+ DestroyVideoRtpSender();
+}
+
+TEST_F(RtpSenderReceiverTest,
+ VideoSenderSetParametersInvalidatesTransactionId) {
+ CreateVideoRtpSender();
+
+ RtpParameters params = video_rtp_sender_->GetParameters();
+ EXPECT_EQ(1u, params.encodings.size());
+ EXPECT_TRUE(video_rtp_sender_->SetParameters(params).ok());
+ RTCError result = video_rtp_sender_->SetParameters(params);
+ EXPECT_EQ(RTCErrorType::INVALID_STATE, result.type());
+
+ DestroyVideoRtpSender();
+}
+
+TEST_F(RtpSenderReceiverTest,
+ VideoSenderSetParametersAsyncInvalidatesTransactionId) {
+ CreateVideoRtpSender();
+
+ RtpParameters params = video_rtp_sender_->GetParameters();
+ EXPECT_EQ(1u, params.encodings.size());
+ absl::optional<webrtc::RTCError> result;
+ video_rtp_sender_->SetParametersAsync(
+ params, [&result](webrtc::RTCError error) { result = error; });
+ run_loop_.Flush();
+ EXPECT_TRUE(result->ok());
+ video_rtp_sender_->SetParametersAsync(
+ params, [&result](webrtc::RTCError error) { result = error; });
+ run_loop_.Flush();
+ EXPECT_EQ(RTCErrorType::INVALID_STATE, result->type());
+
+ DestroyVideoRtpSender();
+}
+
+TEST_F(RtpSenderReceiverTest, VideoSenderDetectTransactionIdModification) {
+ CreateVideoRtpSender();
+
+ RtpParameters params = video_rtp_sender_->GetParameters();
+ params.transaction_id = "";
+ RTCError result = video_rtp_sender_->SetParameters(params);
+ EXPECT_EQ(RTCErrorType::INVALID_MODIFICATION, result.type());
+
+ DestroyVideoRtpSender();
+}
+
+TEST_F(RtpSenderReceiverTest, VideoSenderCheckTransactionIdRefresh) {
+ CreateVideoRtpSender();
+
+ RtpParameters params = video_rtp_sender_->GetParameters();
+ EXPECT_NE(params.transaction_id.size(), 0U);
+ auto saved_transaction_id = params.transaction_id;
+ params = video_rtp_sender_->GetParameters();
+ EXPECT_NE(saved_transaction_id, params.transaction_id);
+
+ DestroyVideoRtpSender();
+}
+
+TEST_F(RtpSenderReceiverTest, VideoSenderSetParametersOldValueFail) {
+ CreateVideoRtpSender();
+
+ RtpParameters params = video_rtp_sender_->GetParameters();
+ RtpParameters second_params = video_rtp_sender_->GetParameters();
+
+ RTCError result = video_rtp_sender_->SetParameters(params);
+ EXPECT_EQ(RTCErrorType::INVALID_MODIFICATION, result.type());
+
+ DestroyVideoRtpSender();
+}
+
+TEST_F(RtpSenderReceiverTest, VideoSenderCantSetUnimplementedRtpParameters) {
+ CreateVideoRtpSender();
+ RtpParameters params = video_rtp_sender_->GetParameters();
+ EXPECT_EQ(1u, params.encodings.size());
+
+ // Unimplemented RtpParameters: mid
+ params.mid = "dummy_mid";
+ EXPECT_EQ(RTCErrorType::UNSUPPORTED_PARAMETER,
+ video_rtp_sender_->SetParameters(params).type());
+ params = video_rtp_sender_->GetParameters();
+
+ DestroyVideoRtpSender();
+}
+
+TEST_F(RtpSenderReceiverTest, VideoSenderCanSetScaleResolutionDownBy) {
+ CreateVideoRtpSender();
+
+ RtpParameters params = video_rtp_sender_->GetParameters();
+ params.encodings[0].scale_resolution_down_by = 2;
+
+ EXPECT_TRUE(video_rtp_sender_->SetParameters(params).ok());
+ params = video_rtp_sender_->GetParameters();
+ EXPECT_EQ(2, params.encodings[0].scale_resolution_down_by);
+
+ DestroyVideoRtpSender();
+}
+
+TEST_F(RtpSenderReceiverTest, VideoSenderDetectInvalidScaleResolutionDownBy) {
+ CreateVideoRtpSender();
+
+ RtpParameters params = video_rtp_sender_->GetParameters();
+ params.encodings[0].scale_resolution_down_by = 0.5;
+ RTCError result = video_rtp_sender_->SetParameters(params);
+ EXPECT_EQ(RTCErrorType::INVALID_RANGE, result.type());
+
+ DestroyVideoRtpSender();
+}
+
+TEST_F(RtpSenderReceiverTest, VideoSenderCanSetNumTemporalLayers) {
+ CreateVideoRtpSender();
+
+ RtpParameters params = video_rtp_sender_->GetParameters();
+ params.encodings[0].num_temporal_layers = 2;
+
+ EXPECT_TRUE(video_rtp_sender_->SetParameters(params).ok());
+ params = video_rtp_sender_->GetParameters();
+ EXPECT_EQ(2, params.encodings[0].num_temporal_layers);
+
+ DestroyVideoRtpSender();
+}
+
+TEST_F(RtpSenderReceiverTest, VideoSenderDetectInvalidNumTemporalLayers) {
+ CreateVideoRtpSender();
+
+ RtpParameters params = video_rtp_sender_->GetParameters();
+ params.encodings[0].num_temporal_layers = webrtc::kMaxTemporalStreams + 1;
+ RTCError result = video_rtp_sender_->SetParameters(params);
+ EXPECT_EQ(RTCErrorType::INVALID_RANGE, result.type());
+
+ DestroyVideoRtpSender();
+}
+
+TEST_F(RtpSenderReceiverTest, VideoSenderCanSetMaxFramerate) {
+ CreateVideoRtpSender();
+
+ RtpParameters params = video_rtp_sender_->GetParameters();
+ params.encodings[0].max_framerate = 20;
+
+ EXPECT_TRUE(video_rtp_sender_->SetParameters(params).ok());
+ params = video_rtp_sender_->GetParameters();
+ EXPECT_EQ(20., params.encodings[0].max_framerate);
+
+ DestroyVideoRtpSender();
+}
+
+TEST_F(RtpSenderReceiverTest, VideoSenderCanSetMaxFramerateZero) {
+ CreateVideoRtpSender();
+
+ RtpParameters params = video_rtp_sender_->GetParameters();
+ params.encodings[0].max_framerate = 0.;
+
+ EXPECT_TRUE(video_rtp_sender_->SetParameters(params).ok());
+ params = video_rtp_sender_->GetParameters();
+ EXPECT_EQ(0., params.encodings[0].max_framerate);
+
+ DestroyVideoRtpSender();
+}
+
+TEST_F(RtpSenderReceiverTest, VideoSenderDetectInvalidMaxFramerate) {
+ CreateVideoRtpSender();
+
+ RtpParameters params = video_rtp_sender_->GetParameters();
+ params.encodings[0].max_framerate = -5.;
+ RTCError result = video_rtp_sender_->SetParameters(params);
+ EXPECT_EQ(RTCErrorType::INVALID_RANGE, result.type());
+
+ DestroyVideoRtpSender();
+}
+
+// A video sender can have multiple simulcast layers, in which case it will
+// contain multiple RtpEncodingParameters. This tests that if this is the case
+// (simulcast), then we can't set the bitrate_priority, or max_bitrate_bps
+// for any encodings besides at index 0, because these are both implemented
+// "per-sender."
+TEST_F(RtpSenderReceiverTest, VideoSenderCantSetPerSenderEncodingParameters) {
+ // Add a simulcast specific send stream that contains 2 encoding parameters.
+ CreateVideoRtpSenderWithSimulcast();
+ RtpParameters params = video_rtp_sender_->GetParameters();
+ EXPECT_EQ(kVideoSimulcastLayerCount, params.encodings.size());
+
+ params.encodings[1].bitrate_priority = 2.0;
+ EXPECT_EQ(RTCErrorType::UNSUPPORTED_PARAMETER,
+ video_rtp_sender_->SetParameters(params).type());
+ params = video_rtp_sender_->GetParameters();
+
+ DestroyVideoRtpSender();
+}
+
+TEST_F(RtpSenderReceiverTest, VideoSenderCantSetReadOnlyEncodingParameters) {
+ // Add a simulcast specific send stream that contains 2 encoding parameters.
+ CreateVideoRtpSenderWithSimulcast();
+ RtpParameters params = video_rtp_sender_->GetParameters();
+ EXPECT_EQ(kVideoSimulcastLayerCount, params.encodings.size());
+
+ for (size_t i = 0; i < params.encodings.size(); i++) {
+ params.encodings[i].ssrc = 1337;
+ EXPECT_EQ(RTCErrorType::INVALID_MODIFICATION,
+ video_rtp_sender_->SetParameters(params).type());
+ params = video_rtp_sender_->GetParameters();
+ }
+
+ DestroyVideoRtpSender();
+}
+
+TEST_F(RtpSenderReceiverTest, SetVideoMinMaxSendBitrate) {
+ CreateVideoRtpSender();
+
+ EXPECT_EQ(-1, video_media_send_channel()->max_bps());
+ webrtc::RtpParameters params = video_rtp_sender_->GetParameters();
+ EXPECT_EQ(1U, params.encodings.size());
+ EXPECT_FALSE(params.encodings[0].min_bitrate_bps);
+ EXPECT_FALSE(params.encodings[0].max_bitrate_bps);
+ params.encodings[0].min_bitrate_bps = 100;
+ params.encodings[0].max_bitrate_bps = 1000;
+ EXPECT_TRUE(video_rtp_sender_->SetParameters(params).ok());
+
+ // Read back the parameters and verify they have been changed.
+ params = video_rtp_sender_->GetParameters();
+ EXPECT_EQ(1U, params.encodings.size());
+ EXPECT_EQ(100, params.encodings[0].min_bitrate_bps);
+ EXPECT_EQ(1000, params.encodings[0].max_bitrate_bps);
+
+ // Verify that the video channel received the new parameters.
+ params = video_media_send_channel()->GetRtpSendParameters(kVideoSsrc);
+ EXPECT_EQ(1U, params.encodings.size());
+ EXPECT_EQ(100, params.encodings[0].min_bitrate_bps);
+ EXPECT_EQ(1000, params.encodings[0].max_bitrate_bps);
+
+ // Verify that the global bitrate limit has not been changed.
+ EXPECT_EQ(-1, video_media_send_channel()->max_bps());
+
+ DestroyVideoRtpSender();
+}
+
+TEST_F(RtpSenderReceiverTest, SetVideoMinMaxSendBitrateSimulcast) {
+ // Add a simulcast specific send stream that contains 2 encoding parameters.
+ CreateVideoRtpSenderWithSimulcast();
+
+ RtpParameters params = video_rtp_sender_->GetParameters();
+ EXPECT_EQ(kVideoSimulcastLayerCount, params.encodings.size());
+ params.encodings[0].min_bitrate_bps = 100;
+ params.encodings[0].max_bitrate_bps = 1000;
+ params.encodings[1].min_bitrate_bps = 200;
+ params.encodings[1].max_bitrate_bps = 2000;
+ EXPECT_TRUE(video_rtp_sender_->SetParameters(params).ok());
+
+ // Verify that the video channel received the new parameters.
+ params =
+ video_media_send_channel()->GetRtpSendParameters(kVideoSsrcSimulcast);
+ EXPECT_EQ(kVideoSimulcastLayerCount, params.encodings.size());
+ EXPECT_EQ(100, params.encodings[0].min_bitrate_bps);
+ EXPECT_EQ(1000, params.encodings[0].max_bitrate_bps);
+ EXPECT_EQ(200, params.encodings[1].min_bitrate_bps);
+ EXPECT_EQ(2000, params.encodings[1].max_bitrate_bps);
+
+ DestroyVideoRtpSender();
+}
+
+TEST_F(RtpSenderReceiverTest, SetVideoBitratePriority) {
+ CreateVideoRtpSender();
+
+ webrtc::RtpParameters params = video_rtp_sender_->GetParameters();
+ EXPECT_EQ(1U, params.encodings.size());
+ EXPECT_EQ(webrtc::kDefaultBitratePriority,
+ params.encodings[0].bitrate_priority);
+ double new_bitrate_priority = 2.0;
+ params.encodings[0].bitrate_priority = new_bitrate_priority;
+ EXPECT_TRUE(video_rtp_sender_->SetParameters(params).ok());
+
+ params = video_rtp_sender_->GetParameters();
+ EXPECT_EQ(1U, params.encodings.size());
+ EXPECT_EQ(new_bitrate_priority, params.encodings[0].bitrate_priority);
+
+ params = video_media_send_channel()->GetRtpSendParameters(kVideoSsrc);
+ EXPECT_EQ(1U, params.encodings.size());
+ EXPECT_EQ(new_bitrate_priority, params.encodings[0].bitrate_priority);
+
+ DestroyVideoRtpSender();
+}
+
+TEST_F(RtpSenderReceiverTest, VideoReceiverCanGetParametersWithSimulcast) {
+ CreateVideoRtpReceiverWithSimulcast({}, 2);
+
+ RtpParameters params = video_rtp_receiver_->GetParameters();
+ EXPECT_EQ(2u, params.encodings.size());
+
+ DestroyVideoRtpReceiver();
+}
+
+TEST_F(RtpSenderReceiverTest, GenerateKeyFrameWithAudio) {
+ CreateAudioRtpSender();
+
+ auto error = audio_rtp_sender_->GenerateKeyFrame({});
+ EXPECT_FALSE(error.ok());
+ EXPECT_EQ(error.type(), RTCErrorType::UNSUPPORTED_OPERATION);
+
+ DestroyAudioRtpSender();
+}
+
+TEST_F(RtpSenderReceiverTest, GenerateKeyFrameWithVideo) {
+ CreateVideoRtpSenderWithSimulcast({"1", "2", "3"});
+
+ auto error = video_rtp_sender_->GenerateKeyFrame({});
+ EXPECT_TRUE(error.ok());
+
+ error = video_rtp_sender_->GenerateKeyFrame({"1"});
+ EXPECT_TRUE(error.ok());
+
+ error = video_rtp_sender_->GenerateKeyFrame({""});
+ EXPECT_FALSE(error.ok());
+ EXPECT_EQ(error.type(), RTCErrorType::INVALID_PARAMETER);
+
+ error = video_rtp_sender_->GenerateKeyFrame({"no-such-rid"});
+ EXPECT_FALSE(error.ok());
+ EXPECT_EQ(error.type(), RTCErrorType::INVALID_PARAMETER);
+
+ DestroyVideoRtpSender();
+}
+
+// Test that makes sure that a video track content hint translates to the proper
+// value for sources that are not screencast.
+TEST_F(RtpSenderReceiverTest, PropagatesVideoTrackContentHint) {
+ CreateVideoRtpSender();
+
+ video_track_->set_enabled(true);
+
+ // `video_track_` is not screencast by default.
+ EXPECT_EQ(false, video_media_send_channel()->options().is_screencast);
+ // No content hint should be set by default.
+ EXPECT_EQ(VideoTrackInterface::ContentHint::kNone,
+ video_track_->content_hint());
+ // Setting detailed should turn a non-screencast source into screencast mode.
+ video_track_->set_content_hint(VideoTrackInterface::ContentHint::kDetailed);
+ EXPECT_EQ(true, video_media_send_channel()->options().is_screencast);
+ // Removing the content hint should turn the track back into non-screencast
+ // mode.
+ video_track_->set_content_hint(VideoTrackInterface::ContentHint::kNone);
+ EXPECT_EQ(false, video_media_send_channel()->options().is_screencast);
+ // Setting fluid should remain in non-screencast mode (its default).
+ video_track_->set_content_hint(VideoTrackInterface::ContentHint::kFluid);
+ EXPECT_EQ(false, video_media_send_channel()->options().is_screencast);
+ // Setting text should have the same effect as Detailed
+ video_track_->set_content_hint(VideoTrackInterface::ContentHint::kText);
+ EXPECT_EQ(true, video_media_send_channel()->options().is_screencast);
+
+ DestroyVideoRtpSender();
+}
+
+// Test that makes sure that a video track content hint translates to the proper
+// value for screencast sources.
+TEST_F(RtpSenderReceiverTest,
+ PropagatesVideoTrackContentHintForScreencastSource) {
+ CreateVideoRtpSender(true);
+
+ video_track_->set_enabled(true);
+
+ // `video_track_` with a screencast source should be screencast by default.
+ EXPECT_EQ(true, video_media_send_channel()->options().is_screencast);
+ // No content hint should be set by default.
+ EXPECT_EQ(VideoTrackInterface::ContentHint::kNone,
+ video_track_->content_hint());
+ // Setting fluid should turn a screencast source into non-screencast mode.
+ video_track_->set_content_hint(VideoTrackInterface::ContentHint::kFluid);
+ EXPECT_EQ(false, video_media_send_channel()->options().is_screencast);
+ // Removing the content hint should turn the track back into screencast mode.
+ video_track_->set_content_hint(VideoTrackInterface::ContentHint::kNone);
+ EXPECT_EQ(true, video_media_send_channel()->options().is_screencast);
+ // Setting detailed should still remain in screencast mode (its default).
+ video_track_->set_content_hint(VideoTrackInterface::ContentHint::kDetailed);
+ EXPECT_EQ(true, video_media_send_channel()->options().is_screencast);
+ // Setting text should have the same effect as Detailed
+ video_track_->set_content_hint(VideoTrackInterface::ContentHint::kText);
+ EXPECT_EQ(true, video_media_send_channel()->options().is_screencast);
+
+ DestroyVideoRtpSender();
+}
+
+// Test that makes sure any content hints that are set on a track before
+// VideoRtpSender is ready to send are still applied when it gets ready to send.
+TEST_F(RtpSenderReceiverTest,
+ PropagatesVideoTrackContentHintSetBeforeEnabling) {
+ AddVideoTrack();
+ std::unique_ptr<MockSetStreamsObserver> set_streams_observer =
+ std::make_unique<MockSetStreamsObserver>();
+ // Setting detailed overrides the default non-screencast mode. This should be
+ // applied even if the track is set on construction.
+ video_track_->set_content_hint(VideoTrackInterface::ContentHint::kDetailed);
+ video_rtp_sender_ = VideoRtpSender::Create(worker_thread_, video_track_->id(),
+ set_streams_observer.get());
+ ASSERT_TRUE(video_rtp_sender_->SetTrack(video_track_.get()));
+ EXPECT_CALL(*set_streams_observer, OnSetStreams());
+ video_rtp_sender_->SetStreams({local_stream_->id()});
+ video_rtp_sender_->SetMediaChannel(
+ video_media_send_channel()->AsVideoSendChannel());
+ video_track_->set_enabled(true);
+
+ // Sender is not ready to send (no SSRC) so no option should have been set.
+ EXPECT_EQ(absl::nullopt, video_media_send_channel()->options().is_screencast);
+
+ // Verify that the content hint is accounted for when video_rtp_sender_ does
+ // get enabled.
+ video_rtp_sender_->SetSsrc(kVideoSsrc);
+ EXPECT_EQ(true, video_media_send_channel()->options().is_screencast);
+
+ // And removing the hint should go back to false (to verify that false was
+ // default correctly).
+ video_track_->set_content_hint(VideoTrackInterface::ContentHint::kNone);
+ EXPECT_EQ(false, video_media_send_channel()->options().is_screencast);
+
+ DestroyVideoRtpSender();
+}
+
+TEST_F(RtpSenderReceiverTest, AudioSenderHasDtmfSender) {
+ CreateAudioRtpSender();
+ EXPECT_NE(nullptr, audio_rtp_sender_->GetDtmfSender());
+}
+
+TEST_F(RtpSenderReceiverTest, VideoSenderDoesNotHaveDtmfSender) {
+ CreateVideoRtpSender();
+ EXPECT_EQ(nullptr, video_rtp_sender_->GetDtmfSender());
+}
+
+// Test that the DTMF sender is really using `voice_channel_`, and thus returns
+// true/false from CanInsertDtmf based on what `voice_channel_` returns.
+TEST_F(RtpSenderReceiverTest, CanInsertDtmf) {
+ AddDtmfCodec();
+ CreateAudioRtpSender();
+ auto dtmf_sender = audio_rtp_sender_->GetDtmfSender();
+ ASSERT_NE(nullptr, dtmf_sender);
+ EXPECT_TRUE(dtmf_sender->CanInsertDtmf());
+}
+
+TEST_F(RtpSenderReceiverTest, CanNotInsertDtmf) {
+ CreateAudioRtpSender();
+ auto dtmf_sender = audio_rtp_sender_->GetDtmfSender();
+ ASSERT_NE(nullptr, dtmf_sender);
+ // DTMF codec has not been added, as it was in the above test.
+ EXPECT_FALSE(dtmf_sender->CanInsertDtmf());
+}
+
+TEST_F(RtpSenderReceiverTest, InsertDtmf) {
+ AddDtmfCodec();
+ CreateAudioRtpSender();
+ auto dtmf_sender = audio_rtp_sender_->GetDtmfSender();
+ ASSERT_NE(nullptr, dtmf_sender);
+
+ EXPECT_EQ(0U, voice_media_send_channel()->dtmf_info_queue().size());
+
+ // Insert DTMF
+ const int expected_duration = 90;
+ dtmf_sender->InsertDtmf("012", expected_duration, 100);
+
+ // Verify
+ ASSERT_EQ_WAIT(3U, voice_media_send_channel()->dtmf_info_queue().size(),
+ kDefaultTimeout);
+ const uint32_t send_ssrc =
+ voice_media_send_channel()->send_streams()[0].first_ssrc();
+ EXPECT_TRUE(CompareDtmfInfo(voice_media_send_channel()->dtmf_info_queue()[0],
+ send_ssrc, 0, expected_duration));
+ EXPECT_TRUE(CompareDtmfInfo(voice_media_send_channel()->dtmf_info_queue()[1],
+ send_ssrc, 1, expected_duration));
+ EXPECT_TRUE(CompareDtmfInfo(voice_media_send_channel()->dtmf_info_queue()[2],
+ send_ssrc, 2, expected_duration));
+}
+
+// Validate that the default FrameEncryptor setting is nullptr.
+TEST_F(RtpSenderReceiverTest, AudioSenderCanSetFrameEncryptor) {
+ CreateAudioRtpSender();
+ rtc::scoped_refptr<FrameEncryptorInterface> fake_frame_encryptor(
+ new FakeFrameEncryptor());
+ EXPECT_EQ(nullptr, audio_rtp_sender_->GetFrameEncryptor());
+ audio_rtp_sender_->SetFrameEncryptor(fake_frame_encryptor);
+ EXPECT_EQ(fake_frame_encryptor.get(),
+ audio_rtp_sender_->GetFrameEncryptor().get());
+}
+
+// Validate that setting a FrameEncryptor after the send stream is stopped does
+// nothing.
+TEST_F(RtpSenderReceiverTest, AudioSenderCannotSetFrameEncryptorAfterStop) {
+ CreateAudioRtpSender();
+ rtc::scoped_refptr<FrameEncryptorInterface> fake_frame_encryptor(
+ new FakeFrameEncryptor());
+ EXPECT_EQ(nullptr, audio_rtp_sender_->GetFrameEncryptor());
+ audio_rtp_sender_->Stop();
+ audio_rtp_sender_->SetFrameEncryptor(fake_frame_encryptor);
+ // TODO(webrtc:9926) - Validate media channel not set once fakes updated.
+}
+
+// Validate that the default FrameEncryptor setting is nullptr.
+TEST_F(RtpSenderReceiverTest, AudioReceiverCanSetFrameDecryptor) {
+ CreateAudioRtpReceiver();
+ rtc::scoped_refptr<FrameDecryptorInterface> fake_frame_decryptor(
+ rtc::make_ref_counted<FakeFrameDecryptor>());
+ EXPECT_EQ(nullptr, audio_rtp_receiver_->GetFrameDecryptor());
+ audio_rtp_receiver_->SetFrameDecryptor(fake_frame_decryptor);
+ EXPECT_EQ(fake_frame_decryptor.get(),
+ audio_rtp_receiver_->GetFrameDecryptor().get());
+ DestroyAudioRtpReceiver();
+}
+
+// Validate that the default FrameEncryptor setting is nullptr.
+TEST_F(RtpSenderReceiverTest, AudioReceiverCannotSetFrameDecryptorAfterStop) {
+ CreateAudioRtpReceiver();
+ rtc::scoped_refptr<FrameDecryptorInterface> fake_frame_decryptor(
+ rtc::make_ref_counted<FakeFrameDecryptor>());
+ EXPECT_EQ(nullptr, audio_rtp_receiver_->GetFrameDecryptor());
+ audio_rtp_receiver_->SetMediaChannel(nullptr);
+ audio_rtp_receiver_->SetFrameDecryptor(fake_frame_decryptor);
+ // TODO(webrtc:9926) - Validate media channel not set once fakes updated.
+ DestroyAudioRtpReceiver();
+}
+
+// Validate that the default FrameEncryptor setting is nullptr.
+TEST_F(RtpSenderReceiverTest, VideoSenderCanSetFrameEncryptor) {
+ CreateVideoRtpSender();
+ rtc::scoped_refptr<FrameEncryptorInterface> fake_frame_encryptor(
+ new FakeFrameEncryptor());
+ EXPECT_EQ(nullptr, video_rtp_sender_->GetFrameEncryptor());
+ video_rtp_sender_->SetFrameEncryptor(fake_frame_encryptor);
+ EXPECT_EQ(fake_frame_encryptor.get(),
+ video_rtp_sender_->GetFrameEncryptor().get());
+}
+
+// Validate that setting a FrameEncryptor after the send stream is stopped does
+// nothing.
+TEST_F(RtpSenderReceiverTest, VideoSenderCannotSetFrameEncryptorAfterStop) {
+ CreateVideoRtpSender();
+ rtc::scoped_refptr<FrameEncryptorInterface> fake_frame_encryptor(
+ new FakeFrameEncryptor());
+ EXPECT_EQ(nullptr, video_rtp_sender_->GetFrameEncryptor());
+ video_rtp_sender_->Stop();
+ video_rtp_sender_->SetFrameEncryptor(fake_frame_encryptor);
+ // TODO(webrtc:9926) - Validate media channel not set once fakes updated.
+}
+
+// Validate that the default FrameEncryptor setting is nullptr.
+TEST_F(RtpSenderReceiverTest, VideoReceiverCanSetFrameDecryptor) {
+ CreateVideoRtpReceiver();
+ rtc::scoped_refptr<FrameDecryptorInterface> fake_frame_decryptor(
+ rtc::make_ref_counted<FakeFrameDecryptor>());
+ EXPECT_EQ(nullptr, video_rtp_receiver_->GetFrameDecryptor());
+ video_rtp_receiver_->SetFrameDecryptor(fake_frame_decryptor);
+ EXPECT_EQ(fake_frame_decryptor.get(),
+ video_rtp_receiver_->GetFrameDecryptor().get());
+ DestroyVideoRtpReceiver();
+}
+
+// Validate that the default FrameEncryptor setting is nullptr.
+TEST_F(RtpSenderReceiverTest, VideoReceiverCannotSetFrameDecryptorAfterStop) {
+ CreateVideoRtpReceiver();
+ rtc::scoped_refptr<FrameDecryptorInterface> fake_frame_decryptor(
+ rtc::make_ref_counted<FakeFrameDecryptor>());
+ EXPECT_EQ(nullptr, video_rtp_receiver_->GetFrameDecryptor());
+ video_rtp_receiver_->SetMediaChannel(nullptr);
+ video_rtp_receiver_->SetFrameDecryptor(fake_frame_decryptor);
+ // TODO(webrtc:9926) - Validate media channel not set once fakes updated.
+ DestroyVideoRtpReceiver();
+}
+
+// Checks that calling the internal methods for get/set parameters does not
+// invalidate any parameters retrieved by clients.
+TEST_F(RtpSenderReceiverTest,
+ InternalParameterMethodsDoNotInvalidateTransaction) {
+ CreateVideoRtpSender();
+ RtpParameters parameters = video_rtp_sender_->GetParameters();
+ RtpParameters new_parameters = video_rtp_sender_->GetParametersInternal();
+ new_parameters.encodings[0].active = false;
+ video_rtp_sender_->SetParametersInternal(new_parameters, nullptr, true);
+ new_parameters.encodings[0].active = true;
+ video_rtp_sender_->SetParametersInternal(new_parameters, nullptr, true);
+ parameters.encodings[0].active = false;
+ EXPECT_TRUE(video_rtp_sender_->SetParameters(parameters).ok());
+}
+
+// Checks that the senders SetStreams eliminates duplicate stream ids.
+TEST_F(RtpSenderReceiverTest, SenderSetStreamsEliminatesDuplicateIds) {
+ AddVideoTrack();
+ video_rtp_sender_ =
+ VideoRtpSender::Create(worker_thread_, video_track_->id(), nullptr);
+ video_rtp_sender_->SetStreams({"1", "2", "1"});
+ EXPECT_EQ(video_rtp_sender_->stream_ids().size(), 2u);
+}
+
+// Helper providing syntactic sugar for accepting a vector with '{}' notation.
+std::pair<RidList, RidList> CreatePairOfRidVectors(
+ const std::vector<std::string>& first,
+ const std::vector<std::string>& second) {
+ return std::make_pair(first, second);
+}
+
+// These parameters are used to test disabling simulcast layers.
+const std::pair<RidList, RidList> kDisableSimulcastLayersParameters[] = {
+ // Tests removing the first layer. This is a special case because
+ // the first layer's SSRC is also the 'primary' SSRC used to associate the
+ // parameters to the media channel.
+ CreatePairOfRidVectors({"1", "2", "3", "4"}, {"1"}),
+ // Tests removing some layers.
+ CreatePairOfRidVectors({"1", "2", "3", "4"}, {"2", "4"}),
+    // Tests rejected simulcast: all layers except the first are rejected.
+ CreatePairOfRidVectors({"1", "2", "3", "4"}, {"2", "3", "4"}),
+ // Tests removing all layers.
+ CreatePairOfRidVectors({"1", "2", "3", "4"}, {"1", "2", "3", "4"}),
+};
+
+// Runs test for disabling layers on a sender without a media engine set.
+TEST_P(RtpSenderReceiverTest, DisableSimulcastLayersWithoutMediaEngine) {
+ auto parameter = GetParam();
+ RunDisableSimulcastLayersWithoutMediaEngineTest(parameter.first,
+ parameter.second);
+}
+
+// Runs test for disabling layers on a sender with a media engine set.
+TEST_P(RtpSenderReceiverTest, DisableSimulcastLayersWithMediaEngine) {
+ auto parameter = GetParam();
+ RunDisableSimulcastLayersWithMediaEngineTest(parameter.first,
+ parameter.second);
+}
+
+INSTANTIATE_TEST_SUITE_P(
+ DisableSimulcastLayersInSender,
+ RtpSenderReceiverTest,
+ ::testing::ValuesIn(kDisableSimulcastLayersParameters));
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/rtp_transceiver.cc b/third_party/libwebrtc/pc/rtp_transceiver.cc
new file mode 100644
index 0000000000..815ec9dece
--- /dev/null
+++ b/third_party/libwebrtc/pc/rtp_transceiver.cc
@@ -0,0 +1,791 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/rtp_transceiver.h"
+
+#include <stdint.h>
+
+#include <iterator>
+#include <set>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "absl/algorithm/container.h"
+#include "absl/memory/memory.h"
+#include "api/audio_codecs/audio_codec_pair_id.h"
+#include "api/field_trials_view.h"
+#include "api/rtp_parameters.h"
+#include "api/sequence_checker.h"
+#include "media/base/codec.h"
+#include "media/base/media_channel.h"
+#include "media/base/media_channel_impl.h"
+#include "media/base/media_constants.h"
+#include "media/base/media_engine.h"
+#include "pc/channel.h"
+#include "pc/rtp_media_utils.h"
+#include "pc/session_description.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/thread.h"
+
+namespace webrtc {
+namespace {
+
+RTCError VerifyCodecPreferences(
+ const std::vector<RtpCodecCapability>& codecs,
+ const std::vector<cricket::Codec>& send_codecs,
+ const std::vector<cricket::Codec>& recv_codecs) {
+ // If the intersection between codecs and
+ // RTCRtpSender.getCapabilities(kind).codecs or the intersection between
+ // codecs and RTCRtpReceiver.getCapabilities(kind).codecs only contains RTX,
+ // RED or FEC codecs or is an empty set, throw InvalidModificationError.
+ // This ensures that we always have something to offer, regardless of
+ // transceiver.direction.
+
+ if (!absl::c_any_of(codecs, [&recv_codecs](const RtpCodecCapability& codec) {
+ return codec.name != cricket::kRtxCodecName &&
+ codec.name != cricket::kRedCodecName &&
+ codec.name != cricket::kFlexfecCodecName &&
+ absl::c_any_of(recv_codecs,
+ [&codec](const cricket::Codec& recv_codec) {
+ return recv_codec.MatchesRtpCodec(codec);
+ });
+ })) {
+ LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_MODIFICATION,
+ "Invalid codec preferences: Missing codec from recv "
+ "codec capabilities.");
+ }
+
+ if (!absl::c_any_of(codecs, [&send_codecs](const RtpCodecCapability& codec) {
+ return codec.name != cricket::kRtxCodecName &&
+ codec.name != cricket::kRedCodecName &&
+ codec.name != cricket::kFlexfecCodecName &&
+ absl::c_any_of(send_codecs,
+ [&codec](const cricket::Codec& send_codec) {
+ return send_codec.MatchesRtpCodec(codec);
+ });
+ })) {
+ LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_MODIFICATION,
+ "Invalid codec preferences: Missing codec from send "
+ "codec capabilities.");
+ }
+
+ // Let codecCapabilities be the union of
+ // RTCRtpSender.getCapabilities(kind).codecs and
+ // RTCRtpReceiver.getCapabilities(kind).codecs. For each codec in codecs, If
+ // codec is not in codecCapabilities, throw InvalidModificationError.
+ for (const auto& codec_preference : codecs) {
+ bool is_recv_codec = absl::c_any_of(
+ recv_codecs, [&codec_preference](const cricket::Codec& codec) {
+ return codec.MatchesRtpCodec(codec_preference);
+ });
+
+ bool is_send_codec = absl::c_any_of(
+ send_codecs, [&codec_preference](const cricket::Codec& codec) {
+ return codec.MatchesRtpCodec(codec_preference);
+ });
+
+ if (!is_recv_codec && !is_send_codec) {
+ LOG_AND_RETURN_ERROR(
+ RTCErrorType::INVALID_MODIFICATION,
+ std::string("Invalid codec preferences: invalid codec with name \"") +
+ codec_preference.name + "\".");
+ }
+ }
+
+ // Check we have a real codec (not just rtx, red or fec)
+ if (absl::c_all_of(codecs, [](const RtpCodecCapability& codec) {
+ return codec.name == cricket::kRtxCodecName ||
+ codec.name == cricket::kRedCodecName ||
+ codec.name == cricket::kUlpfecCodecName;
+ })) {
+ LOG_AND_RETURN_ERROR(
+ RTCErrorType::INVALID_MODIFICATION,
+ "Invalid codec preferences: codec list must have a non "
+ "RTX, RED or FEC entry.");
+ }
+
+ return RTCError::OK();
+}
+
+// Matches the list of codecs as capabilities (potentially without SVC related
+// information) to the list of send codecs and returns the list of codecs with
+// all the SVC related information.
+std::vector<cricket::VideoCodec> MatchCodecPreferences(
+ const std::vector<RtpCodecCapability>& codecs,
+ const std::vector<cricket::VideoCodec>& send_codecs) {
+ std::vector<cricket::VideoCodec> result;
+
+ for (const auto& codec_preference : codecs) {
+ for (const cricket::VideoCodec& send_codec : send_codecs) {
+ if (send_codec.MatchesRtpCodec(codec_preference)) {
+ result.push_back(send_codec);
+ }
+ }
+ }
+
+ return result;
+}
+
+TaskQueueBase* GetCurrentTaskQueueOrThread() {
+ TaskQueueBase* current = TaskQueueBase::Current();
+ if (!current)
+ current = rtc::ThreadManager::Instance()->CurrentThread();
+ return current;
+}
+
+} // namespace
+
+RtpTransceiver::RtpTransceiver(cricket::MediaType media_type,
+ ConnectionContext* context)
+ : thread_(GetCurrentTaskQueueOrThread()),
+ unified_plan_(false),
+ media_type_(media_type),
+ context_(context) {
+ RTC_DCHECK(media_type == cricket::MEDIA_TYPE_AUDIO ||
+ media_type == cricket::MEDIA_TYPE_VIDEO);
+}
+
+RtpTransceiver::RtpTransceiver(
+ rtc::scoped_refptr<RtpSenderProxyWithInternal<RtpSenderInternal>> sender,
+ rtc::scoped_refptr<RtpReceiverProxyWithInternal<RtpReceiverInternal>>
+ receiver,
+ ConnectionContext* context,
+ std::vector<RtpHeaderExtensionCapability> header_extensions_to_negotiate,
+ std::function<void()> on_negotiation_needed)
+ : thread_(GetCurrentTaskQueueOrThread()),
+ unified_plan_(true),
+ media_type_(sender->media_type()),
+ context_(context),
+ header_extensions_to_negotiate_(
+ std::move(header_extensions_to_negotiate)),
+ on_negotiation_needed_(std::move(on_negotiation_needed)) {
+ RTC_DCHECK(media_type_ == cricket::MEDIA_TYPE_AUDIO ||
+ media_type_ == cricket::MEDIA_TYPE_VIDEO);
+ RTC_DCHECK_EQ(sender->media_type(), receiver->media_type());
+ sender->internal()->SetCodecPreferences(
+ sender->media_type() == cricket::MEDIA_TYPE_VIDEO
+ ? media_engine()->video().send_codecs(false)
+ : media_engine()->voice().send_codecs());
+ senders_.push_back(sender);
+ receivers_.push_back(receiver);
+}
+
+RtpTransceiver::~RtpTransceiver() {
+ // TODO(tommi): On Android, when running PeerConnectionClientTest (e.g.
+ // PeerConnectionClientTest#testCameraSwitch), the instance doesn't get
+ // deleted on `thread_`. See if we can fix that.
+ if (!stopped_) {
+ RTC_DCHECK_RUN_ON(thread_);
+ StopInternal();
+ }
+
+ RTC_CHECK(!channel_) << "Missing call to ClearChannel?";
+}
+
+RTCError RtpTransceiver::CreateChannel(
+ absl::string_view mid,
+ Call* call_ptr,
+ const cricket::MediaConfig& media_config,
+ bool srtp_required,
+ CryptoOptions crypto_options,
+ const cricket::AudioOptions& audio_options,
+ const cricket::VideoOptions& video_options,
+ VideoBitrateAllocatorFactory* video_bitrate_allocator_factory,
+ std::function<RtpTransportInternal*(absl::string_view)> transport_lookup) {
+ RTC_DCHECK_RUN_ON(thread_);
+ if (!media_engine()) {
+ // TODO(hta): Must be a better way
+ return RTCError(RTCErrorType::INTERNAL_ERROR,
+ "No media engine for mid=" + std::string(mid));
+ }
+ std::unique_ptr<cricket::ChannelInterface> new_channel;
+ if (media_type() == cricket::MEDIA_TYPE_AUDIO) {
+    // TODO(bugs.webrtc.org/11992): Channel creation internally switches to
+ // the worker thread. We shouldn't be using the `call_ptr_` hack here but
+ // simply be on the worker thread and use `call_` (update upstream code).
+ RTC_DCHECK(call_ptr);
+ RTC_DCHECK(media_engine());
+ // TODO(bugs.webrtc.org/11992): Remove this workaround after updates in
+ // PeerConnection and add the expectation that we're already on the right
+ // thread.
+ context()->worker_thread()->BlockingCall([&] {
+ RTC_DCHECK_RUN_ON(context()->worker_thread());
+
+ AudioCodecPairId codec_pair_id = AudioCodecPairId::Create();
+
+ std::unique_ptr<cricket::VoiceMediaSendChannelInterface>
+ media_send_channel = media_engine()->voice().CreateSendChannel(
+ call_ptr, media_config, audio_options, crypto_options,
+ codec_pair_id);
+ if (!media_send_channel) {
+ // TODO(bugs.webrtc.org/14912): Consider CHECK or reporting failure
+ return;
+ }
+ std::unique_ptr<cricket::VoiceMediaReceiveChannelInterface>
+ media_receive_channel = media_engine()->voice().CreateReceiveChannel(
+ call_ptr, media_config, audio_options, crypto_options,
+ codec_pair_id);
+ if (!media_receive_channel) {
+ return;
+ }
+ // Note that this is safe because both sending and
+ // receiving channels will be deleted at the same time.
+ media_send_channel->SetSsrcListChangedCallback(
+ [receive_channel =
+ media_receive_channel.get()](const std::set<uint32_t>& choices) {
+ receive_channel->ChooseReceiverReportSsrc(choices);
+ });
+
+ new_channel = std::make_unique<cricket::VoiceChannel>(
+ context()->worker_thread(), context()->network_thread(),
+ context()->signaling_thread(), std::move(media_send_channel),
+ std::move(media_receive_channel), mid, srtp_required, crypto_options,
+ context()->ssrc_generator());
+ });
+ } else {
+ RTC_DCHECK_EQ(cricket::MEDIA_TYPE_VIDEO, media_type());
+
+ // TODO(bugs.webrtc.org/11992): CreateVideoChannel internally switches to
+ // the worker thread. We shouldn't be using the `call_ptr_` hack here but
+ // simply be on the worker thread and use `call_` (update upstream code).
+ context()->worker_thread()->BlockingCall([&] {
+ RTC_DCHECK_RUN_ON(context()->worker_thread());
+
+ std::unique_ptr<cricket::VideoMediaSendChannelInterface>
+ media_send_channel = media_engine()->video().CreateSendChannel(
+ call_ptr, media_config, video_options, crypto_options,
+ video_bitrate_allocator_factory);
+ if (!media_send_channel) {
+ return;
+ }
+
+ std::unique_ptr<cricket::VideoMediaReceiveChannelInterface>
+ media_receive_channel = media_engine()->video().CreateReceiveChannel(
+ call_ptr, media_config, video_options, crypto_options);
+ if (!media_receive_channel) {
+ return;
+ }
+ // Note that this is safe because both sending and
+ // receiving channels will be deleted at the same time.
+ media_send_channel->SetSsrcListChangedCallback(
+ [receive_channel =
+ media_receive_channel.get()](const std::set<uint32_t>& choices) {
+ receive_channel->ChooseReceiverReportSsrc(choices);
+ });
+
+ new_channel = std::make_unique<cricket::VideoChannel>(
+ context()->worker_thread(), context()->network_thread(),
+ context()->signaling_thread(), std::move(media_send_channel),
+ std::move(media_receive_channel), mid, srtp_required, crypto_options,
+ context()->ssrc_generator());
+ });
+ }
+ if (!new_channel) {
+ // TODO(hta): Must be a better way
+ return RTCError(RTCErrorType::INTERNAL_ERROR,
+ "Failed to create channel for mid=" + std::string(mid));
+ }
+ SetChannel(std::move(new_channel), transport_lookup);
+ return RTCError::OK();
+}
+
+void RtpTransceiver::SetChannel(
+ std::unique_ptr<cricket::ChannelInterface> channel,
+ std::function<RtpTransportInternal*(const std::string&)> transport_lookup) {
+ RTC_DCHECK_RUN_ON(thread_);
+ RTC_DCHECK(channel);
+ RTC_DCHECK(transport_lookup);
+ RTC_DCHECK(!channel_);
+ // Cannot set a channel on a stopped transceiver.
+ if (stopped_) {
+ return;
+ }
+
+ RTC_LOG_THREAD_BLOCK_COUNT();
+
+ RTC_DCHECK_EQ(media_type(), channel->media_type());
+ signaling_thread_safety_ = PendingTaskSafetyFlag::Create();
+
+ std::unique_ptr<cricket::ChannelInterface> channel_to_delete;
+
+ // An alternative to this, could be to require SetChannel to be called
+ // on the network thread. The channel object operates for the most part
+ // on the network thread, as part of its initialization being on the network
+ // thread is required, so setting a channel object as part of the construction
+ // (without thread hopping) might be the more efficient thing to do than
+ // how SetChannel works today.
+ // Similarly, if the channel() accessor is limited to the network thread, that
+ // helps with keeping the channel implementation requirements being met and
+ // avoids synchronization for accessing the pointer or network related state.
+ context()->network_thread()->BlockingCall([&]() {
+ if (channel_) {
+ channel_->SetFirstPacketReceivedCallback(nullptr);
+ channel_->SetRtpTransport(nullptr);
+ channel_to_delete = std::move(channel_);
+ }
+
+ channel_ = std::move(channel);
+
+ channel_->SetRtpTransport(transport_lookup(channel_->mid()));
+ channel_->SetFirstPacketReceivedCallback(
+ [thread = thread_, flag = signaling_thread_safety_, this]() mutable {
+ thread->PostTask(
+ SafeTask(std::move(flag), [this]() { OnFirstPacketReceived(); }));
+ });
+ });
+ PushNewMediaChannelAndDeleteChannel(nullptr);
+
+ RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(2);
+}
+
+void RtpTransceiver::ClearChannel() {
+ RTC_DCHECK_RUN_ON(thread_);
+
+ if (!channel_) {
+ return;
+ }
+
+ RTC_LOG_THREAD_BLOCK_COUNT();
+
+ if (channel_) {
+ signaling_thread_safety_->SetNotAlive();
+ signaling_thread_safety_ = nullptr;
+ }
+ std::unique_ptr<cricket::ChannelInterface> channel_to_delete;
+
+ context()->network_thread()->BlockingCall([&]() {
+ if (channel_) {
+ channel_->SetFirstPacketReceivedCallback(nullptr);
+ channel_->SetRtpTransport(nullptr);
+ channel_to_delete = std::move(channel_);
+ }
+ });
+
+ RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(1);
+ PushNewMediaChannelAndDeleteChannel(std::move(channel_to_delete));
+
+ RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(2);
+}
+
+void RtpTransceiver::PushNewMediaChannelAndDeleteChannel(
+ std::unique_ptr<cricket::ChannelInterface> channel_to_delete) {
+ // The clumsy combination of pushing down media channel and deleting
+ // the channel is due to the desire to do both things in one Invoke().
+ if (!channel_to_delete && senders_.empty() && receivers_.empty()) {
+ return;
+ }
+ context()->worker_thread()->BlockingCall([&]() {
+ // Push down the new media_channel, if any, otherwise clear it.
+ auto* media_send_channel =
+ channel_ ? channel_->media_send_channel() : nullptr;
+ for (const auto& sender : senders_) {
+ sender->internal()->SetMediaChannel(media_send_channel);
+ }
+
+ auto* media_receive_channel =
+ channel_ ? channel_->media_receive_channel() : nullptr;
+ for (const auto& receiver : receivers_) {
+ receiver->internal()->SetMediaChannel(media_receive_channel);
+ }
+
+ // Destroy the channel, if we had one, now _after_ updating the receivers
+ // who might have had references to the previous channel.
+ if (channel_to_delete) {
+ channel_to_delete.reset(nullptr);
+ }
+ });
+}
+
+void RtpTransceiver::AddSender(
+ rtc::scoped_refptr<RtpSenderProxyWithInternal<RtpSenderInternal>> sender) {
+ RTC_DCHECK_RUN_ON(thread_);
+ RTC_DCHECK(!stopped_);
+ RTC_DCHECK(!unified_plan_);
+ RTC_DCHECK(sender);
+ RTC_DCHECK_EQ(media_type(), sender->media_type());
+ RTC_DCHECK(!absl::c_linear_search(senders_, sender));
+
+ std::vector<cricket::Codec> send_codecs =
+ media_type() == cricket::MEDIA_TYPE_VIDEO
+ ? media_engine()->video().send_codecs(false)
+ : media_engine()->voice().send_codecs();
+ sender->internal()->SetCodecPreferences(
+ codec_preferences_.empty()
+ ? send_codecs
+ : MatchCodecPreferences(codec_preferences_, send_codecs));
+ senders_.push_back(sender);
+}
+
+bool RtpTransceiver::RemoveSender(RtpSenderInterface* sender) {
+ RTC_DCHECK(!unified_plan_);
+ if (sender) {
+ RTC_DCHECK_EQ(media_type(), sender->media_type());
+ }
+ auto it = absl::c_find(senders_, sender);
+ if (it == senders_.end()) {
+ return false;
+ }
+ (*it)->internal()->Stop();
+ senders_.erase(it);
+ return true;
+}
+
+void RtpTransceiver::AddReceiver(
+ rtc::scoped_refptr<RtpReceiverProxyWithInternal<RtpReceiverInternal>>
+ receiver) {
+ RTC_DCHECK_RUN_ON(thread_);
+ RTC_DCHECK(!stopped_);
+ RTC_DCHECK(!unified_plan_);
+ RTC_DCHECK(receiver);
+ RTC_DCHECK_EQ(media_type(), receiver->media_type());
+ RTC_DCHECK(!absl::c_linear_search(receivers_, receiver));
+ receivers_.push_back(receiver);
+}
+
+bool RtpTransceiver::RemoveReceiver(RtpReceiverInterface* receiver) {
+ RTC_DCHECK_RUN_ON(thread_);
+ RTC_DCHECK(!unified_plan_);
+ if (receiver) {
+ RTC_DCHECK_EQ(media_type(), receiver->media_type());
+ }
+ auto it = absl::c_find(receivers_, receiver);
+ if (it == receivers_.end()) {
+ return false;
+ }
+
+ (*it)->internal()->Stop();
+ context()->worker_thread()->BlockingCall([&]() {
+ // `Stop()` will clear the receiver's pointer to the media channel.
+ (*it)->internal()->SetMediaChannel(nullptr);
+ });
+
+ receivers_.erase(it);
+ return true;
+}
+
+rtc::scoped_refptr<RtpSenderInternal> RtpTransceiver::sender_internal() const {
+ RTC_DCHECK(unified_plan_);
+ RTC_CHECK_EQ(1u, senders_.size());
+ return rtc::scoped_refptr<RtpSenderInternal>(senders_[0]->internal());
+}
+
+rtc::scoped_refptr<RtpReceiverInternal> RtpTransceiver::receiver_internal()
+ const {
+ RTC_DCHECK(unified_plan_);
+ RTC_CHECK_EQ(1u, receivers_.size());
+ return rtc::scoped_refptr<RtpReceiverInternal>(receivers_[0]->internal());
+}
+
+cricket::MediaType RtpTransceiver::media_type() const {
+ return media_type_;
+}
+
+absl::optional<std::string> RtpTransceiver::mid() const {
+ return mid_;
+}
+
+void RtpTransceiver::OnFirstPacketReceived() {
+ for (const auto& receiver : receivers_) {
+ receiver->internal()->NotifyFirstPacketReceived();
+ }
+}
+
+rtc::scoped_refptr<RtpSenderInterface> RtpTransceiver::sender() const {
+ RTC_DCHECK(unified_plan_);
+ RTC_CHECK_EQ(1u, senders_.size());
+ return senders_[0];
+}
+
+rtc::scoped_refptr<RtpReceiverInterface> RtpTransceiver::receiver() const {
+ RTC_DCHECK(unified_plan_);
+ RTC_CHECK_EQ(1u, receivers_.size());
+ return receivers_[0];
+}
+
+void RtpTransceiver::set_current_direction(RtpTransceiverDirection direction) {
+ RTC_LOG(LS_INFO) << "Changing transceiver (MID=" << mid_.value_or("<not set>")
+ << ") current direction from "
+ << (current_direction_ ? RtpTransceiverDirectionToString(
+ *current_direction_)
+ : "<not set>")
+ << " to " << RtpTransceiverDirectionToString(direction)
+ << ".";
+ current_direction_ = direction;
+ if (RtpTransceiverDirectionHasSend(*current_direction_)) {
+ has_ever_been_used_to_send_ = true;
+ }
+}
+
+void RtpTransceiver::set_fired_direction(
+ absl::optional<RtpTransceiverDirection> direction) {
+ fired_direction_ = direction;
+}
+
+bool RtpTransceiver::stopped() const {
+ RTC_DCHECK_RUN_ON(thread_);
+ return stopped_;
+}
+
+bool RtpTransceiver::stopping() const {
+ RTC_DCHECK_RUN_ON(thread_);
+ return stopping_;
+}
+
+RtpTransceiverDirection RtpTransceiver::direction() const {
+ if (unified_plan_ && stopping())
+ return webrtc::RtpTransceiverDirection::kStopped;
+
+ return direction_;
+}
+
+RTCError RtpTransceiver::SetDirectionWithError(
+ RtpTransceiverDirection new_direction) {
+ if (unified_plan_ && stopping()) {
+ LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_STATE,
+ "Cannot set direction on a stopping transceiver.");
+ }
+ if (new_direction == direction_)
+ return RTCError::OK();
+
+ if (new_direction == RtpTransceiverDirection::kStopped) {
+ LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
+ "The set direction 'stopped' is invalid.");
+ }
+
+ direction_ = new_direction;
+ on_negotiation_needed_();
+
+ return RTCError::OK();
+}
+
+absl::optional<RtpTransceiverDirection> RtpTransceiver::current_direction()
+ const {
+ if (unified_plan_ && stopped())
+ return webrtc::RtpTransceiverDirection::kStopped;
+
+ return current_direction_;
+}
+
+absl::optional<RtpTransceiverDirection> RtpTransceiver::fired_direction()
+ const {
+ return fired_direction_;
+}
+
+void RtpTransceiver::StopSendingAndReceiving() {
+ // 1. Let sender be transceiver.[[Sender]].
+ // 2. Let receiver be transceiver.[[Receiver]].
+ //
+ // 3. Stop sending media with sender.
+ //
+ RTC_DCHECK_RUN_ON(thread_);
+
+ // 4. Send an RTCP BYE for each RTP stream that was being sent by sender, as
+ // specified in [RFC3550].
+ for (const auto& sender : senders_)
+ sender->internal()->Stop();
+
+ // Signal to receiver sources that we're stopping.
+ for (const auto& receiver : receivers_)
+ receiver->internal()->Stop();
+
+ context()->worker_thread()->BlockingCall([&]() {
+ // 5 Stop receiving media with receiver.
+ for (const auto& receiver : receivers_)
+ receiver->internal()->SetMediaChannel(nullptr);
+ });
+
+ stopping_ = true;
+ direction_ = webrtc::RtpTransceiverDirection::kInactive;
+}
+
+RTCError RtpTransceiver::StopStandard() {
+ RTC_DCHECK_RUN_ON(thread_);
+ // If we're on Plan B, do what Stop() used to do there.
+ if (!unified_plan_) {
+ StopInternal();
+ return RTCError::OK();
+ }
+ // 1. Let transceiver be the RTCRtpTransceiver object on which the method is
+ // invoked.
+ //
+ // 2. Let connection be the RTCPeerConnection object associated with
+ // transceiver.
+ //
+ // 3. If connection.[[IsClosed]] is true, throw an InvalidStateError.
+ if (is_pc_closed_) {
+ LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_STATE,
+ "PeerConnection is closed.");
+ }
+
+ // 4. If transceiver.[[Stopping]] is true, abort these steps.
+ if (stopping_)
+ return RTCError::OK();
+
+ // 5. Stop sending and receiving given transceiver, and update the
+ // negotiation-needed flag for connection.
+ StopSendingAndReceiving();
+ on_negotiation_needed_();
+
+ return RTCError::OK();
+}
+
+void RtpTransceiver::StopInternal() {
+ RTC_DCHECK_RUN_ON(thread_);
+ StopTransceiverProcedure();
+}
+
+void RtpTransceiver::StopTransceiverProcedure() {
+ RTC_DCHECK_RUN_ON(thread_);
+ // As specified in the "Stop the RTCRtpTransceiver" procedure
+ // 1. If transceiver.[[Stopping]] is false, stop sending and receiving given
+ // transceiver.
+ if (!stopping_)
+ StopSendingAndReceiving();
+
+ // 2. Set transceiver.[[Stopped]] to true.
+ stopped_ = true;
+
+ // Signal the updated change to the senders.
+ for (const auto& sender : senders_)
+ sender->internal()->SetTransceiverAsStopped();
+
+ // 3. Set transceiver.[[Receptive]] to false.
+ // 4. Set transceiver.[[CurrentDirection]] to null.
+ current_direction_ = absl::nullopt;
+}
+
+RTCError RtpTransceiver::SetCodecPreferences(
+ rtc::ArrayView<RtpCodecCapability> codec_capabilities) {
+ RTC_DCHECK(unified_plan_);
+ // 3. If codecs is an empty list, set transceiver's [[PreferredCodecs]] slot
+ // to codecs and abort these steps.
+ if (codec_capabilities.empty()) {
+ codec_preferences_.clear();
+ senders_.front()->internal()->SetCodecPreferences(
+ media_type() == cricket::MEDIA_TYPE_VIDEO
+ ? media_engine()->video().send_codecs(false)
+ : media_engine()->voice().send_codecs());
+ return RTCError::OK();
+ }
+
+ // 4. Remove any duplicate values in codecs.
+ std::vector<RtpCodecCapability> codecs;
+ absl::c_remove_copy_if(codec_capabilities, std::back_inserter(codecs),
+ [&codecs](const RtpCodecCapability& codec) {
+ return absl::c_linear_search(codecs, codec);
+ });
+
+ // 6. to 8.
+ RTCError result;
+ std::vector<cricket::Codec> recv_codecs, send_codecs;
+ if (media_type_ == cricket::MEDIA_TYPE_AUDIO) {
+ send_codecs = media_engine()->voice().send_codecs();
+ recv_codecs = media_engine()->voice().recv_codecs();
+ } else if (media_type_ == cricket::MEDIA_TYPE_VIDEO) {
+ send_codecs = media_engine()->video().send_codecs(context()->use_rtx());
+ recv_codecs = media_engine()->video().recv_codecs(context()->use_rtx());
+ }
+ result = VerifyCodecPreferences(codecs, send_codecs, recv_codecs);
+
+ if (result.ok()) {
+ senders_.front()->internal()->SetCodecPreferences(
+ MatchCodecPreferences(codecs, send_codecs));
+ codec_preferences_ = codecs;
+ }
+
+ return result;
+}
+
+std::vector<RtpHeaderExtensionCapability>
+RtpTransceiver::GetHeaderExtensionsToNegotiate() const {
+ return header_extensions_to_negotiate_;
+}
+
+std::vector<RtpHeaderExtensionCapability>
+RtpTransceiver::GetNegotiatedHeaderExtensions() const {
+ RTC_DCHECK_RUN_ON(thread_);
+ std::vector<RtpHeaderExtensionCapability> result;
+ result.reserve(header_extensions_to_negotiate_.size());
+ for (const auto& ext : header_extensions_to_negotiate_) {
+ auto negotiated = absl::c_find_if(negotiated_header_extensions_,
+ [&ext](const RtpExtension& negotiated) {
+ return negotiated.uri == ext.uri;
+ });
+ RtpHeaderExtensionCapability capability(ext.uri);
+ // TODO(bugs.webrtc.org/7477): extend when header extensions support
+ // direction.
+ capability.direction = negotiated != negotiated_header_extensions_.end()
+ ? RtpTransceiverDirection::kSendRecv
+ : RtpTransceiverDirection::kStopped;
+ result.push_back(capability);
+ }
+ return result;
+}
+
+// Helper function to determine mandatory-to-negotiate extensions.
+// See https://www.rfc-editor.org/rfc/rfc8834#name-header-extensions
+// and https://w3c.github.io/webrtc-extensions/#rtcrtptransceiver-interface
+// Since BUNDLE is offered by default, MID is mandatory and can not be turned
+// off via this API.
+bool IsMandatoryHeaderExtension(const std::string& uri) {
+ return uri == RtpExtension::kMidUri;
+}
+
+RTCError RtpTransceiver::SetHeaderExtensionsToNegotiate(
+ rtc::ArrayView<const RtpHeaderExtensionCapability> header_extensions) {
+ // https://w3c.github.io/webrtc-extensions/#dom-rtcrtptransceiver-setheaderextensionstonegotiate
+ if (header_extensions.size() != header_extensions_to_negotiate_.size()) {
+ return RTCError(RTCErrorType::INVALID_MODIFICATION,
+ "Size of extensions to negotiate does not match.");
+ }
+ // For each index i of extensions, run the following steps: ...
+ for (size_t i = 0; i < header_extensions.size(); i++) {
+ const auto& extension = header_extensions[i];
+ if (extension.uri != header_extensions_to_negotiate_[i].uri) {
+ return RTCError(RTCErrorType::INVALID_MODIFICATION,
+ "Reordering extensions is not allowed.");
+ }
+ if (IsMandatoryHeaderExtension(extension.uri) &&
+ extension.direction != RtpTransceiverDirection::kSendRecv) {
+ return RTCError(RTCErrorType::INVALID_MODIFICATION,
+ "Attempted to stop a mandatory extension.");
+ }
+
+ // TODO(bugs.webrtc.org/7477): Currently there are no recvonly extensions so
+ // this can not be checked: "When there exists header extension capabilities
+ // that have directions other than kSendRecv, restrict extension.direction
+ // as to not exceed that capability."
+ }
+
+ // Apply mutation after error checking.
+ for (size_t i = 0; i < header_extensions.size(); i++) {
+ header_extensions_to_negotiate_[i].direction =
+ header_extensions[i].direction;
+ }
+
+ return RTCError::OK();
+}
+
+void RtpTransceiver::OnNegotiationUpdate(
+ SdpType sdp_type,
+ const cricket::MediaContentDescription* content) {
+ RTC_DCHECK_RUN_ON(thread_);
+ RTC_DCHECK(content);
+ if (sdp_type == SdpType::kAnswer)
+ negotiated_header_extensions_ = content->rtp_header_extensions();
+}
+
+void RtpTransceiver::SetPeerConnectionClosed() {
+ is_pc_closed_ = true;
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/rtp_transceiver.h b/third_party/libwebrtc/pc/rtp_transceiver.h
new file mode 100644
index 0000000000..deda5d7d61
--- /dev/null
+++ b/third_party/libwebrtc/pc/rtp_transceiver.h
@@ -0,0 +1,381 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_RTP_TRANSCEIVER_H_
+#define PC_RTP_TRANSCEIVER_H_
+
+#include <stddef.h>
+
+#include <functional>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "absl/strings/string_view.h"
+#include "absl/types/optional.h"
+#include "api/array_view.h"
+#include "api/audio_options.h"
+#include "api/crypto/crypto_options.h"
+#include "api/jsep.h"
+#include "api/media_types.h"
+#include "api/rtc_error.h"
+#include "api/rtp_parameters.h"
+#include "api/rtp_receiver_interface.h"
+#include "api/rtp_sender_interface.h"
+#include "api/rtp_transceiver_direction.h"
+#include "api/rtp_transceiver_interface.h"
+#include "api/scoped_refptr.h"
+#include "api/task_queue/pending_task_safety_flag.h"
+#include "api/task_queue/task_queue_base.h"
+#include "api/video/video_bitrate_allocator_factory.h"
+#include "media/base/media_channel.h"
+#include "media/base/media_config.h"
+#include "media/base/media_engine.h"
+#include "pc/channel_interface.h"
+#include "pc/connection_context.h"
+#include "pc/proxy.h"
+#include "pc/rtp_receiver.h"
+#include "pc/rtp_receiver_proxy.h"
+#include "pc/rtp_sender.h"
+#include "pc/rtp_sender_proxy.h"
+#include "pc/rtp_transport_internal.h"
+#include "pc/session_description.h"
+#include "rtc_base/thread_annotations.h"
+
+namespace cricket {
+class MediaEngineInterface;
+}
+
+namespace webrtc {
+
+class PeerConnectionSdpMethods;
+
+// Implementation of the public RtpTransceiverInterface.
+//
+// The RtpTransceiverInterface is only intended to be used with a PeerConnection
+// that enables Unified Plan SDP. Thus, the methods that only need to implement
+// public API features and are not used internally can assume exactly one sender
+// and receiver.
+//
+// Since the RtpTransceiver is used internally by PeerConnection for tracking
+// RtpSenders, RtpReceivers, and BaseChannels, and PeerConnection needs to be
+// backwards compatible with Plan B SDP, this implementation is more flexible
+// than that required by the WebRTC specification.
+//
+// With Plan B SDP, an RtpTransceiver can have any number of senders and
+// receivers which map to a=ssrc lines in the m= section.
+// With Unified Plan SDP, an RtpTransceiver will have exactly one sender and one
+// receiver which are encapsulated by the m= section.
+//
+// This class manages the RtpSenders, RtpReceivers, and BaseChannel associated
+// with this m= section. Since the transceiver, senders, and receivers are
+// reference counted and can be referenced from JavaScript (in Chromium), these
+// objects must be ready to live for an arbitrary amount of time. The
+// BaseChannel is not reference counted, so
+// the PeerConnection must take care of creating/deleting the BaseChannel.
+//
+// The RtpTransceiver is specialized to either audio or video according to the
+// MediaType specified in the constructor. Audio RtpTransceivers will have
+// AudioRtpSenders, AudioRtpReceivers, and a VoiceChannel. Video RtpTransceivers
+// will have VideoRtpSenders, VideoRtpReceivers, and a VideoChannel.
+class RtpTransceiver : public RtpTransceiverInterface {
+ public:
+  // Construct a Plan B-style RtpTransceiver with no senders, receivers, or
+  // channel set.
+  // `media_type` specifies the type of RtpTransceiver (and, by transitivity,
+  // the type of senders, receivers, and channel). Can either be audio or
+  // video.
+  RtpTransceiver(cricket::MediaType media_type, ConnectionContext* context);
+  // Construct a Unified Plan-style RtpTransceiver with the given sender and
+  // receiver. The media type will be derived from the media types of the sender
+  // and receiver. The sender and receiver should have the same media type.
+  // `HeaderExtensionsToNegotiate` is used for initializing the return value of
+  // HeaderExtensionsToNegotiate().
+  RtpTransceiver(
+      rtc::scoped_refptr<RtpSenderProxyWithInternal<RtpSenderInternal>> sender,
+      rtc::scoped_refptr<RtpReceiverProxyWithInternal<RtpReceiverInternal>>
+          receiver,
+      ConnectionContext* context,
+      std::vector<RtpHeaderExtensionCapability> HeaderExtensionsToNegotiate,
+      std::function<void()> on_negotiation_needed);
+  ~RtpTransceiver() override;
+
+  // Not copyable or movable.
+  RtpTransceiver(const RtpTransceiver&) = delete;
+  RtpTransceiver& operator=(const RtpTransceiver&) = delete;
+  RtpTransceiver(RtpTransceiver&&) = delete;
+  RtpTransceiver& operator=(RtpTransceiver&&) = delete;
+
+  // Returns the Voice/VideoChannel set for this transceiver. May be null if
+  // the transceiver is not in the currently set local/remote description.
+  cricket::ChannelInterface* channel() const { return channel_.get(); }
+
+  // Creates the Voice/VideoChannel and sets it.
+  RTCError CreateChannel(
+      absl::string_view mid,
+      Call* call_ptr,
+      const cricket::MediaConfig& media_config,
+      bool srtp_required,
+      CryptoOptions crypto_options,
+      const cricket::AudioOptions& audio_options,
+      const cricket::VideoOptions& video_options,
+      VideoBitrateAllocatorFactory* video_bitrate_allocator_factory,
+      std::function<RtpTransportInternal*(absl::string_view)> transport_lookup);
+
+  // Sets the Voice/VideoChannel. The caller must pass in the correct channel
+  // implementation based on the type of the transceiver. The call must
+  // furthermore be made on the signaling thread.
+  //
+  // `channel`: The channel instance to be associated with the transceiver.
+  // This must be a valid pointer.
+  // The state of the object
+  // is expected to be newly constructed and not initialized for network
+  // activity (see next parameter for more).
+  //
+  // The transceiver takes ownership of `channel`.
+  //
+  // `transport_lookup`: This
+  // callback function will be used to look up the `RtpTransport` object
+  // to associate with the channel via `BaseChannel::SetRtpTransport`.
+  // The lookup function will be called on the network thread, synchronously
+  // during the call to `SetChannel`. This means that the caller of
+  // `SetChannel()` may provide a callback function that references state
+  // that exists within the calling scope of SetChannel (e.g. a variable
+  // on the stack).
+  // The reason for this design is to limit the number of times we jump
+  // synchronously to the network thread from the signaling thread.
+  // The callback allows us to combine the transport lookup with network
+  // state initialization of the channel object.
+  // ClearChannel() must be used before calling SetChannel() again.
+  void SetChannel(std::unique_ptr<cricket::ChannelInterface> channel,
+                  std::function<RtpTransportInternal*(const std::string&)>
+                      transport_lookup);
+
+  // Clear the association between the transceiver and the channel.
+  void ClearChannel();
+
+  // Adds an RtpSender of the appropriate type to be owned by this transceiver.
+  // Must not be null.
+  void AddSender(
+      rtc::scoped_refptr<RtpSenderProxyWithInternal<RtpSenderInternal>> sender);
+
+  // Removes the given RtpSender. Returns false if the sender is not owned by
+  // this transceiver.
+  bool RemoveSender(RtpSenderInterface* sender);
+
+  // Returns a vector of the senders owned by this transceiver.
+  std::vector<rtc::scoped_refptr<RtpSenderProxyWithInternal<RtpSenderInternal>>>
+  senders() const {
+    return senders_;
+  }
+
+  // Adds an RtpReceiver of the appropriate type to be owned by this
+  // transceiver. Must not be null.
+  void AddReceiver(
+      rtc::scoped_refptr<RtpReceiverProxyWithInternal<RtpReceiverInternal>>
+          receiver);
+
+  // Removes the given RtpReceiver. Returns false if the receiver is not owned
+  // by this transceiver.
+  bool RemoveReceiver(RtpReceiverInterface* receiver);
+
+  // Returns a vector of the receivers owned by this transceiver.
+  std::vector<
+      rtc::scoped_refptr<RtpReceiverProxyWithInternal<RtpReceiverInternal>>>
+  receivers() const {
+    return receivers_;
+  }
+
+  // Returns the backing object for the transceiver's Unified Plan sender.
+  rtc::scoped_refptr<RtpSenderInternal> sender_internal() const;
+
+  // Returns the backing object for the transceiver's Unified Plan receiver.
+  rtc::scoped_refptr<RtpReceiverInternal> receiver_internal() const;
+
+  // RtpTransceivers are not associated until they have a corresponding media
+  // section set in SetLocalDescription or SetRemoteDescription. Therefore,
+  // when setting a local offer we need a way to remember which transceiver was
+  // used to create which media section in the offer. Storing the mline index
+  // in CreateOffer is specified in JSEP to allow us to do that.
+  absl::optional<size_t> mline_index() const { return mline_index_; }
+  void set_mline_index(absl::optional<size_t> mline_index) {
+    mline_index_ = mline_index;
+  }
+
+  // Sets the MID for this transceiver. If the MID is not null, then the
+  // transceiver is considered "associated" with the media section that has the
+  // same MID.
+  void set_mid(const absl::optional<std::string>& mid) { mid_ = mid; }
+
+  // Sets the intended direction for this transceiver. Intended to be used
+  // internally over SetDirection since this does not trigger a negotiation
+  // needed callback.
+  void set_direction(RtpTransceiverDirection direction) {
+    direction_ = direction;
+  }
+
+  // Sets the current direction for this transceiver as negotiated in an offer/
+  // answer exchange. The current direction is null before an answer with this
+  // transceiver has been set.
+  void set_current_direction(RtpTransceiverDirection direction);
+
+  // Sets the fired direction for this transceiver. The fired direction is null
+  // until SetRemoteDescription is called or an answer is set (either local or
+  // remote) after which the only valid reason to go back to null is rollback.
+  void set_fired_direction(absl::optional<RtpTransceiverDirection> direction);
+
+  // According to JSEP rules for SetRemoteDescription, RtpTransceivers can be
+  // reused only if they were added by AddTrack.
+  void set_created_by_addtrack(bool created_by_addtrack) {
+    created_by_addtrack_ = created_by_addtrack;
+  }
+  // If AddTrack has been called then transceiver can't be removed during
+  // rollback.
+  void set_reused_for_addtrack(bool reused_for_addtrack) {
+    reused_for_addtrack_ = reused_for_addtrack;
+  }
+
+  bool created_by_addtrack() const { return created_by_addtrack_; }
+
+  bool reused_for_addtrack() const { return reused_for_addtrack_; }
+
+  // Returns true if this transceiver has ever had the current direction set to
+  // sendonly or sendrecv.
+  bool has_ever_been_used_to_send() const {
+    return has_ever_been_used_to_send_;
+  }
+
+  // Informs the transceiver that its owning
+  // PeerConnection is closed.
+  void SetPeerConnectionClosed();
+
+  // Executes the "stop the RTCRtpTransceiver" procedure from
+  // the webrtc-pc specification, described under the stop() method.
+  void StopTransceiverProcedure();
+
+  // RtpTransceiverInterface implementation.
+  cricket::MediaType media_type() const override;
+  absl::optional<std::string> mid() const override;
+  rtc::scoped_refptr<RtpSenderInterface> sender() const override;
+  rtc::scoped_refptr<RtpReceiverInterface> receiver() const override;
+  bool stopped() const override;
+  bool stopping() const override;
+  RtpTransceiverDirection direction() const override;
+  RTCError SetDirectionWithError(
+      RtpTransceiverDirection new_direction) override;
+  absl::optional<RtpTransceiverDirection> current_direction() const override;
+  absl::optional<RtpTransceiverDirection> fired_direction() const override;
+  RTCError StopStandard() override;
+  void StopInternal() override;
+  RTCError SetCodecPreferences(
+      rtc::ArrayView<RtpCodecCapability> codecs) override;
+  std::vector<RtpCodecCapability> codec_preferences() const override {
+    return codec_preferences_;
+  }
+  std::vector<RtpHeaderExtensionCapability> GetHeaderExtensionsToNegotiate()
+      const override;
+  std::vector<RtpHeaderExtensionCapability> GetNegotiatedHeaderExtensions()
+      const override;
+  RTCError SetHeaderExtensionsToNegotiate(
+      rtc::ArrayView<const RtpHeaderExtensionCapability> header_extensions)
+      override;
+
+  // Called on the signaling thread when the local or remote content description
+  // is updated. Used to update the negotiated header extensions.
+  // TODO(tommi): The implementation of this method is currently very simple and
+  // only used for updating the negotiated headers. However, we're planning to
+  // move all the updates done on the channel from the transceiver into this
+  // method. This will happen with the ownership of the channel object being
+  // moved into the transceiver.
+  void OnNegotiationUpdate(SdpType sdp_type,
+                           const cricket::MediaContentDescription* content);
+
+ private:
+  cricket::MediaEngineInterface* media_engine() const {
+    return context_->media_engine();
+  }
+  ConnectionContext* context() const { return context_; }
+  // NOTE(review): presumably registered with the channel via
+  // SetFirstPacketReceivedCallback — confirm in rtp_transceiver.cc.
+  void OnFirstPacketReceived();
+  // Helper used by the stop procedures to halt the sender and receiver.
+  void StopSendingAndReceiving();
+  // Delete a channel, and ensure that references to its media channel
+  // are updated before deleting it.
+  void PushNewMediaChannelAndDeleteChannel(
+      std::unique_ptr<cricket::ChannelInterface> channel_to_delete);
+
+  // Enforce that this object is created, used and destroyed on one thread.
+  TaskQueueBase* const thread_;
+  // True when this transceiver participates in Unified Plan semantics
+  // (exactly one sender and one receiver).
+  const bool unified_plan_;
+  const cricket::MediaType media_type_;
+  // Used to invalidate pending tasks posted to `thread_` when this object
+  // goes away.
+  rtc::scoped_refptr<PendingTaskSafetyFlag> signaling_thread_safety_;
+  // Owned senders/receivers. Unified Plan uses exactly one of each; Plan B
+  // may hold any number.
+  std::vector<rtc::scoped_refptr<RtpSenderProxyWithInternal<RtpSenderInternal>>>
+      senders_;
+  std::vector<
+      rtc::scoped_refptr<RtpReceiverProxyWithInternal<RtpReceiverInternal>>>
+      receivers_;
+
+  bool stopped_ RTC_GUARDED_BY(thread_) = false;
+  bool stopping_ RTC_GUARDED_BY(thread_) = false;
+  // Latched by SetPeerConnectionClosed(); never reset.
+  bool is_pc_closed_ = false;
+  RtpTransceiverDirection direction_ = RtpTransceiverDirection::kInactive;
+  absl::optional<RtpTransceiverDirection> current_direction_;
+  absl::optional<RtpTransceiverDirection> fired_direction_;
+  absl::optional<std::string> mid_;
+  absl::optional<size_t> mline_index_;
+  bool created_by_addtrack_ = false;
+  bool reused_for_addtrack_ = false;
+  bool has_ever_been_used_to_send_ = false;
+
+  // Accessed on both thread_ and the network thread. Considered safe
+  // because all access on the network thread is within an invoke()
+  // from thread_.
+  std::unique_ptr<cricket::ChannelInterface> channel_ = nullptr;
+  ConnectionContext* const context_;
+  // Preferences supplied via SetCodecPreferences(); returned verbatim from
+  // codec_preferences().
+  std::vector<RtpCodecCapability> codec_preferences_;
+  // Current value returned by GetHeaderExtensionsToNegotiate(); seeded from
+  // the constructor argument and mutated by SetHeaderExtensionsToNegotiate().
+  std::vector<RtpHeaderExtensionCapability> header_extensions_to_negotiate_;
+
+  // `negotiated_header_extensions_` is read and written to on the signaling
+  // thread from the SdpOfferAnswerHandler class (e.g.
+  // PushdownMediaDescription().
+  cricket::RtpHeaderExtensions negotiated_header_extensions_
+      RTC_GUARDED_BY(thread_);
+
+  // Invoked when a mutation (e.g. a direction change) requires renegotiation.
+  const std::function<void()> on_negotiation_needed_;
+};
+
+// Thread-safe proxy for RtpTransceiver: each listed method is marshalled to
+// the primary (signaling) thread. media_type() bypasses the proxy because it
+// returns a value that is const for the object's lifetime.
+BEGIN_PRIMARY_PROXY_MAP(RtpTransceiver)
+
+PROXY_PRIMARY_THREAD_DESTRUCTOR()
+BYPASS_PROXY_CONSTMETHOD0(cricket::MediaType, media_type)
+PROXY_CONSTMETHOD0(absl::optional<std::string>, mid)
+PROXY_CONSTMETHOD0(rtc::scoped_refptr<RtpSenderInterface>, sender)
+PROXY_CONSTMETHOD0(rtc::scoped_refptr<RtpReceiverInterface>, receiver)
+PROXY_CONSTMETHOD0(bool, stopped)
+PROXY_CONSTMETHOD0(bool, stopping)
+PROXY_CONSTMETHOD0(RtpTransceiverDirection, direction)
+PROXY_METHOD1(webrtc::RTCError, SetDirectionWithError, RtpTransceiverDirection)
+PROXY_CONSTMETHOD0(absl::optional<RtpTransceiverDirection>, current_direction)
+PROXY_CONSTMETHOD0(absl::optional<RtpTransceiverDirection>, fired_direction)
+PROXY_METHOD0(webrtc::RTCError, StopStandard)
+PROXY_METHOD0(void, StopInternal)
+PROXY_METHOD1(webrtc::RTCError,
+              SetCodecPreferences,
+              rtc::ArrayView<RtpCodecCapability>)
+PROXY_CONSTMETHOD0(std::vector<RtpCodecCapability>, codec_preferences)
+PROXY_CONSTMETHOD0(std::vector<RtpHeaderExtensionCapability>,
+                   GetHeaderExtensionsToNegotiate)
+PROXY_CONSTMETHOD0(std::vector<RtpHeaderExtensionCapability>,
+                   GetNegotiatedHeaderExtensions)
+PROXY_METHOD1(webrtc::RTCError,
+              SetHeaderExtensionsToNegotiate,
+              rtc::ArrayView<const RtpHeaderExtensionCapability>)
+END_PROXY_MAP(RtpTransceiver)
+
+} // namespace webrtc
+
+#endif // PC_RTP_TRANSCEIVER_H_
diff --git a/third_party/libwebrtc/pc/rtp_transceiver_unittest.cc b/third_party/libwebrtc/pc/rtp_transceiver_unittest.cc
new file mode 100644
index 0000000000..8b4a2389ce
--- /dev/null
+++ b/third_party/libwebrtc/pc/rtp_transceiver_unittest.cc
@@ -0,0 +1,485 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file contains tests for `RtpTransceiver`.
+
+#include "pc/rtp_transceiver.h"
+
+#include <memory>
+#include <utility>
+
+#include "absl/strings/string_view.h"
+#include "absl/types/optional.h"
+#include "api/peer_connection_interface.h"
+#include "api/rtp_parameters.h"
+#include "media/base/fake_media_engine.h"
+#include "media/base/media_engine.h"
+#include "pc/test/mock_channel_interface.h"
+#include "pc/test/mock_rtp_receiver_internal.h"
+#include "pc/test/mock_rtp_sender_internal.h"
+#include "rtc_base/thread.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+using ::testing::_;
+using ::testing::ElementsAre;
+using ::testing::Field;
+using ::testing::Optional;
+using ::testing::Property;
+using ::testing::Return;
+using ::testing::ReturnRef;
+
+namespace webrtc {
+
+namespace {
+
+// Base fixture: owns a ConnectionContext backed by a FakeMediaEngine, with
+// the network, worker and signaling threads all mapped to the current (test)
+// thread so everything runs synchronously.
+class RtpTransceiverTest : public testing::Test {
+ public:
+  RtpTransceiverTest()
+      : dependencies_(MakeDependencies()),
+        context_(ConnectionContext::Create(&dependencies_)) {}
+
+ protected:
+  cricket::MediaEngineInterface* media_engine() {
+    return context_->media_engine();
+  }
+  ConnectionContext* context() { return context_.get(); }
+
+ private:
+  rtc::AutoThread main_thread_;
+
+  // Builds factory dependencies where all three threads are the current
+  // thread and media is faked, so no real devices or networking are needed.
+  static PeerConnectionFactoryDependencies MakeDependencies() {
+    PeerConnectionFactoryDependencies d;
+    d.network_thread = rtc::Thread::Current();
+    d.worker_thread = rtc::Thread::Current();
+    d.signaling_thread = rtc::Thread::Current();
+    d.media_engine = std::make_unique<cricket::FakeMediaEngine>();
+    return d;
+  }
+
+  PeerConnectionFactoryDependencies dependencies_;
+  rtc::scoped_refptr<ConnectionContext> context_;
+};
+
+// Checks that a channel cannot be set on a stopped `RtpTransceiver`.
+TEST_F(RtpTransceiverTest, CannotSetChannelOnStoppedTransceiver) {
+  const std::string content_name("my_mid");
+  auto transceiver = rtc::make_ref_counted<RtpTransceiver>(
+      cricket::MediaType::MEDIA_TYPE_AUDIO, context());
+  auto channel1 = std::make_unique<cricket::MockChannelInterface>();
+  EXPECT_CALL(*channel1, media_type())
+      .WillRepeatedly(Return(cricket::MediaType::MEDIA_TYPE_AUDIO));
+  EXPECT_CALL(*channel1, mid()).WillRepeatedly(ReturnRef(content_name));
+  EXPECT_CALL(*channel1, SetFirstPacketReceivedCallback(_));
+  EXPECT_CALL(*channel1, SetRtpTransport(_)).WillRepeatedly(Return(true));
+  auto channel1_ptr = channel1.get();
+  // The transport lookup runs synchronously inside SetChannel() and is
+  // queried with the channel's mid.
+  transceiver->SetChannel(std::move(channel1), [&](const std::string& mid) {
+    EXPECT_EQ(mid, content_name);
+    return nullptr;
+  });
+  EXPECT_EQ(channel1_ptr, transceiver->channel());
+
+  // Stop the transceiver. The channel stays attached until cleared.
+  transceiver->StopInternal();
+  EXPECT_EQ(channel1_ptr, transceiver->channel());
+
+  auto channel2 = std::make_unique<cricket::MockChannelInterface>();
+  EXPECT_CALL(*channel2, media_type())
+      .WillRepeatedly(Return(cricket::MediaType::MEDIA_TYPE_AUDIO));
+
+  // Clear the current channel - required to allow SetChannel()
+  EXPECT_CALL(*channel1_ptr, SetFirstPacketReceivedCallback(_));
+  transceiver->ClearChannel();
+  // Channel can no longer be set on a stopped transceiver, so this call
+  // should be a no-op.
+  transceiver->SetChannel(std::move(channel2),
+                          [](const std::string&) { return nullptr; });
+  EXPECT_EQ(nullptr, transceiver->channel());
+}
+
+// Checks that a channel can be unset on a stopped `RtpTransceiver`
+TEST_F(RtpTransceiverTest, CanUnsetChannelOnStoppedTransceiver) {
+  const std::string content_name("my_mid");
+  auto transceiver = rtc::make_ref_counted<RtpTransceiver>(
+      cricket::MediaType::MEDIA_TYPE_VIDEO, context());
+  auto channel = std::make_unique<cricket::MockChannelInterface>();
+  EXPECT_CALL(*channel, media_type())
+      .WillRepeatedly(Return(cricket::MediaType::MEDIA_TYPE_VIDEO));
+  EXPECT_CALL(*channel, mid()).WillRepeatedly(ReturnRef(content_name));
+  // Called both when attaching and when detaching the channel.
+  EXPECT_CALL(*channel, SetFirstPacketReceivedCallback(_))
+      .WillRepeatedly(testing::Return());
+  EXPECT_CALL(*channel, SetRtpTransport(_)).WillRepeatedly(Return(true));
+
+  auto channel_ptr = channel.get();
+  transceiver->SetChannel(std::move(channel), [&](const std::string& mid) {
+    EXPECT_EQ(mid, content_name);
+    return nullptr;
+  });
+  EXPECT_EQ(channel_ptr, transceiver->channel());
+
+  // Stop the transceiver.
+  transceiver->StopInternal();
+  EXPECT_EQ(channel_ptr, transceiver->channel());
+
+  // Set the channel to `nullptr`; clearing is allowed even after stopping.
+  transceiver->ClearChannel();
+  EXPECT_EQ(nullptr, transceiver->channel());
+}
+
+// Fixture providing a Unified Plan transceiver built from mock audio
+// sender/receiver internals, using the voice engine's header extensions.
+class RtpTransceiverUnifiedPlanTest : public RtpTransceiverTest {
+ public:
+  RtpTransceiverUnifiedPlanTest()
+      : transceiver_(rtc::make_ref_counted<RtpTransceiver>(
+            RtpSenderProxyWithInternal<RtpSenderInternal>::Create(
+                rtc::Thread::Current(),
+                sender_),
+            RtpReceiverProxyWithInternal<RtpReceiverInternal>::Create(
+                rtc::Thread::Current(),
+                rtc::Thread::Current(),
+                receiver_),
+            context(),
+            media_engine()->voice().GetRtpHeaderExtensions(),
+            /* on_negotiation_needed= */ [] {})) {}
+
+  // Mock receiver that always reports itself as audio.
+  static rtc::scoped_refptr<MockRtpReceiverInternal> MockReceiver() {
+    auto receiver = rtc::make_ref_counted<MockRtpReceiverInternal>();
+    EXPECT_CALL(*receiver.get(), media_type())
+        .WillRepeatedly(Return(cricket::MediaType::MEDIA_TYPE_AUDIO));
+    return receiver;
+  }
+
+  // Mock sender that always reports itself as audio.
+  static rtc::scoped_refptr<MockRtpSenderInternal> MockSender() {
+    auto sender = rtc::make_ref_counted<MockRtpSenderInternal>();
+    EXPECT_CALL(*sender.get(), media_type())
+        .WillRepeatedly(Return(cricket::MediaType::MEDIA_TYPE_AUDIO));
+    return sender;
+  }
+
+  rtc::AutoThread main_thread_;
+  // Note: member initializers run before transceiver_'s constructor-list
+  // initialization, so the mocks exist when the transceiver is built.
+  rtc::scoped_refptr<MockRtpReceiverInternal> receiver_ = MockReceiver();
+  rtc::scoped_refptr<MockRtpSenderInternal> sender_ = MockSender();
+  rtc::scoped_refptr<RtpTransceiver> transceiver_;
+};
+
+// Basic tests for Stop()
+// Basic tests for Stop(): StopStandard() flips direction to kStopped but
+// leaves current_direction unset until the full stop procedure runs.
+TEST_F(RtpTransceiverUnifiedPlanTest, StopSetsDirection) {
+  // Expectations for the stop/teardown path on the mock sender and receiver.
+  EXPECT_CALL(*receiver_.get(), Stop());
+  EXPECT_CALL(*receiver_.get(), SetMediaChannel(_));
+  EXPECT_CALL(*sender_.get(), SetTransceiverAsStopped());
+  EXPECT_CALL(*sender_.get(), Stop());
+
+  EXPECT_EQ(RtpTransceiverDirection::kInactive, transceiver_->direction());
+  EXPECT_FALSE(transceiver_->current_direction());
+  transceiver_->StopStandard();
+  EXPECT_EQ(RtpTransceiverDirection::kStopped, transceiver_->direction());
+  EXPECT_FALSE(transceiver_->current_direction());
+  // Only the full "stop the RTCRtpTransceiver" procedure sets
+  // current_direction.
+  transceiver_->StopTransceiverProcedure();
+  EXPECT_TRUE(transceiver_->current_direction());
+  EXPECT_EQ(RtpTransceiverDirection::kStopped, transceiver_->direction());
+  EXPECT_EQ(RtpTransceiverDirection::kStopped,
+            *transceiver_->current_direction());
+}
+
+// Fixture for the SetHeaderExtensionsToNegotiate() tests. The extension list
+// mixes two arbitrary URIs with the MID and video-rotation extensions; index
+// 2 (MID) is treated as mandatory by the transceiver (see the
+// RejectsStoppedMandatoryExtensions test).
+class RtpTransceiverTestForHeaderExtensions : public RtpTransceiverTest {
+ public:
+  RtpTransceiverTestForHeaderExtensions()
+      : extensions_(
+            {RtpHeaderExtensionCapability("uri1",
+                                          1,
+                                          RtpTransceiverDirection::kSendOnly),
+             RtpHeaderExtensionCapability("uri2",
+                                          2,
+                                          RtpTransceiverDirection::kRecvOnly),
+             RtpHeaderExtensionCapability(RtpExtension::kMidUri,
+                                          3,
+                                          RtpTransceiverDirection::kSendRecv),
+             RtpHeaderExtensionCapability(RtpExtension::kVideoRotationUri,
+                                          4,
+                                          RtpTransceiverDirection::kSendRecv)}),
+        transceiver_(rtc::make_ref_counted<RtpTransceiver>(
+            RtpSenderProxyWithInternal<RtpSenderInternal>::Create(
+                rtc::Thread::Current(),
+                sender_),
+            RtpReceiverProxyWithInternal<RtpReceiverInternal>::Create(
+                rtc::Thread::Current(),
+                rtc::Thread::Current(),
+                receiver_),
+            context(),
+            extensions_,
+            /* on_negotiation_needed= */ [] {})) {}
+
+  // Mock receiver that always reports itself as audio.
+  static rtc::scoped_refptr<MockRtpReceiverInternal> MockReceiver() {
+    auto receiver = rtc::make_ref_counted<MockRtpReceiverInternal>();
+    EXPECT_CALL(*receiver.get(), media_type())
+        .WillRepeatedly(Return(cricket::MediaType::MEDIA_TYPE_AUDIO));
+    return receiver;
+  }
+
+  // Mock sender that always reports itself as audio.
+  static rtc::scoped_refptr<MockRtpSenderInternal> MockSender() {
+    auto sender = rtc::make_ref_counted<MockRtpSenderInternal>();
+    EXPECT_CALL(*sender.get(), media_type())
+        .WillRepeatedly(Return(cricket::MediaType::MEDIA_TYPE_AUDIO));
+    return sender;
+  }
+
+  // Detaches the channel; detaching clears the sender's media channel too.
+  void ClearChannel() {
+    EXPECT_CALL(*sender_.get(), SetMediaChannel(_));
+    transceiver_->ClearChannel();
+  }
+
+  rtc::AutoThread main_thread_;
+  rtc::scoped_refptr<MockRtpReceiverInternal> receiver_ = MockReceiver();
+  rtc::scoped_refptr<MockRtpSenderInternal> sender_ = MockSender();
+
+  std::vector<RtpHeaderExtensionCapability> extensions_;
+  rtc::scoped_refptr<RtpTransceiver> transceiver_;
+};
+
+// The transceiver initially offers exactly the extension list it was
+// constructed with.
+TEST_F(RtpTransceiverTestForHeaderExtensions, OffersChannelManagerList) {
+  // Expectations for the stop/teardown path on the mock sender and receiver.
+  EXPECT_CALL(*receiver_.get(), Stop());
+  EXPECT_CALL(*receiver_.get(), SetMediaChannel(_));
+  EXPECT_CALL(*sender_.get(), SetTransceiverAsStopped());
+  EXPECT_CALL(*sender_.get(), Stop());
+
+  EXPECT_EQ(transceiver_->GetHeaderExtensionsToNegotiate(), extensions_);
+}
+
+// A non-mandatory extension's direction may be set to any of the four
+// non-stopped values, and the change is reflected by the getter.
+TEST_F(RtpTransceiverTestForHeaderExtensions, ModifiesDirection) {
+  // Expectations for the stop/teardown path on the mock sender and receiver.
+  EXPECT_CALL(*receiver_.get(), Stop());
+  EXPECT_CALL(*receiver_.get(), SetMediaChannel(_));
+  EXPECT_CALL(*sender_.get(), SetTransceiverAsStopped());
+  EXPECT_CALL(*sender_.get(), Stop());
+
+  auto modified_extensions = extensions_;
+  modified_extensions[0].direction = RtpTransceiverDirection::kSendOnly;
+  EXPECT_TRUE(
+      transceiver_->SetHeaderExtensionsToNegotiate(modified_extensions).ok());
+  EXPECT_EQ(transceiver_->GetHeaderExtensionsToNegotiate(),
+            modified_extensions);
+  modified_extensions[0].direction = RtpTransceiverDirection::kRecvOnly;
+  EXPECT_TRUE(
+      transceiver_->SetHeaderExtensionsToNegotiate(modified_extensions).ok());
+  EXPECT_EQ(transceiver_->GetHeaderExtensionsToNegotiate(),
+            modified_extensions);
+  modified_extensions[0].direction = RtpTransceiverDirection::kSendRecv;
+  EXPECT_TRUE(
+      transceiver_->SetHeaderExtensionsToNegotiate(modified_extensions).ok());
+  EXPECT_EQ(transceiver_->GetHeaderExtensionsToNegotiate(),
+            modified_extensions);
+  modified_extensions[0].direction = RtpTransceiverDirection::kInactive;
+  EXPECT_TRUE(
+      transceiver_->SetHeaderExtensionsToNegotiate(modified_extensions).ok());
+  EXPECT_EQ(transceiver_->GetHeaderExtensionsToNegotiate(),
+            modified_extensions);
+}
+
+// Stopping a NON-mandatory extension (index 0, "uri1") is accepted.
+TEST_F(RtpTransceiverTestForHeaderExtensions, AcceptsStoppedExtension) {
+  // Expectations for the stop/teardown path on the mock sender and receiver.
+  EXPECT_CALL(*receiver_.get(), Stop());
+  EXPECT_CALL(*receiver_.get(), SetMediaChannel(_));
+  EXPECT_CALL(*sender_.get(), SetTransceiverAsStopped());
+  EXPECT_CALL(*sender_.get(), Stop());
+
+  auto modified_extensions = extensions_;
+  modified_extensions[0].direction = RtpTransceiverDirection::kStopped;
+  EXPECT_TRUE(
+      transceiver_->SetHeaderExtensionsToNegotiate(modified_extensions).ok());
+  EXPECT_EQ(transceiver_->GetHeaderExtensionsToNegotiate(),
+            modified_extensions);
+}
+
+// A list whose length differs from the current one is rejected with
+// INVALID_MODIFICATION and the stored list is untouched.
+TEST_F(RtpTransceiverTestForHeaderExtensions, RejectsDifferentSize) {
+  // Expectations for the stop/teardown path on the mock sender and receiver.
+  EXPECT_CALL(*receiver_.get(), Stop());
+  EXPECT_CALL(*receiver_.get(), SetMediaChannel(_));
+  EXPECT_CALL(*sender_.get(), SetTransceiverAsStopped());
+  EXPECT_CALL(*sender_.get(), Stop());
+
+  auto modified_extensions = extensions_;
+  modified_extensions.pop_back();
+
+  EXPECT_THAT(transceiver_->SetHeaderExtensionsToNegotiate(modified_extensions),
+              Property(&RTCError::type, RTCErrorType::INVALID_MODIFICATION));
+  EXPECT_EQ(transceiver_->GetHeaderExtensionsToNegotiate(), extensions_);
+}
+
+// Replacing an extension URI is rejected with INVALID_MODIFICATION and the
+// stored list is untouched.
+TEST_F(RtpTransceiverTestForHeaderExtensions, RejectsChangedUri) {
+  // Expectations for the stop/teardown path on the mock sender and receiver.
+  EXPECT_CALL(*receiver_.get(), Stop());
+  EXPECT_CALL(*receiver_.get(), SetMediaChannel(_));
+  EXPECT_CALL(*sender_.get(), SetTransceiverAsStopped());
+  EXPECT_CALL(*sender_.get(), Stop());
+
+  auto modified_extensions = extensions_;
+  ASSERT_TRUE(!modified_extensions.empty());
+  modified_extensions[0].uri = "http://webrtc.org";
+
+  EXPECT_THAT(transceiver_->SetHeaderExtensionsToNegotiate(modified_extensions),
+              Property(&RTCError::type, RTCErrorType::INVALID_MODIFICATION));
+  EXPECT_EQ(transceiver_->GetHeaderExtensionsToNegotiate(), extensions_);
+}
+
+// Reordering entries (URIs must appear at the same indices) is rejected with
+// INVALID_MODIFICATION and the stored list is untouched.
+TEST_F(RtpTransceiverTestForHeaderExtensions, RejectsReorder) {
+  // Expectations for the stop/teardown path on the mock sender and receiver.
+  EXPECT_CALL(*receiver_.get(), Stop());
+  EXPECT_CALL(*receiver_.get(), SetMediaChannel(_));
+  EXPECT_CALL(*sender_.get(), SetTransceiverAsStopped());
+  EXPECT_CALL(*sender_.get(), Stop());
+
+  auto modified_extensions = extensions_;
+  ASSERT_GE(modified_extensions.size(), 2u);
+  std::swap(modified_extensions[0], modified_extensions[1]);
+
+  EXPECT_THAT(transceiver_->SetHeaderExtensionsToNegotiate(modified_extensions),
+              Property(&RTCError::type, RTCErrorType::INVALID_MODIFICATION));
+  EXPECT_EQ(transceiver_->GetHeaderExtensionsToNegotiate(), extensions_);
+}
+
+TEST_F(RtpTransceiverTestForHeaderExtensions,
+       RejectsStoppedMandatoryExtensions) {
+  // Expectations for the stop/teardown path on the mock sender and receiver.
+  EXPECT_CALL(*receiver_.get(), Stop());
+  EXPECT_CALL(*receiver_.get(), SetMediaChannel(_));
+  EXPECT_CALL(*sender_.get(), SetTransceiverAsStopped());
+  EXPECT_CALL(*sender_.get(), Stop());
+
+  std::vector<RtpHeaderExtensionCapability> modified_extensions = extensions_;
+  // Attempting to stop the mandatory MID extension (index 2) must fail with
+  // INVALID_MODIFICATION and leave the stored list untouched.
+  modified_extensions[2].direction = RtpTransceiverDirection::kStopped;
+  EXPECT_THAT(transceiver_->SetHeaderExtensionsToNegotiate(modified_extensions),
+              Property(&RTCError::type, RTCErrorType::INVALID_MODIFICATION));
+  EXPECT_EQ(transceiver_->GetHeaderExtensionsToNegotiate(), extensions_);
+}
+
+// Before any negotiation, every extension is reported as kStopped.
+TEST_F(RtpTransceiverTestForHeaderExtensions,
+       NoNegotiatedHdrExtsWithoutChannel) {
+  // Expectations for the stop/teardown path on the mock sender and receiver.
+  EXPECT_CALL(*receiver_.get(), Stop());
+  EXPECT_CALL(*receiver_.get(), SetMediaChannel(_));
+  EXPECT_CALL(*sender_.get(), SetTransceiverAsStopped());
+  EXPECT_CALL(*sender_.get(), Stop());
+  EXPECT_THAT(transceiver_->GetNegotiatedHeaderExtensions(),
+              ElementsAre(Field(&RtpHeaderExtensionCapability::direction,
+                                RtpTransceiverDirection::kStopped),
+                          Field(&RtpHeaderExtensionCapability::direction,
+                                RtpTransceiverDirection::kStopped),
+                          Field(&RtpHeaderExtensionCapability::direction,
+                                RtpTransceiverDirection::kStopped),
+                          Field(&RtpHeaderExtensionCapability::direction,
+                                RtpTransceiverDirection::kStopped)));
+}
+
+// Attaching a channel alone does not mark any extension as negotiated; all
+// are still reported as kStopped until an answer has been applied.
+TEST_F(RtpTransceiverTestForHeaderExtensions,
+       NoNegotiatedHdrExtsWithChannelWithoutNegotiation) {
+  const std::string content_name("my_mid");
+  EXPECT_CALL(*receiver_.get(), SetMediaChannel(_)).WillRepeatedly(Return());
+  EXPECT_CALL(*receiver_.get(), Stop()).WillRepeatedly(Return());
+  EXPECT_CALL(*sender_.get(), SetMediaChannel(_));
+  EXPECT_CALL(*sender_.get(), SetTransceiverAsStopped());
+  EXPECT_CALL(*sender_.get(), Stop());
+  auto mock_channel = std::make_unique<cricket::MockChannelInterface>();
+  auto mock_channel_ptr = mock_channel.get();
+  EXPECT_CALL(*mock_channel, SetFirstPacketReceivedCallback(_));
+  EXPECT_CALL(*mock_channel, media_type())
+      .WillRepeatedly(Return(cricket::MediaType::MEDIA_TYPE_AUDIO));
+  EXPECT_CALL(*mock_channel, voice_media_send_channel())
+      .WillRepeatedly(Return(nullptr));
+  EXPECT_CALL(*mock_channel, mid()).WillRepeatedly(ReturnRef(content_name));
+  EXPECT_CALL(*mock_channel, SetRtpTransport(_)).WillRepeatedly(Return(true));
+  transceiver_->SetChannel(std::move(mock_channel),
+                           [](const std::string&) { return nullptr; });
+  EXPECT_THAT(transceiver_->GetNegotiatedHeaderExtensions(),
+              ElementsAre(Field(&RtpHeaderExtensionCapability::direction,
+                                RtpTransceiverDirection::kStopped),
+                          Field(&RtpHeaderExtensionCapability::direction,
+                                RtpTransceiverDirection::kStopped),
+                          Field(&RtpHeaderExtensionCapability::direction,
+                                RtpTransceiverDirection::kStopped),
+                          Field(&RtpHeaderExtensionCapability::direction,
+                                RtpTransceiverDirection::kStopped)));
+
+  // Detaching resets the first-packet callback on the channel again.
+  EXPECT_CALL(*mock_channel_ptr, SetFirstPacketReceivedCallback(_));
+  ClearChannel();
+}
+
+TEST_F(RtpTransceiverTestForHeaderExtensions, ReturnsNegotiatedHdrExts) {
+ const std::string content_name("my_mid");
+ EXPECT_CALL(*receiver_.get(), SetMediaChannel(_)).WillRepeatedly(Return());
+ EXPECT_CALL(*receiver_.get(), Stop()).WillRepeatedly(Return());
+ EXPECT_CALL(*sender_.get(), SetMediaChannel(_));
+ EXPECT_CALL(*sender_.get(), SetTransceiverAsStopped());
+ EXPECT_CALL(*sender_.get(), Stop());
+
+ auto mock_channel = std::make_unique<cricket::MockChannelInterface>();
+ auto mock_channel_ptr = mock_channel.get();
+ EXPECT_CALL(*mock_channel, SetFirstPacketReceivedCallback(_));
+ EXPECT_CALL(*mock_channel, media_type())
+ .WillRepeatedly(Return(cricket::MediaType::MEDIA_TYPE_AUDIO));
+ EXPECT_CALL(*mock_channel, voice_media_send_channel())
+ .WillRepeatedly(Return(nullptr));
+ EXPECT_CALL(*mock_channel, mid()).WillRepeatedly(ReturnRef(content_name));
+ EXPECT_CALL(*mock_channel, SetRtpTransport(_)).WillRepeatedly(Return(true));
+
+ cricket::RtpHeaderExtensions extensions = {webrtc::RtpExtension("uri1", 1),
+ webrtc::RtpExtension("uri2", 2)};
+ cricket::AudioContentDescription description;
+ description.set_rtp_header_extensions(extensions);
+ transceiver_->OnNegotiationUpdate(SdpType::kAnswer, &description);
+
+ transceiver_->SetChannel(std::move(mock_channel),
+ [](const std::string&) { return nullptr; });
+
+ EXPECT_THAT(transceiver_->GetNegotiatedHeaderExtensions(),
+ ElementsAre(Field(&RtpHeaderExtensionCapability::direction,
+ RtpTransceiverDirection::kSendRecv),
+ Field(&RtpHeaderExtensionCapability::direction,
+ RtpTransceiverDirection::kSendRecv),
+ Field(&RtpHeaderExtensionCapability::direction,
+ RtpTransceiverDirection::kStopped),
+ Field(&RtpHeaderExtensionCapability::direction,
+ RtpTransceiverDirection::kStopped)));
+ EXPECT_CALL(*mock_channel_ptr, SetFirstPacketReceivedCallback(_));
+ ClearChannel();
+}
+
+TEST_F(RtpTransceiverTestForHeaderExtensions,
+ ReturnsNegotiatedHdrExtsSecondTime) {
+ EXPECT_CALL(*receiver_.get(), Stop());
+ EXPECT_CALL(*receiver_.get(), SetMediaChannel(_));
+ EXPECT_CALL(*sender_.get(), SetTransceiverAsStopped());
+ EXPECT_CALL(*sender_.get(), Stop());
+
+ cricket::RtpHeaderExtensions extensions = {webrtc::RtpExtension("uri1", 1),
+ webrtc::RtpExtension("uri2", 2)};
+ cricket::AudioContentDescription description;
+ description.set_rtp_header_extensions(extensions);
+ transceiver_->OnNegotiationUpdate(SdpType::kAnswer, &description);
+
+ EXPECT_THAT(transceiver_->GetNegotiatedHeaderExtensions(),
+ ElementsAre(Field(&RtpHeaderExtensionCapability::direction,
+ RtpTransceiverDirection::kSendRecv),
+ Field(&RtpHeaderExtensionCapability::direction,
+ RtpTransceiverDirection::kSendRecv),
+ Field(&RtpHeaderExtensionCapability::direction,
+ RtpTransceiverDirection::kStopped),
+ Field(&RtpHeaderExtensionCapability::direction,
+ RtpTransceiverDirection::kStopped)));
+ extensions = {webrtc::RtpExtension("uri3", 4),
+ webrtc::RtpExtension("uri5", 6)};
+ description.set_rtp_header_extensions(extensions);
+ transceiver_->OnNegotiationUpdate(SdpType::kAnswer, &description);
+
+ EXPECT_THAT(transceiver_->GetNegotiatedHeaderExtensions(),
+ ElementsAre(Field(&RtpHeaderExtensionCapability::direction,
+ RtpTransceiverDirection::kStopped),
+ Field(&RtpHeaderExtensionCapability::direction,
+ RtpTransceiverDirection::kStopped),
+ Field(&RtpHeaderExtensionCapability::direction,
+ RtpTransceiverDirection::kStopped),
+ Field(&RtpHeaderExtensionCapability::direction,
+ RtpTransceiverDirection::kStopped)));
+}
+
+} // namespace
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/rtp_transmission_manager.cc b/third_party/libwebrtc/pc/rtp_transmission_manager.cc
new file mode 100644
index 0000000000..96b308842b
--- /dev/null
+++ b/third_party/libwebrtc/pc/rtp_transmission_manager.cc
@@ -0,0 +1,730 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/rtp_transmission_manager.h"
+
+#include <type_traits>
+#include <utility>
+
+#include "absl/types/optional.h"
+#include "api/peer_connection_interface.h"
+#include "api/rtp_transceiver_direction.h"
+#include "pc/audio_rtp_receiver.h"
+#include "pc/channel_interface.h"
+#include "pc/legacy_stats_collector_interface.h"
+#include "pc/video_rtp_receiver.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/helpers.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+
+namespace {
+
+static const char kDefaultAudioSenderId[] = "defaulta0";
+static const char kDefaultVideoSenderId[] = "defaultv0";
+
+} // namespace
+
+RtpTransmissionManager::RtpTransmissionManager(
+ bool is_unified_plan,
+ ConnectionContext* context,
+ UsagePattern* usage_pattern,
+ PeerConnectionObserver* observer,
+ LegacyStatsCollectorInterface* legacy_stats,
+ std::function<void()> on_negotiation_needed)
+ : is_unified_plan_(is_unified_plan),
+ context_(context),
+ usage_pattern_(usage_pattern),
+ observer_(observer),
+ legacy_stats_(legacy_stats),
+ on_negotiation_needed_(on_negotiation_needed),
+ weak_ptr_factory_(this) {}
+
+void RtpTransmissionManager::Close() {
+ closed_ = true;
+ observer_ = nullptr;
+}
+
+// Implementation of SetStreamsObserver
+void RtpTransmissionManager::OnSetStreams() {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ if (IsUnifiedPlan())
+ OnNegotiationNeeded();
+}
+
+// Function to call back to the PeerConnection when negotiation is needed
+void RtpTransmissionManager::OnNegotiationNeeded() {
+ on_negotiation_needed_();
+}
+
+// Function that returns the currently valid observer
+PeerConnectionObserver* RtpTransmissionManager::Observer() const {
+ RTC_DCHECK(!closed_);
+ RTC_DCHECK(observer_);
+ return observer_;
+}
+
+cricket::VoiceMediaSendChannelInterface*
+RtpTransmissionManager::voice_media_send_channel() const {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ RTC_DCHECK(!IsUnifiedPlan());
+ auto* voice_channel = GetAudioTransceiver()->internal()->channel();
+ if (voice_channel) {
+ return voice_channel->voice_media_send_channel();
+ } else {
+ return nullptr;
+ }
+}
+
+cricket::VideoMediaSendChannelInterface*
+RtpTransmissionManager::video_media_send_channel() const {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ RTC_DCHECK(!IsUnifiedPlan());
+ auto* video_channel = GetVideoTransceiver()->internal()->channel();
+ if (video_channel) {
+ return video_channel->video_media_send_channel();
+ } else {
+ return nullptr;
+ }
+}
+cricket::VoiceMediaReceiveChannelInterface*
+RtpTransmissionManager::voice_media_receive_channel() const {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ RTC_DCHECK(!IsUnifiedPlan());
+ auto* voice_channel = GetAudioTransceiver()->internal()->channel();
+ if (voice_channel) {
+ return voice_channel->voice_media_receive_channel();
+ } else {
+ return nullptr;
+ }
+}
+
+cricket::VideoMediaReceiveChannelInterface*
+RtpTransmissionManager::video_media_receive_channel() const {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ RTC_DCHECK(!IsUnifiedPlan());
+ auto* video_channel = GetVideoTransceiver()->internal()->channel();
+ if (video_channel) {
+ return video_channel->video_media_receive_channel();
+ } else {
+ return nullptr;
+ }
+}
+
+RTCErrorOr<rtc::scoped_refptr<RtpSenderInterface>>
+RtpTransmissionManager::AddTrack(
+ rtc::scoped_refptr<MediaStreamTrackInterface> track,
+ const std::vector<std::string>& stream_ids,
+ const std::vector<RtpEncodingParameters>* init_send_encodings) {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+
+ return (IsUnifiedPlan()
+ ? AddTrackUnifiedPlan(track, stream_ids, init_send_encodings)
+ : AddTrackPlanB(track, stream_ids, init_send_encodings));
+}
+
+RTCErrorOr<rtc::scoped_refptr<RtpSenderInterface>>
+RtpTransmissionManager::AddTrackPlanB(
+ rtc::scoped_refptr<MediaStreamTrackInterface> track,
+ const std::vector<std::string>& stream_ids,
+ const std::vector<RtpEncodingParameters>* init_send_encodings) {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ if (stream_ids.size() > 1u) {
+ LOG_AND_RETURN_ERROR(RTCErrorType::UNSUPPORTED_OPERATION,
+ "AddTrack with more than one stream is not "
+ "supported with Plan B semantics.");
+ }
+ std::vector<std::string> adjusted_stream_ids = stream_ids;
+ if (adjusted_stream_ids.empty()) {
+ adjusted_stream_ids.push_back(rtc::CreateRandomUuid());
+ }
+ cricket::MediaType media_type =
+ (track->kind() == MediaStreamTrackInterface::kAudioKind
+ ? cricket::MEDIA_TYPE_AUDIO
+ : cricket::MEDIA_TYPE_VIDEO);
+ auto new_sender = CreateSender(
+ media_type, track->id(), track, adjusted_stream_ids,
+ init_send_encodings
+ ? *init_send_encodings
+ : std::vector<RtpEncodingParameters>(1, RtpEncodingParameters{}));
+ if (track->kind() == MediaStreamTrackInterface::kAudioKind) {
+ new_sender->internal()->SetMediaChannel(voice_media_send_channel());
+ GetAudioTransceiver()->internal()->AddSender(new_sender);
+ const RtpSenderInfo* sender_info =
+ FindSenderInfo(local_audio_sender_infos_,
+ new_sender->internal()->stream_ids()[0], track->id());
+ if (sender_info) {
+ new_sender->internal()->SetSsrc(sender_info->first_ssrc);
+ }
+ } else {
+ RTC_DCHECK_EQ(MediaStreamTrackInterface::kVideoKind, track->kind());
+ new_sender->internal()->SetMediaChannel(video_media_send_channel());
+ GetVideoTransceiver()->internal()->AddSender(new_sender);
+ const RtpSenderInfo* sender_info =
+ FindSenderInfo(local_video_sender_infos_,
+ new_sender->internal()->stream_ids()[0], track->id());
+ if (sender_info) {
+ new_sender->internal()->SetSsrc(sender_info->first_ssrc);
+ }
+ }
+ return rtc::scoped_refptr<RtpSenderInterface>(new_sender);
+}
+
+RTCErrorOr<rtc::scoped_refptr<RtpSenderInterface>>
+RtpTransmissionManager::AddTrackUnifiedPlan(
+ rtc::scoped_refptr<MediaStreamTrackInterface> track,
+ const std::vector<std::string>& stream_ids,
+ const std::vector<RtpEncodingParameters>* init_send_encodings) {
+ auto transceiver =
+ FindFirstTransceiverForAddedTrack(track, init_send_encodings);
+ if (transceiver) {
+ RTC_LOG(LS_INFO) << "Reusing an existing "
+ << cricket::MediaTypeToString(transceiver->media_type())
+ << " transceiver for AddTrack.";
+ if (transceiver->stopping()) {
+ LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
+ "The existing transceiver is stopping.");
+ }
+
+ if (transceiver->direction() == RtpTransceiverDirection::kRecvOnly) {
+ transceiver->internal()->set_direction(
+ RtpTransceiverDirection::kSendRecv);
+ } else if (transceiver->direction() == RtpTransceiverDirection::kInactive) {
+ transceiver->internal()->set_direction(
+ RtpTransceiverDirection::kSendOnly);
+ }
+ transceiver->sender()->SetTrack(track.get());
+ transceiver->internal()->sender_internal()->set_stream_ids(stream_ids);
+ transceiver->internal()->set_reused_for_addtrack(true);
+ } else {
+ cricket::MediaType media_type =
+ (track->kind() == MediaStreamTrackInterface::kAudioKind
+ ? cricket::MEDIA_TYPE_AUDIO
+ : cricket::MEDIA_TYPE_VIDEO);
+ RTC_LOG(LS_INFO) << "Adding " << cricket::MediaTypeToString(media_type)
+ << " transceiver in response to a call to AddTrack.";
+ std::string sender_id = track->id();
+ // Avoid creating a sender with an existing ID by generating a random ID.
+ // This can happen if this is the second time AddTrack has created a sender
+ // for this track.
+ if (FindSenderById(sender_id)) {
+ sender_id = rtc::CreateRandomUuid();
+ }
+ auto sender = CreateSender(
+ media_type, sender_id, track, stream_ids,
+ init_send_encodings
+ ? *init_send_encodings
+ : std::vector<RtpEncodingParameters>(1, RtpEncodingParameters{}));
+ auto receiver = CreateReceiver(media_type, rtc::CreateRandomUuid());
+ transceiver = CreateAndAddTransceiver(sender, receiver);
+ transceiver->internal()->set_created_by_addtrack(true);
+ transceiver->internal()->set_direction(RtpTransceiverDirection::kSendRecv);
+ }
+ return transceiver->sender();
+}
+
+rtc::scoped_refptr<RtpSenderProxyWithInternal<RtpSenderInternal>>
+RtpTransmissionManager::CreateSender(
+ cricket::MediaType media_type,
+ const std::string& id,
+ rtc::scoped_refptr<MediaStreamTrackInterface> track,
+ const std::vector<std::string>& stream_ids,
+ const std::vector<RtpEncodingParameters>& send_encodings) {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ rtc::scoped_refptr<RtpSenderProxyWithInternal<RtpSenderInternal>> sender;
+ if (media_type == cricket::MEDIA_TYPE_AUDIO) {
+ RTC_DCHECK(!track ||
+ (track->kind() == MediaStreamTrackInterface::kAudioKind));
+ sender = RtpSenderProxyWithInternal<RtpSenderInternal>::Create(
+ signaling_thread(),
+ AudioRtpSender::Create(worker_thread(), id, legacy_stats_, this));
+ NoteUsageEvent(UsageEvent::AUDIO_ADDED);
+ } else {
+ RTC_DCHECK_EQ(media_type, cricket::MEDIA_TYPE_VIDEO);
+ RTC_DCHECK(!track ||
+ (track->kind() == MediaStreamTrackInterface::kVideoKind));
+ sender = RtpSenderProxyWithInternal<RtpSenderInternal>::Create(
+ signaling_thread(), VideoRtpSender::Create(worker_thread(), id, this));
+ NoteUsageEvent(UsageEvent::VIDEO_ADDED);
+ }
+ bool set_track_succeeded = sender->SetTrack(track.get());
+ RTC_DCHECK(set_track_succeeded);
+ sender->internal()->set_stream_ids(stream_ids);
+ sender->internal()->set_init_send_encodings(send_encodings);
+ return sender;
+}
+
+rtc::scoped_refptr<RtpReceiverProxyWithInternal<RtpReceiverInternal>>
+RtpTransmissionManager::CreateReceiver(cricket::MediaType media_type,
+ const std::string& receiver_id) {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ rtc::scoped_refptr<RtpReceiverProxyWithInternal<RtpReceiverInternal>>
+ receiver;
+ if (media_type == cricket::MEDIA_TYPE_AUDIO) {
+ receiver = RtpReceiverProxyWithInternal<RtpReceiverInternal>::Create(
+ signaling_thread(), worker_thread(),
+ rtc::make_ref_counted<AudioRtpReceiver>(worker_thread(), receiver_id,
+ std::vector<std::string>({}),
+ IsUnifiedPlan()));
+ NoteUsageEvent(UsageEvent::AUDIO_ADDED);
+ } else {
+ RTC_DCHECK_EQ(media_type, cricket::MEDIA_TYPE_VIDEO);
+ receiver = RtpReceiverProxyWithInternal<RtpReceiverInternal>::Create(
+ signaling_thread(), worker_thread(),
+ rtc::make_ref_counted<VideoRtpReceiver>(worker_thread(), receiver_id,
+ std::vector<std::string>({})));
+ NoteUsageEvent(UsageEvent::VIDEO_ADDED);
+ }
+ return receiver;
+}
+
+rtc::scoped_refptr<RtpTransceiverProxyWithInternal<RtpTransceiver>>
+RtpTransmissionManager::CreateAndAddTransceiver(
+ rtc::scoped_refptr<RtpSenderProxyWithInternal<RtpSenderInternal>> sender,
+ rtc::scoped_refptr<RtpReceiverProxyWithInternal<RtpReceiverInternal>>
+ receiver) {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ // Ensure that the new sender does not have an ID that is already in use by
+ // another sender.
+ // Allow receiver IDs to conflict since those come from remote SDP (which
+ // could be invalid, but should not cause a crash).
+ RTC_DCHECK(!FindSenderById(sender->id()));
+ auto transceiver = RtpTransceiverProxyWithInternal<RtpTransceiver>::Create(
+ signaling_thread(),
+ rtc::make_ref_counted<RtpTransceiver>(
+ sender, receiver, context_,
+ sender->media_type() == cricket::MEDIA_TYPE_AUDIO
+ ? media_engine()->voice().GetRtpHeaderExtensions()
+ : media_engine()->video().GetRtpHeaderExtensions(),
+ [this_weak_ptr = weak_ptr_factory_.GetWeakPtr()]() {
+ if (this_weak_ptr) {
+ this_weak_ptr->OnNegotiationNeeded();
+ }
+ }));
+ transceivers()->Add(transceiver);
+ return transceiver;
+}
+
+rtc::scoped_refptr<RtpTransceiverProxyWithInternal<RtpTransceiver>>
+RtpTransmissionManager::FindFirstTransceiverForAddedTrack(
+ rtc::scoped_refptr<MediaStreamTrackInterface> track,
+ const std::vector<RtpEncodingParameters>* init_send_encodings) {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ RTC_DCHECK(track);
+ if (init_send_encodings != nullptr) {
+ return nullptr;
+ }
+ for (auto transceiver : transceivers()->List()) {
+ if (!transceiver->sender()->track() &&
+ cricket::MediaTypeToString(transceiver->media_type()) ==
+ track->kind() &&
+ !transceiver->internal()->has_ever_been_used_to_send() &&
+ !transceiver->stopped()) {
+ return transceiver;
+ }
+ }
+ return nullptr;
+}
+
+std::vector<rtc::scoped_refptr<RtpSenderProxyWithInternal<RtpSenderInternal>>>
+RtpTransmissionManager::GetSendersInternal() const {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ std::vector<rtc::scoped_refptr<RtpSenderProxyWithInternal<RtpSenderInternal>>>
+ all_senders;
+ for (const auto& transceiver : transceivers_.List()) {
+ if (IsUnifiedPlan() && transceiver->internal()->stopped())
+ continue;
+
+ auto senders = transceiver->internal()->senders();
+ all_senders.insert(all_senders.end(), senders.begin(), senders.end());
+ }
+ return all_senders;
+}
+
+std::vector<
+ rtc::scoped_refptr<RtpReceiverProxyWithInternal<RtpReceiverInternal>>>
+RtpTransmissionManager::GetReceiversInternal() const {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ std::vector<
+ rtc::scoped_refptr<RtpReceiverProxyWithInternal<RtpReceiverInternal>>>
+ all_receivers;
+ for (const auto& transceiver : transceivers_.List()) {
+ if (IsUnifiedPlan() && transceiver->internal()->stopped())
+ continue;
+
+ auto receivers = transceiver->internal()->receivers();
+ all_receivers.insert(all_receivers.end(), receivers.begin(),
+ receivers.end());
+ }
+ return all_receivers;
+}
+
+rtc::scoped_refptr<RtpTransceiverProxyWithInternal<RtpTransceiver>>
+RtpTransmissionManager::GetAudioTransceiver() const {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ // This method only works with Plan B SDP, where there is a single
+ // audio/video transceiver.
+ RTC_DCHECK(!IsUnifiedPlan());
+ for (auto transceiver : transceivers_.List()) {
+ if (transceiver->media_type() == cricket::MEDIA_TYPE_AUDIO) {
+ return transceiver;
+ }
+ }
+ RTC_DCHECK_NOTREACHED();
+ return nullptr;
+}
+
+rtc::scoped_refptr<RtpTransceiverProxyWithInternal<RtpTransceiver>>
+RtpTransmissionManager::GetVideoTransceiver() const {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ // This method only works with Plan B SDP, where there is a single
+ // audio/video transceiver.
+ RTC_DCHECK(!IsUnifiedPlan());
+ for (auto transceiver : transceivers_.List()) {
+ if (transceiver->media_type() == cricket::MEDIA_TYPE_VIDEO) {
+ return transceiver;
+ }
+ }
+ RTC_DCHECK_NOTREACHED();
+ return nullptr;
+}
+
+void RtpTransmissionManager::AddAudioTrack(AudioTrackInterface* track,
+ MediaStreamInterface* stream) {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ RTC_DCHECK(track);
+ RTC_DCHECK(stream);
+ auto sender = FindSenderForTrack(track);
+ if (sender) {
+ // We already have a sender for this track, so just change the stream_id
+ // so that it's correct in the next call to CreateOffer.
+ sender->internal()->set_stream_ids({stream->id()});
+ return;
+ }
+
+ // Normal case; we've never seen this track before.
+ auto new_sender = CreateSender(cricket::MEDIA_TYPE_AUDIO, track->id(),
+ rtc::scoped_refptr<AudioTrackInterface>(track),
+ {stream->id()}, {{}});
+ new_sender->internal()->SetMediaChannel(voice_media_send_channel());
+ GetAudioTransceiver()->internal()->AddSender(new_sender);
+ // If the sender has already been configured in SDP, we call SetSsrc,
+ // which will connect the sender to the underlying transport. This can
+ // occur if a local session description that contains the ID of the sender
+ // is set before AddStream is called. It can also occur if the local
+ // session description is not changed and RemoveStream is called, and
+ // later AddStream is called again with the same stream.
+ const RtpSenderInfo* sender_info =
+ FindSenderInfo(local_audio_sender_infos_, stream->id(), track->id());
+ if (sender_info) {
+ new_sender->internal()->SetSsrc(sender_info->first_ssrc);
+ }
+}
+
+// TODO(deadbeef): Don't destroy RtpSenders here; they should be kept around
+// indefinitely, when we have unified plan SDP.
+void RtpTransmissionManager::RemoveAudioTrack(AudioTrackInterface* track,
+ MediaStreamInterface* stream) {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ RTC_DCHECK(!IsUnifiedPlan());
+ auto sender = FindSenderForTrack(track);
+ if (!sender) {
+ RTC_LOG(LS_WARNING) << "RtpSender for track with id " << track->id()
+ << " doesn't exist.";
+ return;
+ }
+ GetAudioTransceiver()->internal()->RemoveSender(sender.get());
+}
+
+void RtpTransmissionManager::AddVideoTrack(VideoTrackInterface* track,
+ MediaStreamInterface* stream) {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ RTC_DCHECK(track);
+ RTC_DCHECK(stream);
+ auto sender = FindSenderForTrack(track);
+ if (sender) {
+ // We already have a sender for this track, so just change the stream_id
+ // so that it's correct in the next call to CreateOffer.
+ sender->internal()->set_stream_ids({stream->id()});
+ return;
+ }
+
+ // Normal case; we've never seen this track before.
+ auto new_sender = CreateSender(cricket::MEDIA_TYPE_VIDEO, track->id(),
+ rtc::scoped_refptr<VideoTrackInterface>(track),
+ {stream->id()}, {{}});
+ new_sender->internal()->SetMediaChannel(video_media_send_channel());
+ GetVideoTransceiver()->internal()->AddSender(new_sender);
+ const RtpSenderInfo* sender_info =
+ FindSenderInfo(local_video_sender_infos_, stream->id(), track->id());
+ if (sender_info) {
+ new_sender->internal()->SetSsrc(sender_info->first_ssrc);
+ }
+}
+
+void RtpTransmissionManager::RemoveVideoTrack(VideoTrackInterface* track,
+ MediaStreamInterface* stream) {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ RTC_DCHECK(!IsUnifiedPlan());
+ auto sender = FindSenderForTrack(track);
+ if (!sender) {
+ RTC_LOG(LS_WARNING) << "RtpSender for track with id " << track->id()
+ << " doesn't exist.";
+ return;
+ }
+ GetVideoTransceiver()->internal()->RemoveSender(sender.get());
+}
+
+void RtpTransmissionManager::CreateAudioReceiver(
+ MediaStreamInterface* stream,
+ const RtpSenderInfo& remote_sender_info) {
+ RTC_DCHECK(!closed_);
+ std::vector<rtc::scoped_refptr<MediaStreamInterface>> streams;
+ streams.push_back(rtc::scoped_refptr<MediaStreamInterface>(stream));
+ // TODO(https://crbug.com/webrtc/9480): When we remove remote_streams(), use
+ // the constructor taking stream IDs instead.
+ auto audio_receiver = rtc::make_ref_counted<AudioRtpReceiver>(
+ worker_thread(), remote_sender_info.sender_id, streams, IsUnifiedPlan(),
+ voice_media_receive_channel());
+ if (remote_sender_info.sender_id == kDefaultAudioSenderId) {
+ audio_receiver->SetupUnsignaledMediaChannel();
+ } else {
+ audio_receiver->SetupMediaChannel(remote_sender_info.first_ssrc);
+ }
+
+ auto receiver = RtpReceiverProxyWithInternal<RtpReceiverInternal>::Create(
+ signaling_thread(), worker_thread(), std::move(audio_receiver));
+ GetAudioTransceiver()->internal()->AddReceiver(receiver);
+ Observer()->OnAddTrack(receiver, streams);
+ NoteUsageEvent(UsageEvent::AUDIO_ADDED);
+}
+
+void RtpTransmissionManager::CreateVideoReceiver(
+ MediaStreamInterface* stream,
+ const RtpSenderInfo& remote_sender_info) {
+ RTC_DCHECK(!closed_);
+ std::vector<rtc::scoped_refptr<MediaStreamInterface>> streams;
+ streams.push_back(rtc::scoped_refptr<MediaStreamInterface>(stream));
+ // TODO(https://crbug.com/webrtc/9480): When we remove remote_streams(), use
+ // the constructor taking stream IDs instead.
+ auto video_receiver = rtc::make_ref_counted<VideoRtpReceiver>(
+ worker_thread(), remote_sender_info.sender_id, streams);
+
+ video_receiver->SetupMediaChannel(
+ remote_sender_info.sender_id == kDefaultVideoSenderId
+ ? absl::nullopt
+ : absl::optional<uint32_t>(remote_sender_info.first_ssrc),
+ video_media_receive_channel());
+
+ auto receiver = RtpReceiverProxyWithInternal<RtpReceiverInternal>::Create(
+ signaling_thread(), worker_thread(), std::move(video_receiver));
+ GetVideoTransceiver()->internal()->AddReceiver(receiver);
+ Observer()->OnAddTrack(receiver, streams);
+ NoteUsageEvent(UsageEvent::VIDEO_ADDED);
+}
+
+// TODO(deadbeef): Keep RtpReceivers around even if track goes away in remote
+// description.
+rtc::scoped_refptr<RtpReceiverInterface>
+RtpTransmissionManager::RemoveAndStopReceiver(
+ const RtpSenderInfo& remote_sender_info) {
+ auto receiver = FindReceiverById(remote_sender_info.sender_id);
+ if (!receiver) {
+ RTC_LOG(LS_WARNING) << "RtpReceiver for track with id "
+ << remote_sender_info.sender_id << " doesn't exist.";
+ return nullptr;
+ }
+ if (receiver->media_type() == cricket::MEDIA_TYPE_AUDIO) {
+ GetAudioTransceiver()->internal()->RemoveReceiver(receiver.get());
+ } else {
+ GetVideoTransceiver()->internal()->RemoveReceiver(receiver.get());
+ }
+ return receiver;
+}
+
+void RtpTransmissionManager::OnRemoteSenderAdded(
+ const RtpSenderInfo& sender_info,
+ MediaStreamInterface* stream,
+ cricket::MediaType media_type) {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ RTC_LOG(LS_INFO) << "Creating " << cricket::MediaTypeToString(media_type)
+ << " receiver for track_id=" << sender_info.sender_id
+ << " and stream_id=" << sender_info.stream_id;
+
+ if (media_type == cricket::MEDIA_TYPE_AUDIO) {
+ CreateAudioReceiver(stream, sender_info);
+ } else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
+ CreateVideoReceiver(stream, sender_info);
+ } else {
+ RTC_DCHECK_NOTREACHED() << "Invalid media type";
+ }
+}
+
+void RtpTransmissionManager::OnRemoteSenderRemoved(
+ const RtpSenderInfo& sender_info,
+ MediaStreamInterface* stream,
+ cricket::MediaType media_type) {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ RTC_LOG(LS_INFO) << "Removing " << cricket::MediaTypeToString(media_type)
+ << " receiver for track_id=" << sender_info.sender_id
+ << " and stream_id=" << sender_info.stream_id;
+
+ rtc::scoped_refptr<RtpReceiverInterface> receiver;
+ if (media_type == cricket::MEDIA_TYPE_AUDIO) {
+ // When the MediaEngine audio channel is destroyed, the RemoteAudioSource
+ // will be notified which will end the AudioRtpReceiver::track().
+ receiver = RemoveAndStopReceiver(sender_info);
+ rtc::scoped_refptr<AudioTrackInterface> audio_track =
+ stream->FindAudioTrack(sender_info.sender_id);
+ if (audio_track) {
+ stream->RemoveTrack(audio_track);
+ }
+ } else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
+ // Stopping or destroying a VideoRtpReceiver will end the
+ // VideoRtpReceiver::track().
+ receiver = RemoveAndStopReceiver(sender_info);
+ rtc::scoped_refptr<VideoTrackInterface> video_track =
+ stream->FindVideoTrack(sender_info.sender_id);
+ if (video_track) {
+ // There's no guarantee the track is still available, e.g. the track may
+ // have been removed from the stream by an application.
+ stream->RemoveTrack(video_track);
+ }
+ } else {
+ RTC_DCHECK_NOTREACHED() << "Invalid media type";
+ }
+ if (receiver) {
+ RTC_DCHECK(!closed_);
+ Observer()->OnRemoveTrack(receiver);
+ }
+}
+
+void RtpTransmissionManager::OnLocalSenderAdded(
+ const RtpSenderInfo& sender_info,
+ cricket::MediaType media_type) {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ RTC_DCHECK(!IsUnifiedPlan());
+ auto sender = FindSenderById(sender_info.sender_id);
+ if (!sender) {
+ RTC_LOG(LS_WARNING) << "An unknown RtpSender with id "
+ << sender_info.sender_id
+ << " has been configured in the local description.";
+ return;
+ }
+
+ if (sender->media_type() != media_type) {
+ RTC_LOG(LS_WARNING) << "An RtpSender has been configured in the local"
+ " description with an unexpected media type.";
+ return;
+ }
+
+ sender->internal()->set_stream_ids({sender_info.stream_id});
+ sender->internal()->SetSsrc(sender_info.first_ssrc);
+}
+
+void RtpTransmissionManager::OnLocalSenderRemoved(
+ const RtpSenderInfo& sender_info,
+ cricket::MediaType media_type) {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ auto sender = FindSenderById(sender_info.sender_id);
+ if (!sender) {
+ // This is the normal case. I.e., RemoveStream has been called and the
+ // SessionDescriptions has been renegotiated.
+ return;
+ }
+
+ // A sender has been removed from the SessionDescription but it's still
+ // associated with the PeerConnection. This only occurs if the SDP doesn't
+ // match with the calls to CreateSender, AddStream and RemoveStream.
+ if (sender->media_type() != media_type) {
+ RTC_LOG(LS_WARNING) << "An RtpSender has been configured in the local"
+ " description with an unexpected media type.";
+ return;
+ }
+
+ sender->internal()->SetSsrc(0);
+}
+
+std::vector<RtpSenderInfo>* RtpTransmissionManager::GetRemoteSenderInfos(
+ cricket::MediaType media_type) {
+ RTC_DCHECK(media_type == cricket::MEDIA_TYPE_AUDIO ||
+ media_type == cricket::MEDIA_TYPE_VIDEO);
+ return (media_type == cricket::MEDIA_TYPE_AUDIO)
+ ? &remote_audio_sender_infos_
+ : &remote_video_sender_infos_;
+}
+
+std::vector<RtpSenderInfo>* RtpTransmissionManager::GetLocalSenderInfos(
+ cricket::MediaType media_type) {
+ RTC_DCHECK(media_type == cricket::MEDIA_TYPE_AUDIO ||
+ media_type == cricket::MEDIA_TYPE_VIDEO);
+ return (media_type == cricket::MEDIA_TYPE_AUDIO) ? &local_audio_sender_infos_
+ : &local_video_sender_infos_;
+}
+
+const RtpSenderInfo* RtpTransmissionManager::FindSenderInfo(
+ const std::vector<RtpSenderInfo>& infos,
+ const std::string& stream_id,
+ const std::string& sender_id) const {
+ for (const RtpSenderInfo& sender_info : infos) {
+ if (sender_info.stream_id == stream_id &&
+ sender_info.sender_id == sender_id) {
+ return &sender_info;
+ }
+ }
+ return nullptr;
+}
+
+rtc::scoped_refptr<RtpSenderProxyWithInternal<RtpSenderInternal>>
+RtpTransmissionManager::FindSenderForTrack(
+ MediaStreamTrackInterface* track) const {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ for (const auto& transceiver : transceivers_.List()) {
+ for (auto sender : transceiver->internal()->senders()) {
+ if (sender->track() == track) {
+ return sender;
+ }
+ }
+ }
+ return nullptr;
+}
+
+rtc::scoped_refptr<RtpSenderProxyWithInternal<RtpSenderInternal>>
+RtpTransmissionManager::FindSenderById(const std::string& sender_id) const {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ for (const auto& transceiver : transceivers_.List()) {
+ for (auto sender : transceiver->internal()->senders()) {
+ if (sender->id() == sender_id) {
+ return sender;
+ }
+ }
+ }
+ return nullptr;
+}
+
+rtc::scoped_refptr<RtpReceiverProxyWithInternal<RtpReceiverInternal>>
+RtpTransmissionManager::FindReceiverById(const std::string& receiver_id) const {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ for (const auto& transceiver : transceivers_.List()) {
+ for (auto receiver : transceiver->internal()->receivers()) {
+ if (receiver->id() == receiver_id) {
+ return receiver;
+ }
+ }
+ }
+ return nullptr;
+}
+
+cricket::MediaEngineInterface* RtpTransmissionManager::media_engine() const {
+ return context_->media_engine();
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/rtp_transmission_manager.h b/third_party/libwebrtc/pc/rtp_transmission_manager.h
new file mode 100644
index 0000000000..5a4bf83526
--- /dev/null
+++ b/third_party/libwebrtc/pc/rtp_transmission_manager.h
@@ -0,0 +1,278 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_RTP_TRANSMISSION_MANAGER_H_
+#define PC_RTP_TRANSMISSION_MANAGER_H_
+
+#include <stdint.h>
+
+#include <functional>
+#include <string>
+#include <vector>
+
+#include "api/media_stream_interface.h"
+#include "api/media_types.h"
+#include "api/peer_connection_interface.h"
+#include "api/rtc_error.h"
+#include "api/rtp_parameters.h"
+#include "api/rtp_receiver_interface.h"
+#include "api/rtp_sender_interface.h"
+#include "api/scoped_refptr.h"
+#include "api/sequence_checker.h"
+#include "media/base/media_channel.h"
+#include "pc/legacy_stats_collector_interface.h"
+#include "pc/rtp_receiver.h"
+#include "pc/rtp_receiver_proxy.h"
+#include "pc/rtp_sender.h"
+#include "pc/rtp_sender_proxy.h"
+#include "pc/rtp_transceiver.h"
+#include "pc/transceiver_list.h"
+#include "pc/usage_pattern.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/thread_annotations.h"
+#include "rtc_base/weak_ptr.h"
+
+namespace rtc {
+class Thread;
+}
+
+namespace webrtc {
+
+// This class contains information about
+// an RTPSender, used for things like looking it up by SSRC.
+struct RtpSenderInfo {
+  // Default instance: empty ids, SSRC 0 (i.e. not yet known).
+  RtpSenderInfo() : first_ssrc(0) {}
+  RtpSenderInfo(const std::string& stream_id,
+                const std::string& sender_id,
+                uint32_t ssrc)
+      : stream_id(stream_id), sender_id(sender_id), first_ssrc(ssrc) {}
+  // NOTE(review): should be a const member function (it does not mutate
+  // *this); as written it cannot be invoked on a const RtpSenderInfo.
+  bool operator==(const RtpSenderInfo& other) {
+    return this->stream_id == other.stream_id &&
+           this->sender_id == other.sender_id &&
+           this->first_ssrc == other.first_ssrc;
+  }
+  std::string stream_id;
+  std::string sender_id;
+  // An RtpSender can have many SSRCs. The first one is used as a sort of ID
+  // for communicating with the lower layers.
+  uint32_t first_ssrc;
+};
+
+// The RtpTransmissionManager class is responsible for managing the lifetime
+// and relationships between objects of type RtpSender, RtpReceiver and
+// RtpTransceiver.
+// Threading: several members are RTC_GUARDED_BY(signaling_thread()) and the
+// implementation RTC_DCHECKs the signaling thread in its lookup methods, so
+// this class is expected to be used from the signaling thread.
+class RtpTransmissionManager : public RtpSenderBase::SetStreamsObserver {
+ public:
+  // The raw pointers (context, usage_pattern, observer, legacy_stats) are
+  // stored but not owned. NOTE(review): assumed to outlive this object --
+  // confirm against the owner (PeerConnection).
+  RtpTransmissionManager(bool is_unified_plan,
+                         ConnectionContext* context,
+                         UsagePattern* usage_pattern,
+                         PeerConnectionObserver* observer,
+                         LegacyStatsCollectorInterface* legacy_stats,
+                         std::function<void()> on_negotiation_needed);
+
+  // No move or copy permitted.
+  RtpTransmissionManager(const RtpTransmissionManager&) = delete;
+  RtpTransmissionManager& operator=(const RtpTransmissionManager&) = delete;
+
+  // Stop activity. In particular, don't call observer_ any more.
+  void Close();
+
+  // RtpSenderBase::SetStreamsObserver override.
+  void OnSetStreams() override;
+
+  // Add a new track, creating transceiver if required.
+  RTCErrorOr<rtc::scoped_refptr<RtpSenderInterface>> AddTrack(
+      rtc::scoped_refptr<MediaStreamTrackInterface> track,
+      const std::vector<std::string>& stream_ids,
+      const std::vector<RtpEncodingParameters>* init_send_encodings);
+
+  // Create a new RTP sender. Does not associate with a transceiver.
+  rtc::scoped_refptr<RtpSenderProxyWithInternal<RtpSenderInternal>>
+  CreateSender(cricket::MediaType media_type,
+               const std::string& id,
+               rtc::scoped_refptr<MediaStreamTrackInterface> track,
+               const std::vector<std::string>& stream_ids,
+               const std::vector<RtpEncodingParameters>& send_encodings);
+
+  // Create a new RTP receiver. Does not associate with a transceiver.
+  rtc::scoped_refptr<RtpReceiverProxyWithInternal<RtpReceiverInternal>>
+  CreateReceiver(cricket::MediaType media_type, const std::string& receiver_id);
+
+  // Create a new RtpTransceiver of the given type and add it to the list of
+  // registered transceivers.
+  rtc::scoped_refptr<RtpTransceiverProxyWithInternal<RtpTransceiver>>
+  CreateAndAddTransceiver(
+      rtc::scoped_refptr<RtpSenderProxyWithInternal<RtpSenderInternal>> sender,
+      rtc::scoped_refptr<RtpReceiverProxyWithInternal<RtpReceiverInternal>>
+          receiver);
+
+  // Returns the first RtpTransceiver suitable for a newly added track, if such
+  // transceiver is available.
+  rtc::scoped_refptr<RtpTransceiverProxyWithInternal<RtpTransceiver>>
+  FindFirstTransceiverForAddedTrack(
+      rtc::scoped_refptr<MediaStreamTrackInterface> track,
+      const std::vector<RtpEncodingParameters>* init_send_encodings);
+
+  // Returns the list of senders currently associated with some
+  // registered transceiver
+  std::vector<rtc::scoped_refptr<RtpSenderProxyWithInternal<RtpSenderInternal>>>
+  GetSendersInternal() const;
+
+  // Returns the list of receivers currently associated with a transceiver
+  std::vector<
+      rtc::scoped_refptr<RtpReceiverProxyWithInternal<RtpReceiverInternal>>>
+  GetReceiversInternal() const;
+
+  // Plan B: Get the transceiver containing all audio senders and receivers
+  rtc::scoped_refptr<RtpTransceiverProxyWithInternal<RtpTransceiver>>
+  GetAudioTransceiver() const;
+  // Plan B: Get the transceiver containing all video senders and receivers
+  rtc::scoped_refptr<RtpTransceiverProxyWithInternal<RtpTransceiver>>
+  GetVideoTransceiver() const;
+
+  // Add an audio track, reusing or creating the sender.
+  void AddAudioTrack(AudioTrackInterface* track, MediaStreamInterface* stream);
+  // Plan B: Remove an audio track, removing the sender.
+  void RemoveAudioTrack(AudioTrackInterface* track,
+                        MediaStreamInterface* stream);
+  // Add a video track, reusing or creating the sender.
+  void AddVideoTrack(VideoTrackInterface* track, MediaStreamInterface* stream);
+  // Plan B: Remove a video track, removing the sender.
+  void RemoveVideoTrack(VideoTrackInterface* track,
+                        MediaStreamInterface* stream);
+
+  // Triggered when a remote sender has been seen for the first time in a remote
+  // session description. It creates a remote MediaStreamTrackInterface
+  // implementation and triggers CreateAudioReceiver or CreateVideoReceiver.
+  void OnRemoteSenderAdded(const RtpSenderInfo& sender_info,
+                           MediaStreamInterface* stream,
+                           cricket::MediaType media_type);
+
+  // Triggered when a remote sender has been removed from a remote session
+  // description. It removes the remote sender with id `sender_id` from a remote
+  // MediaStream and triggers DestroyAudioReceiver or DestroyVideoReceiver.
+  void OnRemoteSenderRemoved(const RtpSenderInfo& sender_info,
+                             MediaStreamInterface* stream,
+                             cricket::MediaType media_type);
+
+  // Triggered when a local sender has been seen for the first time in a local
+  // session description.
+  // This method triggers CreateAudioSender or CreateVideoSender if the rtp
+  // streams in the local SessionDescription can be mapped to a MediaStreamTrack
+  // in a MediaStream in `local_streams_`
+  void OnLocalSenderAdded(const RtpSenderInfo& sender_info,
+                          cricket::MediaType media_type);
+
+  // Triggered when a local sender has been removed from a local session
+  // description.
+  // This method triggers DestroyAudioSender or DestroyVideoSender if a stream
+  // has been removed from the local SessionDescription and the stream can be
+  // mapped to a MediaStreamTrack in a MediaStream in `local_streams_`.
+  void OnLocalSenderRemoved(const RtpSenderInfo& sender_info,
+                            cricket::MediaType media_type);
+
+  // Accessors for the per-media-type sender-info lists; the returned pointers
+  // reference member vectors owned by this object.
+  std::vector<RtpSenderInfo>* GetRemoteSenderInfos(
+      cricket::MediaType media_type);
+  std::vector<RtpSenderInfo>* GetLocalSenderInfos(
+      cricket::MediaType media_type);
+  const RtpSenderInfo* FindSenderInfo(const std::vector<RtpSenderInfo>& infos,
+                                      const std::string& stream_id,
+                                      const std::string& sender_id) const;
+
+  // Return the RtpSender with the given track attached.
+  rtc::scoped_refptr<RtpSenderProxyWithInternal<RtpSenderInternal>>
+  FindSenderForTrack(MediaStreamTrackInterface* track) const;
+
+  // Return the RtpSender with the given id, or null if none exists.
+  rtc::scoped_refptr<RtpSenderProxyWithInternal<RtpSenderInternal>>
+  FindSenderById(const std::string& sender_id) const;
+
+  // Return the RtpReceiver with the given id, or null if none exists.
+  rtc::scoped_refptr<RtpReceiverProxyWithInternal<RtpReceiverInternal>>
+  FindReceiverById(const std::string& receiver_id) const;
+
+  TransceiverList* transceivers() { return &transceivers_; }
+  const TransceiverList* transceivers() const { return &transceivers_; }
+
+  // Plan B helpers for getting the voice/video media channels for the single
+  // audio/video transceiver, if it exists.
+  cricket::VoiceMediaSendChannelInterface* voice_media_send_channel() const;
+  cricket::VideoMediaSendChannelInterface* video_media_send_channel() const;
+  cricket::VoiceMediaReceiveChannelInterface* voice_media_receive_channel()
+      const;
+  cricket::VideoMediaReceiveChannelInterface* video_media_receive_channel()
+      const;
+
+ private:
+  rtc::Thread* signaling_thread() const { return context_->signaling_thread(); }
+  rtc::Thread* worker_thread() const { return context_->worker_thread(); }
+  bool IsUnifiedPlan() const { return is_unified_plan_; }
+  void NoteUsageEvent(UsageEvent event) {
+    usage_pattern_->NoteUsageEvent(event);
+  }
+
+  // AddTrack implementation when Unified Plan is specified.
+  RTCErrorOr<rtc::scoped_refptr<RtpSenderInterface>> AddTrackUnifiedPlan(
+      rtc::scoped_refptr<MediaStreamTrackInterface> track,
+      const std::vector<std::string>& stream_ids,
+      const std::vector<RtpEncodingParameters>* init_send_encodings);
+  // AddTrack implementation when Plan B is specified.
+  RTCErrorOr<rtc::scoped_refptr<RtpSenderInterface>> AddTrackPlanB(
+      rtc::scoped_refptr<MediaStreamTrackInterface> track,
+      const std::vector<std::string>& stream_ids,
+      const std::vector<RtpEncodingParameters>* init_send_encodings);
+
+  // Create an RtpReceiver that sources an audio track.
+  void CreateAudioReceiver(MediaStreamInterface* stream,
+                           const RtpSenderInfo& remote_sender_info)
+      RTC_RUN_ON(signaling_thread());
+
+  // Create an RtpReceiver that sources a video track.
+  void CreateVideoReceiver(MediaStreamInterface* stream,
+                           const RtpSenderInfo& remote_sender_info)
+      RTC_RUN_ON(signaling_thread());
+  rtc::scoped_refptr<RtpReceiverInterface> RemoveAndStopReceiver(
+      const RtpSenderInfo& remote_sender_info) RTC_RUN_ON(signaling_thread());
+
+  PeerConnectionObserver* Observer() const;
+  void OnNegotiationNeeded();
+
+  cricket::MediaEngineInterface* media_engine() const;
+
+  rtc::UniqueRandomIdGenerator* ssrc_generator() const {
+    return context_->ssrc_generator();
+  }
+
+  TransceiverList transceivers_;
+
+  // These lists store sender info seen in local/remote descriptions.
+  std::vector<RtpSenderInfo> remote_audio_sender_infos_
+      RTC_GUARDED_BY(signaling_thread());
+  std::vector<RtpSenderInfo> remote_video_sender_infos_
+      RTC_GUARDED_BY(signaling_thread());
+  std::vector<RtpSenderInfo> local_audio_sender_infos_
+      RTC_GUARDED_BY(signaling_thread());
+  std::vector<RtpSenderInfo> local_video_sender_infos_
+      RTC_GUARDED_BY(signaling_thread());
+
+  // NOTE(review): presumably set by Close() so observer callbacks stop --
+  // confirm in the .cc.
+  bool closed_ = false;
+  bool const is_unified_plan_;
+  ConnectionContext* context_;
+  UsagePattern* usage_pattern_;
+  PeerConnectionObserver* observer_;
+  LegacyStatsCollectorInterface* const legacy_stats_;
+  std::function<void()> on_negotiation_needed_;
+  rtc::WeakPtrFactory<RtpTransmissionManager> weak_ptr_factory_
+      RTC_GUARDED_BY(signaling_thread());
+};
+
+} // namespace webrtc
+
+#endif // PC_RTP_TRANSMISSION_MANAGER_H_
diff --git a/third_party/libwebrtc/pc/rtp_transport.cc b/third_party/libwebrtc/pc/rtp_transport.cc
new file mode 100644
index 0000000000..653b51fd9e
--- /dev/null
+++ b/third_party/libwebrtc/pc/rtp_transport.cc
@@ -0,0 +1,303 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/rtp_transport.h"
+
+#include <errno.h>
+
+#include <cstdint>
+#include <utility>
+
+#include "absl/strings/string_view.h"
+#include "api/array_view.h"
+#include "api/units/timestamp.h"
+#include "media/base/rtp_utils.h"
+#include "modules/rtp_rtcp/source/rtp_packet_received.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/copy_on_write_buffer.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/trace_event.h"
+
+namespace webrtc {
+
+// Enables/disables RTP/RTCP multiplexing (RFC 5761). Enabling mux can make
+// the transport ready to send even without a writable RTCP transport, so
+// readiness is re-evaluated here.
+void RtpTransport::SetRtcpMuxEnabled(bool enable) {
+  rtcp_mux_enabled_ = enable;
+  MaybeSignalReadyToSend();
+}
+
+// Name of the underlying RTP packet transport.
+// NOTE(review): dereferences rtp_packet_transport_ unconditionally; callers
+// must not invoke this before SetRtpPacketTransport() -- TODO confirm.
+const std::string& RtpTransport::transport_name() const {
+  return rtp_packet_transport_->transport_name();
+}
+
+// Sets a socket option on the RTP packet transport. Returns the transport's
+// result code.
+int RtpTransport::SetRtpOption(rtc::Socket::Option opt, int value) {
+  return rtp_packet_transport_->SetOption(opt, value);
+}
+
+// Sets a socket option on the dedicated RTCP transport; returns -1 when there
+// is none (e.g. when rtcp-mux is in use and no RTCP transport was set).
+int RtpTransport::SetRtcpOption(rtc::Socket::Option opt, int value) {
+  if (rtcp_packet_transport_) {
+    return rtcp_packet_transport_->SetOption(opt, value);
+  }
+  return -1;
+}
+
+// Swaps in a new RTP packet transport (may be null to clear), moving all
+// signal subscriptions from the old transport to the new one and
+// re-evaluating send readiness.
+void RtpTransport::SetRtpPacketTransport(
+    rtc::PacketTransportInternal* new_packet_transport) {
+  if (new_packet_transport == rtp_packet_transport_) {
+    return;
+  }
+  if (rtp_packet_transport_) {
+    // Detach every signal from the outgoing transport so no stale callbacks
+    // fire into this object.
+    rtp_packet_transport_->SignalReadyToSend.disconnect(this);
+    rtp_packet_transport_->SignalReadPacket.disconnect(this);
+    rtp_packet_transport_->SignalNetworkRouteChanged.disconnect(this);
+    rtp_packet_transport_->SignalWritableState.disconnect(this);
+    rtp_packet_transport_->SignalSentPacket.disconnect(this);
+    // Reset the network route of the old transport.
+    SendNetworkRouteChanged(absl::optional<rtc::NetworkRoute>());
+  }
+  if (new_packet_transport) {
+    new_packet_transport->SignalReadyToSend.connect(
+        this, &RtpTransport::OnReadyToSend);
+    new_packet_transport->SignalReadPacket.connect(this,
+                                                   &RtpTransport::OnReadPacket);
+    new_packet_transport->SignalNetworkRouteChanged.connect(
+        this, &RtpTransport::OnNetworkRouteChanged);
+    new_packet_transport->SignalWritableState.connect(
+        this, &RtpTransport::OnWritableState);
+    new_packet_transport->SignalSentPacket.connect(this,
+                                                   &RtpTransport::OnSentPacket);
+    // Set the network route for the new transport.
+    SendNetworkRouteChanged(new_packet_transport->network_route());
+  }
+
+  rtp_packet_transport_ = new_packet_transport;
+  // Assumes the transport is ready to send if it is writable. If we are wrong,
+  // ready to send will be updated the next time we try to send.
+  SetReadyToSend(false,
+                 rtp_packet_transport_ && rtp_packet_transport_->writable());
+}
+
+// Swaps in a new dedicated RTCP packet transport; mirrors
+// SetRtpPacketTransport (NOTE(review): the signal re-wiring below duplicates
+// that method line-for-line -- a shared private helper would remove the
+// duplication).
+void RtpTransport::SetRtcpPacketTransport(
+    rtc::PacketTransportInternal* new_packet_transport) {
+  if (new_packet_transport == rtcp_packet_transport_) {
+    return;
+  }
+  if (rtcp_packet_transport_) {
+    rtcp_packet_transport_->SignalReadyToSend.disconnect(this);
+    rtcp_packet_transport_->SignalReadPacket.disconnect(this);
+    rtcp_packet_transport_->SignalNetworkRouteChanged.disconnect(this);
+    rtcp_packet_transport_->SignalWritableState.disconnect(this);
+    rtcp_packet_transport_->SignalSentPacket.disconnect(this);
+    // Reset the network route of the old transport.
+    SendNetworkRouteChanged(absl::optional<rtc::NetworkRoute>());
+  }
+  if (new_packet_transport) {
+    new_packet_transport->SignalReadyToSend.connect(
+        this, &RtpTransport::OnReadyToSend);
+    new_packet_transport->SignalReadPacket.connect(this,
+                                                   &RtpTransport::OnReadPacket);
+    new_packet_transport->SignalNetworkRouteChanged.connect(
+        this, &RtpTransport::OnNetworkRouteChanged);
+    new_packet_transport->SignalWritableState.connect(
+        this, &RtpTransport::OnWritableState);
+    new_packet_transport->SignalSentPacket.connect(this,
+                                                   &RtpTransport::OnSentPacket);
+    // Set the network route for the new transport.
+    SendNetworkRouteChanged(new_packet_transport->network_route());
+  }
+  rtcp_packet_transport_ = new_packet_transport;
+
+  // Assumes the transport is ready to send if it is writable. If we are wrong,
+  // ready to send will be updated the next time we try to send.
+  SetReadyToSend(true,
+                 rtcp_packet_transport_ && rtcp_packet_transport_->writable());
+}
+
+// A channel is writable if its effective transport exists and is writable.
+// With rtcp-mux enabled, RTCP falls back to the RTP transport.
+bool RtpTransport::IsWritable(bool rtcp) const {
+  rtc::PacketTransportInternal* transport = rtcp && !rtcp_mux_enabled_
+                                                ? rtcp_packet_transport_
+                                                : rtp_packet_transport_;
+  return transport && transport->writable();
+}
+
+// Sends an RTP packet; thin wrapper around SendPacket(rtcp=false, ...).
+bool RtpTransport::SendRtpPacket(rtc::CopyOnWriteBuffer* packet,
+                                 const rtc::PacketOptions& options,
+                                 int flags) {
+  return SendPacket(false, packet, options, flags);
+}
+
+// Sends an RTCP packet; thin wrapper around SendPacket(rtcp=true, ...).
+bool RtpTransport::SendRtcpPacket(rtc::CopyOnWriteBuffer* packet,
+                                  const rtc::PacketOptions& options,
+                                  int flags) {
+  return SendPacket(true, packet, options, flags);
+}
+
+// Shared implementation for SendRtpPacket/SendRtcpPacket. Returns true iff
+// the whole packet was accepted by the selected packet transport.
+bool RtpTransport::SendPacket(bool rtcp,
+                              rtc::CopyOnWriteBuffer* packet,
+                              const rtc::PacketOptions& options,
+                              int flags) {
+  // With rtcp-mux, RTCP shares the RTP transport.
+  rtc::PacketTransportInternal* transport = rtcp && !rtcp_mux_enabled_
+                                                ? rtcp_packet_transport_
+                                                : rtp_packet_transport_;
+  // NOTE(review): `transport` is dereferenced without a null check; callers
+  // are presumably expected to have verified IsWritable()/IsReadyToSend()
+  // first -- TODO confirm.
+  int ret = transport->SendPacket(packet->cdata<char>(), packet->size(),
+                                  options, flags);
+  if (ret != static_cast<int>(packet->size())) {
+    if (transport->GetError() == ENOTCONN) {
+      // Not connected: mark this channel not-ready until the transport
+      // signals ready-to-send again.
+      RTC_LOG(LS_WARNING) << "Got ENOTCONN from transport.";
+      SetReadyToSend(rtcp, false);
+    }
+    return false;
+  }
+  return true;
+}
+
+// Replaces the header-extension map used when parsing incoming RTP packets
+// for demuxing (e.g. to read the MID extension).
+void RtpTransport::UpdateRtpHeaderExtensionMap(
+    const cricket::RtpHeaderExtensions& header_extensions) {
+  header_extension_map_ = RtpHeaderExtensionMap(header_extensions);
+}
+
+// (Re-)registers `sink` with the demuxer under `criteria`. The sink is
+// removed first so re-registration with new criteria is a single call.
+bool RtpTransport::RegisterRtpDemuxerSink(const RtpDemuxerCriteria& criteria,
+                                          RtpPacketSinkInterface* sink) {
+  rtp_demuxer_.RemoveSink(sink);
+  if (!rtp_demuxer_.AddSink(criteria, sink)) {
+    RTC_LOG(LS_ERROR) << "Failed to register the sink for RTP demuxer.";
+    return false;
+  }
+  return true;
+}
+
+// Removes `sink` from the demuxer; false (with an error log) if it was not
+// registered.
+bool RtpTransport::UnregisterRtpDemuxerSink(RtpPacketSinkInterface* sink) {
+  if (!rtp_demuxer_.RemoveSink(sink)) {
+    RTC_LOG(LS_ERROR) << "Failed to unregister the sink for RTP demuxer.";
+    return false;
+  }
+  return true;
+}
+
+// Parses `packet` as RTP (using the current header-extension map so MID etc.
+// are readable) and routes it through the demuxer to the registered sink.
+// A packet_time_us of -1 means "no receive timestamp available".
+void RtpTransport::DemuxPacket(rtc::CopyOnWriteBuffer packet,
+                               int64_t packet_time_us) {
+  webrtc::RtpPacketReceived parsed_packet(
+      &header_extension_map_, packet_time_us == -1
+                                  ? Timestamp::MinusInfinity()
+                                  : Timestamp::Micros(packet_time_us));
+  if (!parsed_packet.Parse(std::move(packet))) {
+    RTC_LOG(LS_ERROR)
+        << "Failed to parse the incoming RTP packet before demuxing. Drop it.";
+    return;
+  }
+
+  if (!rtp_demuxer_.OnRtpPacket(parsed_packet)) {
+    // No sink claimed the packet; hand it to the undemuxable-packet handler
+    // (a no-op unless one was installed via
+    // SetUnDemuxableRtpPacketReceivedHandler).
+    RTC_LOG(LS_VERBOSE) << "Failed to demux RTP packet: "
+                        << RtpDemuxer::DescribePacket(parsed_packet);
+    NotifyUnDemuxableRtpPacketReceived(parsed_packet);
+  }
+}
+
+// True when every transport in use is writable: the RTP transport always,
+// plus the dedicated RTCP transport when rtcp-mux is disabled.
+bool RtpTransport::IsTransportWritable() {
+  auto rtcp_packet_transport =
+      rtcp_mux_enabled_ ? nullptr : rtcp_packet_transport_;
+  return rtp_packet_transport_ && rtp_packet_transport_->writable() &&
+         (!rtcp_packet_transport || rtcp_packet_transport->writable());
+}
+
+// Forwarded from a packet transport; attributed to the RTCP channel only if
+// the signal came from the dedicated RTCP transport.
+void RtpTransport::OnReadyToSend(rtc::PacketTransportInternal* transport) {
+  SetReadyToSend(transport == rtcp_packet_transport_, true);
+}
+
+// Forwards a network-route change from the packet transport to subscribers.
+// Virtual: SrtpTransport overrides this (see header).
+void RtpTransport::OnNetworkRouteChanged(
+    absl::optional<rtc::NetworkRoute> network_route) {
+  SendNetworkRouteChanged(network_route);
+}
+
+// A packet transport's writable state changed; publish the aggregate
+// writability (both RTP and, if separate, RTCP must be writable).
+void RtpTransport::OnWritableState(
+    rtc::PacketTransportInternal* packet_transport) {
+  RTC_DCHECK(packet_transport == rtp_packet_transport_ ||
+             packet_transport == rtcp_packet_transport_);
+  SendWritableState(IsTransportWritable());
+}
+
+// Forwards a sent-packet notification from either packet transport to
+// subscribers.
+void RtpTransport::OnSentPacket(rtc::PacketTransportInternal* packet_transport,
+                                const rtc::SentPacket& sent_packet) {
+  RTC_DCHECK(packet_transport == rtp_packet_transport_ ||
+             packet_transport == rtcp_packet_transport_);
+  SendSentPacket(sent_packet);
+}
+
+// Entry point for a received RTP packet; virtual so SrtpTransport can decrypt
+// before demuxing.
+void RtpTransport::OnRtpPacketReceived(rtc::CopyOnWriteBuffer packet,
+                                       int64_t packet_time_us) {
+  // NOTE(review): `packet` could be std::move()d into DemuxPacket to avoid an
+  // extra CopyOnWriteBuffer copy.
+  DemuxPacket(packet, packet_time_us);
+}
+
+// Entry point for a received RTCP packet; forwards it to subscribers
+// (RTCP is not demuxed here -- see the header comment on the signal).
+void RtpTransport::OnRtcpPacketReceived(rtc::CopyOnWriteBuffer packet,
+                                        int64_t packet_time_us) {
+  SendRtcpPacketReceived(&packet, packet_time_us);
+}
+
+// Raw packet callback from the packet transport: classifies the payload as
+// RTP vs RTCP, validates its size, and dispatches to the matching handler.
+void RtpTransport::OnReadPacket(rtc::PacketTransportInternal* transport,
+                                const char* data,
+                                size_t len,
+                                const int64_t& packet_time_us,
+                                int flags) {
+  TRACE_EVENT0("webrtc", "RtpTransport::OnReadPacket");
+
+  // When using RTCP multiplexing we might get RTCP packets on the RTP
+  // transport. We check the RTP payload type to determine if it is RTCP.
+  auto array_view = rtc::MakeArrayView(data, len);
+  cricket::RtpPacketType packet_type = cricket::InferRtpPacketType(array_view);
+  // Filter out the packet that is neither RTP nor RTCP.
+  if (packet_type == cricket::RtpPacketType::kUnknown) {
+    return;
+  }
+
+  // Protect ourselves against crazy data.
+  if (!cricket::IsValidRtpPacketSize(packet_type, len)) {
+    RTC_LOG(LS_ERROR) << "Dropping incoming "
+                      << cricket::RtpPacketTypeToString(packet_type)
+                      << " packet: wrong size=" << len;
+    return;
+  }
+
+  // Copy into a CopyOnWriteBuffer; the transport owns `data` only for the
+  // duration of this callback.
+  rtc::CopyOnWriteBuffer packet(data, len);
+  if (packet_type == cricket::RtpPacketType::kRtcp) {
+    OnRtcpPacketReceived(std::move(packet), packet_time_us);
+  } else {
+    OnRtpPacketReceived(std::move(packet), packet_time_us);
+  }
+}
+
+// Records the per-channel (RTP or RTCP) ready flag, then recomputes and
+// possibly signals the aggregate ready-to-send state.
+void RtpTransport::SetReadyToSend(bool rtcp, bool ready) {
+  if (rtcp) {
+    rtcp_ready_to_send_ = ready;
+  } else {
+    rtp_ready_to_send_ = ready;
+  }
+
+  MaybeSignalReadyToSend();
+}
+
+// Computes the aggregate ready-to-send state (RTP ready, and RTCP ready or
+// muxed away) and notifies subscribers when it changed. Re-entrant changes
+// made by a subscriber are deferred via a posted task rather than recursing.
+void RtpTransport::MaybeSignalReadyToSend() {
+  bool ready_to_send =
+      rtp_ready_to_send_ && (rtcp_ready_to_send_ || rtcp_mux_enabled_);
+  if (ready_to_send != ready_to_send_) {
+    if (processing_ready_to_send_) {
+      // Delay ReadyToSend processing until current operation is finished.
+      // Note that this may not cause a signal, since ready_to_send may
+      // have a new value by the time this executes.
+      TaskQueueBase::Current()->PostTask(
+          SafeTask(safety_.flag(), [this] { MaybeSignalReadyToSend(); }));
+      return;
+    }
+    ready_to_send_ = ready_to_send;
+    processing_ready_to_send_ = true;
+    SendReadyToSend(ready_to_send);
+    processing_ready_to_send_ = false;
+  }
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/rtp_transport.h b/third_party/libwebrtc/pc/rtp_transport.h
new file mode 100644
index 0000000000..456c91c370
--- /dev/null
+++ b/third_party/libwebrtc/pc/rtp_transport.h
@@ -0,0 +1,148 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_RTP_TRANSPORT_H_
+#define PC_RTP_TRANSPORT_H_
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <string>
+
+#include "absl/types/optional.h"
+#include "api/task_queue/pending_task_safety_flag.h"
+#include "call/rtp_demuxer.h"
+#include "call/video_receive_stream.h"
+#include "modules/rtp_rtcp/include/rtp_header_extension_map.h"
+#include "p2p/base/packet_transport_internal.h"
+#include "pc/rtp_transport_internal.h"
+#include "pc/session_description.h"
+#include "rtc_base/async_packet_socket.h"
+#include "rtc_base/copy_on_write_buffer.h"
+#include "rtc_base/network/sent_packet.h"
+#include "rtc_base/network_route.h"
+#include "rtc_base/socket.h"
+
+namespace rtc {
+
+class CopyOnWriteBuffer;
+struct PacketOptions;
+class PacketTransportInternal;
+
+} // namespace rtc
+
+namespace webrtc {
+
+// Concrete RtpTransportInternal implementation carrying plain (unencrypted)
+// RTP/RTCP over a pair of packet transports; SrtpTransport and
+// DtlsSrtpTransport specialize behavior via the virtual hooks in the
+// protected section below.
+class RtpTransport : public RtpTransportInternal {
+ public:
+  RtpTransport(const RtpTransport&) = delete;
+  RtpTransport& operator=(const RtpTransport&) = delete;
+
+  explicit RtpTransport(bool rtcp_mux_enabled)
+      : rtcp_mux_enabled_(rtcp_mux_enabled) {}
+
+  bool rtcp_mux_enabled() const override { return rtcp_mux_enabled_; }
+  void SetRtcpMuxEnabled(bool enable) override;
+
+  const std::string& transport_name() const override;
+
+  int SetRtpOption(rtc::Socket::Option opt, int value) override;
+  int SetRtcpOption(rtc::Socket::Option opt, int value) override;
+
+  rtc::PacketTransportInternal* rtp_packet_transport() const {
+    return rtp_packet_transport_;
+  }
+  void SetRtpPacketTransport(rtc::PacketTransportInternal* rtp);
+
+  rtc::PacketTransportInternal* rtcp_packet_transport() const {
+    return rtcp_packet_transport_;
+  }
+  void SetRtcpPacketTransport(rtc::PacketTransportInternal* rtcp);
+
+  bool IsReadyToSend() const override { return ready_to_send_; }
+
+  bool IsWritable(bool rtcp) const override;
+
+  bool SendRtpPacket(rtc::CopyOnWriteBuffer* packet,
+                     const rtc::PacketOptions& options,
+                     int flags) override;
+
+  bool SendRtcpPacket(rtc::CopyOnWriteBuffer* packet,
+                      const rtc::PacketOptions& options,
+                      int flags) override;
+
+  // Base class carries no SRTP; SrtpTransport overrides this to report true.
+  bool IsSrtpActive() const override { return false; }
+
+  void UpdateRtpHeaderExtensionMap(
+      const cricket::RtpHeaderExtensions& header_extensions) override;
+
+  bool RegisterRtpDemuxerSink(const RtpDemuxerCriteria& criteria,
+                              RtpPacketSinkInterface* sink) override;
+
+  bool UnregisterRtpDemuxerSink(RtpPacketSinkInterface* sink) override;
+
+ protected:
+  // These methods will be used in the subclasses.
+  void DemuxPacket(rtc::CopyOnWriteBuffer packet, int64_t packet_time_us);
+
+  bool SendPacket(bool rtcp,
+                  rtc::CopyOnWriteBuffer* packet,
+                  const rtc::PacketOptions& options,
+                  int flags);
+
+  // Overridden by SrtpTransport.
+  virtual void OnNetworkRouteChanged(
+      absl::optional<rtc::NetworkRoute> network_route);
+  virtual void OnRtpPacketReceived(rtc::CopyOnWriteBuffer packet,
+                                   int64_t packet_time_us);
+  virtual void OnRtcpPacketReceived(rtc::CopyOnWriteBuffer packet,
+                                    int64_t packet_time_us);
+  // Overridden by SrtpTransport and DtlsSrtpTransport.
+  virtual void OnWritableState(rtc::PacketTransportInternal* packet_transport);
+
+ private:
+  void OnReadyToSend(rtc::PacketTransportInternal* transport);
+  void OnSentPacket(rtc::PacketTransportInternal* packet_transport,
+                    const rtc::SentPacket& sent_packet);
+  void OnReadPacket(rtc::PacketTransportInternal* transport,
+                    const char* data,
+                    size_t len,
+                    const int64_t& packet_time_us,
+                    int flags);
+
+  // Updates "ready to send" for an individual channel and fires
+  // SignalReadyToSend.
+  void SetReadyToSend(bool rtcp, bool ready);
+
+  void MaybeSignalReadyToSend();
+
+  bool IsTransportWritable();
+
+  bool rtcp_mux_enabled_;
+
+  // Non-owning; assigned via Set*PacketTransport() and never deleted here.
+  rtc::PacketTransportInternal* rtp_packet_transport_ = nullptr;
+  rtc::PacketTransportInternal* rtcp_packet_transport_ = nullptr;
+
+  // ready_to_send_ is the aggregate of the two per-channel flags below
+  // (RTCP's flag is ignored when rtcp-mux is enabled).
+  bool ready_to_send_ = false;
+  bool rtp_ready_to_send_ = false;
+  bool rtcp_ready_to_send_ = false;
+
+  RtpDemuxer rtp_demuxer_;
+
+  // Used for identifying the MID for RtpDemuxer.
+  RtpHeaderExtensionMap header_extension_map_;
+  // Guard against recursive "ready to send" signals
+  bool processing_ready_to_send_ = false;
+  ScopedTaskSafety safety_;
+};
+
+} // namespace webrtc
+
+#endif // PC_RTP_TRANSPORT_H_
diff --git a/third_party/libwebrtc/pc/rtp_transport_internal.h b/third_party/libwebrtc/pc/rtp_transport_internal.h
new file mode 100644
index 0000000000..4114fa9340
--- /dev/null
+++ b/third_party/libwebrtc/pc/rtp_transport_internal.h
@@ -0,0 +1,174 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_RTP_TRANSPORT_INTERNAL_H_
+#define PC_RTP_TRANSPORT_INTERNAL_H_
+
+#include <string>
+#include <utility>
+
+#include "call/rtp_demuxer.h"
+#include "p2p/base/ice_transport_internal.h"
+#include "pc/session_description.h"
+#include "rtc_base/callback_list.h"
+#include "rtc_base/network_route.h"
+#include "rtc_base/ssl_stream_adapter.h"
+
+namespace rtc {
+class CopyOnWriteBuffer;
+struct PacketOptions;
+} // namespace rtc
+
+namespace webrtc {
+
+// This class is an internal interface; it is not accessible to API consumers
+// but is accessible to internal classes in order to send and receive RTP and
+// RTCP packets belonging to a single RTP session. Additional convenience and
+// configuration methods are also provided.
+// In addition to the pure-virtual transport operations, this base class owns
+// the subscriber callback lists and exposes protected Send* helpers that
+// derived transports use to notify subscribers.
+class RtpTransportInternal : public sigslot::has_slots<> {
+ public:
+  virtual ~RtpTransportInternal() = default;
+
+  virtual void SetRtcpMuxEnabled(bool enable) = 0;
+
+  virtual const std::string& transport_name() const = 0;
+
+  // Sets socket options on the underlying RTP or RTCP transports.
+  virtual int SetRtpOption(rtc::Socket::Option opt, int value) = 0;
+  virtual int SetRtcpOption(rtc::Socket::Option opt, int value) = 0;
+
+  virtual bool rtcp_mux_enabled() const = 0;
+
+  virtual bool IsReadyToSend() const = 0;
+
+  // Called whenever a transport's ready-to-send state changes. The argument
+  // is true if all used transports are ready to send. This is more specific
+  // than just "writable"; it means the last send didn't return ENOTCONN.
+  void SubscribeReadyToSend(const void* tag,
+                            absl::AnyInvocable<void(bool)> callback) {
+    callback_list_ready_to_send_.AddReceiver(tag, std::move(callback));
+  }
+  void UnsubscribeReadyToSend(const void* tag) {
+    callback_list_ready_to_send_.RemoveReceivers(tag);
+  }
+
+  // Called whenever an RTCP packet is received. There is no equivalent signal
+  // for demuxable RTP packets because they would be forwarded to the
+  // BaseChannel through the RtpDemuxer callback.
+  void SubscribeRtcpPacketReceived(
+      const void* tag,
+      absl::AnyInvocable<void(rtc::CopyOnWriteBuffer*, int64_t)> callback) {
+    callback_list_rtcp_packet_received_.AddReceiver(tag, std::move(callback));
+  }
+  // There doesn't seem to be a need to unsubscribe from this signal.
+
+  // Called whenever a RTP packet that can not be demuxed by the transport is
+  // received. Replaces the previous handler; defaults to a no-op.
+  void SetUnDemuxableRtpPacketReceivedHandler(
+      absl::AnyInvocable<void(webrtc::RtpPacketReceived&)> callback) {
+    callback_undemuxable_rtp_packet_received_ = std::move(callback);
+  }
+
+  // Called whenever the network route of the P2P layer transport changes.
+  // The argument is an optional network route.
+  void SubscribeNetworkRouteChanged(
+      const void* tag,
+      absl::AnyInvocable<void(absl::optional<rtc::NetworkRoute>)> callback) {
+    callback_list_network_route_changed_.AddReceiver(tag, std::move(callback));
+  }
+  void UnsubscribeNetworkRouteChanged(const void* tag) {
+    callback_list_network_route_changed_.RemoveReceivers(tag);
+  }
+
+  // Called whenever a transport's writable state might change. The argument is
+  // true if the transport is writable, otherwise it is false.
+  void SubscribeWritableState(const void* tag,
+                              absl::AnyInvocable<void(bool)> callback) {
+    callback_list_writable_state_.AddReceiver(tag, std::move(callback));
+  }
+  void UnsubscribeWritableState(const void* tag) {
+    callback_list_writable_state_.RemoveReceivers(tag);
+  }
+  void SubscribeSentPacket(
+      const void* tag,
+      absl::AnyInvocable<void(const rtc::SentPacket&)> callback) {
+    callback_list_sent_packet_.AddReceiver(tag, std::move(callback));
+  }
+  void UnsubscribeSentPacket(const void* tag) {
+    callback_list_sent_packet_.RemoveReceivers(tag);
+  }
+
+  virtual bool IsWritable(bool rtcp) const = 0;
+
+  // TODO(zhihuang): Pass the `packet` by copy so that the original data
+  // wouldn't be modified.
+  virtual bool SendRtpPacket(rtc::CopyOnWriteBuffer* packet,
+                             const rtc::PacketOptions& options,
+                             int flags) = 0;
+
+  virtual bool SendRtcpPacket(rtc::CopyOnWriteBuffer* packet,
+                              const rtc::PacketOptions& options,
+                              int flags) = 0;
+
+  // This method updates the RTP header extension map so that the RTP transport
+  // can parse the received packets and identify the MID. This is called by the
+  // BaseChannel when setting the content description.
+  //
+  // TODO(zhihuang): Merging and replacing following methods handling header
+  // extensions with SetParameters:
+  // UpdateRtpHeaderExtensionMap,
+  // UpdateSendEncryptedHeaderExtensionIds,
+  // UpdateRecvEncryptedHeaderExtensionIds,
+  // CacheRtpAbsSendTimeHeaderExtension,
+  virtual void UpdateRtpHeaderExtensionMap(
+      const cricket::RtpHeaderExtensions& header_extensions) = 0;
+
+  virtual bool IsSrtpActive() const = 0;
+
+  virtual bool RegisterRtpDemuxerSink(const RtpDemuxerCriteria& criteria,
+                                      RtpPacketSinkInterface* sink) = 0;
+
+  virtual bool UnregisterRtpDemuxerSink(RtpPacketSinkInterface* sink) = 0;
+
+ protected:
+  // Notification helpers for derived classes; each fans out to the matching
+  // subscriber list above.
+  void SendReadyToSend(bool arg) { callback_list_ready_to_send_.Send(arg); }
+  void SendRtcpPacketReceived(rtc::CopyOnWriteBuffer* buffer,
+                              int64_t packet_time_us) {
+    callback_list_rtcp_packet_received_.Send(buffer, packet_time_us);
+  }
+  void NotifyUnDemuxableRtpPacketReceived(RtpPacketReceived& packet) {
+    callback_undemuxable_rtp_packet_received_(packet);
+  }
+  void SendNetworkRouteChanged(absl::optional<rtc::NetworkRoute> route) {
+    callback_list_network_route_changed_.Send(route);
+  }
+  void SendWritableState(bool state) {
+    callback_list_writable_state_.Send(state);
+  }
+  void SendSentPacket(const rtc::SentPacket& packet) {
+    callback_list_sent_packet_.Send(packet);
+  }
+
+ private:
+  CallbackList<bool> callback_list_ready_to_send_;
+  CallbackList<rtc::CopyOnWriteBuffer*, int64_t>
+      callback_list_rtcp_packet_received_;
+  // Default handler discards undemuxable packets.
+  absl::AnyInvocable<void(webrtc::RtpPacketReceived&)>
+      callback_undemuxable_rtp_packet_received_ =
+          [](RtpPacketReceived& packet) {};
+  CallbackList<absl::optional<rtc::NetworkRoute>>
+      callback_list_network_route_changed_;
+  CallbackList<bool> callback_list_writable_state_;
+  CallbackList<const rtc::SentPacket&> callback_list_sent_packet_;
+};
+
+} // namespace webrtc
+
+#endif // PC_RTP_TRANSPORT_INTERNAL_H_
diff --git a/third_party/libwebrtc/pc/rtp_transport_unittest.cc b/third_party/libwebrtc/pc/rtp_transport_unittest.cc
new file mode 100644
index 0000000000..5b6a8309e0
--- /dev/null
+++ b/third_party/libwebrtc/pc/rtp_transport_unittest.cc
@@ -0,0 +1,352 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/rtp_transport.h"
+
+#include <utility>
+
+#include "p2p/base/fake_packet_transport.h"
+#include "pc/test/rtp_transport_test_util.h"
+#include "rtc_base/buffer.h"
+#include "rtc_base/containers/flat_set.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/third_party/sigslot/sigslot.h"
+#include "test/gtest.h"
+#include "test/run_loop.h"
+
+namespace webrtc {
+
+// Shared fixtures for the RtpTransport tests below: rtcp-mux on/off flags,
+// fake network ids for route-change checks, and a canned IPv4+UDP overhead.
+constexpr bool kMuxDisabled = false;
+constexpr bool kMuxEnabled = true;
+constexpr uint16_t kLocalNetId = 1;
+constexpr uint16_t kRemoteNetId = 2;
+constexpr int kLastPacketId = 100;
+constexpr int kTransportOverheadPerPacket = 28; // Ipv4(20) + UDP(8).
+
+// Test helper that subscribes to an RtpTransport's signals and records what
+// was observed: the latest ready-to-send state, the latest network route, and
++// NOTE(review): also counts SentPacket signals per underlying packet
+// transport so tests can verify which transport (RTP vs RTCP) a packet
+// actually went out on.
+class SignalObserver : public sigslot::has_slots<> {
+ public:
+  explicit SignalObserver(RtpTransport* transport) {
+    transport_ = transport;
+    transport->SubscribeReadyToSend(
+        this, [this](bool ready) { OnReadyToSend(ready); });
+    transport->SubscribeNetworkRouteChanged(
+        this, [this](absl::optional<rtc::NetworkRoute> route) {
+          OnNetworkRouteChanged(route);
+        });
+    // Packet transports may be set after construction; only transports
+    // already attached at this point get their SignalSentPacket hooked up.
+    if (transport->rtp_packet_transport()) {
+      transport->rtp_packet_transport()->SignalSentPacket.connect(
+          this, &SignalObserver::OnSentPacket);
+    }
+
+    if (transport->rtcp_packet_transport()) {
+      transport->rtcp_packet_transport()->SignalSentPacket.connect(
+          this, &SignalObserver::OnSentPacket);
+    }
+  }
+
+  bool ready() const { return ready_; }
+  void OnReadyToSend(bool ready) { ready_ = ready; }
+
+  absl::optional<rtc::NetworkRoute> network_route() { return network_route_; }
+  void OnNetworkRouteChanged(absl::optional<rtc::NetworkRoute> network_route) {
+    network_route_ = network_route;
+  }
+
+  // Attributes a sent packet to the RTP or RTCP transport by pointer
+  // identity; anything that is not the RTP transport must be the RTCP one.
+  void OnSentPacket(rtc::PacketTransportInternal* packet_transport,
+                    const rtc::SentPacket& sent_packet) {
+    if (packet_transport == transport_->rtp_packet_transport()) {
+      rtp_transport_sent_count_++;
+    } else {
+      ASSERT_EQ(transport_->rtcp_packet_transport(), packet_transport);
+      rtcp_transport_sent_count_++;
+    }
+  }
+
+  int rtp_transport_sent_count() { return rtp_transport_sent_count_; }
+
+  int rtcp_transport_sent_count() { return rtcp_transport_sent_count_; }
+
+ private:
+  int rtp_transport_sent_count_ = 0;
+  int rtcp_transport_sent_count_ = 0;
+  RtpTransport* transport_ = nullptr;
+  bool ready_ = false;
+  absl::optional<rtc::NetworkRoute> network_route_;
+};
+
+// With rtcp-mux disabled, both transports are required: setting only the
+// RTCP transport is not enough; ready-to-send fires once RTP is also set.
+TEST(RtpTransportTest, SettingRtcpAndRtpSignalsReady) {
+  RtpTransport transport(kMuxDisabled);
+  SignalObserver observer(&transport);
+  rtc::FakePacketTransport fake_rtcp("fake_rtcp");
+  fake_rtcp.SetWritable(true);
+  rtc::FakePacketTransport fake_rtp("fake_rtp");
+  fake_rtp.SetWritable(true);
+
+  transport.SetRtcpPacketTransport(&fake_rtcp); // rtcp ready
+  EXPECT_FALSE(observer.ready());
+  transport.SetRtpPacketTransport(&fake_rtp); // rtp ready
+  EXPECT_TRUE(observer.ready());
+}
+
+// Mirror of the test above with the set order reversed: RTP first, then
+// RTCP; readiness must only be signaled after the second transport arrives.
+TEST(RtpTransportTest, SettingRtpAndRtcpSignalsReady) {
+  RtpTransport transport(kMuxDisabled);
+  SignalObserver observer(&transport);
+  rtc::FakePacketTransport fake_rtcp("fake_rtcp");
+  fake_rtcp.SetWritable(true);
+  rtc::FakePacketTransport fake_rtp("fake_rtp");
+  fake_rtp.SetWritable(true);
+
+  transport.SetRtpPacketTransport(&fake_rtp); // rtp ready
+  EXPECT_FALSE(observer.ready());
+  transport.SetRtcpPacketTransport(&fake_rtcp); // rtcp ready
+  EXPECT_TRUE(observer.ready());
+}
+
+// With rtcp-mux enabled, the RTP transport alone suffices for readiness.
+TEST(RtpTransportTest, SettingRtpWithRtcpMuxEnabledSignalsReady) {
+  RtpTransport transport(kMuxEnabled);
+  SignalObserver observer(&transport);
+  rtc::FakePacketTransport fake_rtp("fake_rtp");
+  fake_rtp.SetWritable(true);
+
+  transport.SetRtpPacketTransport(&fake_rtp); // rtp ready
+  EXPECT_TRUE(observer.ready());
+}
+
+// Turning rtcp-mux off while only the RTP transport is attached must
+// transition the transport back to not-ready (an RTCP transport is now
+// required but missing).
+TEST(RtpTransportTest, DisablingRtcpMuxSignalsNotReady) {
+  RtpTransport transport(kMuxEnabled);
+  SignalObserver observer(&transport);
+  rtc::FakePacketTransport fake_rtp("fake_rtp");
+  fake_rtp.SetWritable(true);
+
+  transport.SetRtpPacketTransport(&fake_rtp); // rtp ready
+  EXPECT_TRUE(observer.ready());
+
+  transport.SetRtcpMuxEnabled(false);
+  EXPECT_FALSE(observer.ready());
+}
+
+// Converse of the previous test: enabling rtcp-mux after the RTP transport
+// is writable removes the need for a separate RTCP transport, so readiness
+// flips to true.
+TEST(RtpTransportTest, EnablingRtcpMuxSignalsReady) {
+  RtpTransport transport(kMuxDisabled);
+  SignalObserver observer(&transport);
+  rtc::FakePacketTransport fake_rtp("fake_rtp");
+  fake_rtp.SetWritable(true);
+
+  transport.SetRtpPacketTransport(&fake_rtp); // rtp ready
+  EXPECT_FALSE(observer.ready());
+
+  transport.SetRtcpMuxEnabled(true);
+  EXPECT_TRUE(observer.ready());
+}
+
+// Tests the SignalNetworkRoute is fired when setting a packet transport.
+// Attaching an RTP transport that already carries a network route must
+// forward every route field (connectivity, endpoints, packet id, overhead)
+// to subscribers, and detaching the transport must clear the route again.
+TEST(RtpTransportTest, SetRtpTransportWithNetworkRouteChanged) {
+  RtpTransport transport(kMuxDisabled);
+  SignalObserver observer(&transport);
+  rtc::FakePacketTransport fake_rtp("fake_rtp");
+
+  EXPECT_FALSE(observer.network_route());
+
+  rtc::NetworkRoute network_route;
+  // Set a non-null RTP transport with a new network route.
+  network_route.connected = true;
+  network_route.local = rtc::RouteEndpoint::CreateWithNetworkId(kLocalNetId);
+  network_route.remote = rtc::RouteEndpoint::CreateWithNetworkId(kRemoteNetId);
+  network_route.last_sent_packet_id = kLastPacketId;
+  network_route.packet_overhead = kTransportOverheadPerPacket;
+  fake_rtp.SetNetworkRoute(absl::optional<rtc::NetworkRoute>(network_route));
+  transport.SetRtpPacketTransport(&fake_rtp);
+  ASSERT_TRUE(observer.network_route());
+  EXPECT_TRUE(observer.network_route()->connected);
+  EXPECT_EQ(kLocalNetId, observer.network_route()->local.network_id());
+  EXPECT_EQ(kRemoteNetId, observer.network_route()->remote.network_id());
+  EXPECT_EQ(kTransportOverheadPerPacket,
+            observer.network_route()->packet_overhead);
+  EXPECT_EQ(kLastPacketId, observer.network_route()->last_sent_packet_id);
+
+  // Set a null RTP transport.
+  transport.SetRtpPacketTransport(nullptr);
+  EXPECT_FALSE(observer.network_route());
+}
+
+// Same route-propagation check as above, but for the RTCP packet transport:
+// attaching it publishes the route, detaching it clears the route.
+TEST(RtpTransportTest, SetRtcpTransportWithNetworkRouteChanged) {
+  RtpTransport transport(kMuxDisabled);
+  SignalObserver observer(&transport);
+  rtc::FakePacketTransport fake_rtcp("fake_rtcp");
+
+  EXPECT_FALSE(observer.network_route());
+
+  rtc::NetworkRoute network_route;
+  // Set a non-null RTCP transport with a new network route.
+  network_route.connected = true;
+  network_route.local = rtc::RouteEndpoint::CreateWithNetworkId(kLocalNetId);
+  network_route.remote = rtc::RouteEndpoint::CreateWithNetworkId(kRemoteNetId);
+  network_route.last_sent_packet_id = kLastPacketId;
+  network_route.packet_overhead = kTransportOverheadPerPacket;
+  fake_rtcp.SetNetworkRoute(absl::optional<rtc::NetworkRoute>(network_route));
+  transport.SetRtcpPacketTransport(&fake_rtcp);
+  ASSERT_TRUE(observer.network_route());
+  EXPECT_TRUE(observer.network_route()->connected);
+  EXPECT_EQ(kLocalNetId, observer.network_route()->local.network_id());
+  EXPECT_EQ(kRemoteNetId, observer.network_route()->remote.network_id());
+  EXPECT_EQ(kTransportOverheadPerPacket,
+            observer.network_route()->packet_overhead);
+  EXPECT_EQ(kLastPacketId, observer.network_route()->last_sent_packet_id);
+
+  // Set a null RTCP transport.
+  transport.SetRtcpPacketTransport(nullptr);
+  EXPECT_FALSE(observer.network_route());
+}
+
+// Test that RTCP packets are sent over correct transport based on the RTCP-mux
+// status. The fake transports loop packets back to themselves so the
+// SignalObserver's per-transport sent counters reflect the routing decision.
+TEST(RtpTransportTest, RtcpPacketSentOverCorrectTransport) {
+  // If the RTCP-mux is not enabled, RTCP packets are expected to be sent over
+  // the RtcpPacketTransport.
+  RtpTransport transport(kMuxDisabled);
+  rtc::FakePacketTransport fake_rtcp("fake_rtcp");
+  rtc::FakePacketTransport fake_rtp("fake_rtp");
+  transport.SetRtcpPacketTransport(&fake_rtcp); // rtcp ready
+  transport.SetRtpPacketTransport(&fake_rtp); // rtp ready
+  SignalObserver observer(&transport);
+
+  fake_rtp.SetDestination(&fake_rtp, true);
+  fake_rtcp.SetDestination(&fake_rtcp, true);
+
+  rtc::CopyOnWriteBuffer packet;
+  EXPECT_TRUE(transport.SendRtcpPacket(&packet, rtc::PacketOptions(), 0));
+  EXPECT_EQ(1, observer.rtcp_transport_sent_count());
+
+  // The RTCP packets are expected to be sent over RtpPacketTransport if
+  // RTCP-mux is enabled.
+  transport.SetRtcpMuxEnabled(true);
+  EXPECT_TRUE(transport.SendRtcpPacket(&packet, rtc::PacketOptions(), 0));
+  EXPECT_EQ(1, observer.rtp_transport_sent_count());
+}
+
+// The ready-to-send signal must be edge-triggered: re-applying the same
+// transport or the same mux setting must not emit a duplicate notification.
+TEST(RtpTransportTest, ChangingReadyToSendStateOnlySignalsWhenChanged) {
+  RtpTransport transport(kMuxEnabled);
+  TransportObserver observer(&transport);
+  rtc::FakePacketTransport fake_rtp("fake_rtp");
+  fake_rtp.SetWritable(true);
+
+  // State changes, so we should signal.
+  transport.SetRtpPacketTransport(&fake_rtp);
+  EXPECT_EQ(observer.ready_to_send_signal_count(), 1);
+
+  // State does not change, so we should not signal.
+  transport.SetRtpPacketTransport(&fake_rtp);
+  EXPECT_EQ(observer.ready_to_send_signal_count(), 1);
+
+  // State does not change, so we should not signal.
+  transport.SetRtcpMuxEnabled(true);
+  EXPECT_EQ(observer.ready_to_send_signal_count(), 1);
+
+  // State changes, so we should signal.
+  transport.SetRtcpMuxEnabled(false);
+  EXPECT_EQ(observer.ready_to_send_signal_count(), 2);
+}
+
+// Test that SignalPacketReceived fires with rtcp=true when a RTCP packet is
+// received. The 4-byte packet below has payload type 73 (RTCP receiver
+// report range), so the demuxer must classify it as RTCP, not RTP.
+TEST(RtpTransportTest, SignalDemuxedRtcp) {
+  RtpTransport transport(kMuxDisabled);
+  rtc::FakePacketTransport fake_rtp("fake_rtp");
+  fake_rtp.SetDestination(&fake_rtp, true);
+  transport.SetRtpPacketTransport(&fake_rtp);
+  TransportObserver observer(&transport);
+
+  // An rtcp packet.
+  const unsigned char data[] = {0x80, 73, 0, 0};
+  const int len = 4;
+  const rtc::PacketOptions options;
+  const int flags = 0;
+  fake_rtp.SendPacket(reinterpret_cast<const char*>(data), len, options, flags);
+  EXPECT_EQ(0, observer.rtp_count());
+  EXPECT_EQ(1, observer.rtcp_count());
+}
+
+// Minimal 12-byte RTP packet (version 2, payload type 0x11, zero ssrc/seq)
+// shared by the payload-type demuxing tests below.
+static const unsigned char kRtpData[] = {0x80, 0x11, 0, 0, 0, 0,
+                                         0, 0, 0, 0, 0, 0};
+static const int kRtpLen = 12;
+
+// Test that SignalPacketReceived fires with rtcp=false when a RTP packet with a
+// handled payload type is received. The registered demuxer criteria match the
+// 0x11 payload type carried in kRtpData, so the packet must be demuxed to the
+// sink rather than reported as un-demuxable.
+TEST(RtpTransportTest, SignalHandledRtpPayloadType) {
+  RtpTransport transport(kMuxDisabled);
+  rtc::FakePacketTransport fake_rtp("fake_rtp");
+  fake_rtp.SetDestination(&fake_rtp, true);
+  transport.SetRtpPacketTransport(&fake_rtp);
+  TransportObserver observer(&transport);
+  RtpDemuxerCriteria demuxer_criteria;
+  // Add a handled payload type.
+  demuxer_criteria.payload_types().insert(0x11);
+  transport.RegisterRtpDemuxerSink(demuxer_criteria, &observer);
+
+  // An rtp packet.
+  const rtc::PacketOptions options;
+  const int flags = 0;
+  rtc::Buffer rtp_data(kRtpData, kRtpLen);
+  fake_rtp.SendPacket(rtp_data.data<char>(), kRtpLen, options, flags);
+  EXPECT_EQ(1, observer.rtp_count());
+  EXPECT_EQ(0, observer.un_demuxable_rtp_count());
+  EXPECT_EQ(0, observer.rtcp_count());
+  // Remove the sink before destroying the transport.
+  transport.UnregisterRtpDemuxerSink(&observer);
+}
+
+// Test that SignalPacketReceived does not fire when a RTP packet with an
+// unhandled payload type is received. The criteria register 0x12 while the
+// packet carries 0x11, so the packet must surface via the un-demuxable
+// callback instead of the sink.
+TEST(RtpTransportTest, DontSignalUnhandledRtpPayloadType) {
+  RtpTransport transport(kMuxDisabled);
+  rtc::FakePacketTransport fake_rtp("fake_rtp");
+  fake_rtp.SetDestination(&fake_rtp, true);
+  transport.SetRtpPacketTransport(&fake_rtp);
+  TransportObserver observer(&transport);
+  RtpDemuxerCriteria demuxer_criteria;
+  // Add an unhandled payload type.
+  demuxer_criteria.payload_types().insert(0x12);
+  transport.RegisterRtpDemuxerSink(demuxer_criteria, &observer);
+
+  const rtc::PacketOptions options;
+  const int flags = 0;
+  rtc::Buffer rtp_data(kRtpData, kRtpLen);
+  fake_rtp.SendPacket(rtp_data.data<char>(), kRtpLen, options, flags);
+  EXPECT_EQ(0, observer.rtp_count());
+  EXPECT_EQ(1, observer.un_demuxable_rtp_count());
+  EXPECT_EQ(0, observer.rtcp_count());
+  // Remove the sink before destroying the transport.
+  transport.UnregisterRtpDemuxerSink(&observer);
+}
+
+// Regression test: sending a packet from inside the ready-to-send callback
+// (i.e. re-entering the transport while it is delivering a signal) must not
+// crash, and the resulting send failure must flip ready-to-send to false
+// asynchronously rather than recursively.
+TEST(RtpTransportTest, RecursiveSetSendDoesNotCrash) {
+  const int kShortTimeout = 100;
+  test::RunLoop loop;
+  RtpTransport transport(kMuxEnabled);
+  rtc::FakePacketTransport fake_rtp("fake_rtp");
+  transport.SetRtpPacketTransport(&fake_rtp);
+  TransportObserver observer(&transport);
+  observer.SetActionOnReadyToSend([&](bool ready) {
+    const rtc::PacketOptions options;
+    const int flags = 0;
+    rtc::CopyOnWriteBuffer rtp_data(kRtpData, kRtpLen);
+    transport.SendRtpPacket(&rtp_data, options, flags);
+  });
+  // The fake RTP will have no destination, so will return -1.
+  fake_rtp.SetError(ENOTCONN);
+  fake_rtp.SetWritable(true);
+  // At this point, only the initial ready-to-send is observed.
+  EXPECT_TRUE(observer.ready_to_send());
+  EXPECT_EQ(observer.ready_to_send_signal_count(), 1);
+  // After the wait, the ready-to-send false is observed.
+  EXPECT_EQ_WAIT(observer.ready_to_send_signal_count(), 2, kShortTimeout);
+  EXPECT_FALSE(observer.ready_to_send());
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/scenario_tests/BUILD.gn b/third_party/libwebrtc/pc/scenario_tests/BUILD.gn
new file mode 100644
index 0000000000..fa3a67c9a2
--- /dev/null
+++ b/third_party/libwebrtc/pc/scenario_tests/BUILD.gn
@@ -0,0 +1,24 @@
+# Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import("../../webrtc.gni")
+
+# Test-only target bundling the PC scenario tests (currently just
+# goog_cc_test.cc); only built when tests are enabled.
+if (rtc_include_tests) {
+  rtc_library("pc_scenario_tests") {
+    testonly = true
+    sources = [ "goog_cc_test.cc" ]
+    deps = [
+      "../../api:rtc_stats_api",
+      "../../modules/rtp_rtcp:rtp_rtcp",
+      "../../pc:pc_test_utils",
+      "../../test:field_trial",
+      "../../test:test_support",
+      "../../test/peer_scenario:peer_scenario",
+    ]
+  }
+}
diff --git a/third_party/libwebrtc/pc/scenario_tests/goog_cc_test.cc b/third_party/libwebrtc/pc/scenario_tests/goog_cc_test.cc
new file mode 100644
index 0000000000..ea96408ac7
--- /dev/null
+++ b/third_party/libwebrtc/pc/scenario_tests/goog_cc_test.cc
@@ -0,0 +1,109 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/stats/rtc_stats_collector_callback.h"
+#include "api/stats/rtcstats_objects.h"
+#include "pc/test/mock_peer_connection_observers.h"
+#include "test/field_trial.h"
+#include "test/gtest.h"
+#include "test/peer_scenario/peer_scenario.h"
+#include "test/peer_scenario/peer_scenario_client.h"
+
+namespace webrtc {
+namespace test {
+
+// TODO(terelius): Use fake encoder and enable on Android once
+// https://bugs.chromium.org/p/webrtc/issues/detail?id=11408 is fixed.
+#if defined(WEBRTC_ANDROID)
+#define MAYBE_NoBweChangeFromVideoUnmute DISABLED_NoBweChangeFromVideoUnmute
+#else
+#define MAYBE_NoBweChangeFromVideoUnmute NoBweChangeFromVideoUnmute
+#endif
+// End-to-end GoogCC scenario: verifies that muting/unmuting video (i.e.
+// switching between audio-only and video-only traffic) does not cause the
+// bandwidth estimate to drop, given the separate-audio-packets field trial.
+// BWE is sampled via getStats' available_outgoing_bitrate.
+TEST(GoogCcPeerScenarioTest, MAYBE_NoBweChangeFromVideoUnmute) {
+  // If transport wide sequence numbers are used for audio, and the call
+  // switches from audio only to video only, there will be a sharp change in
+  // packets sizes. This will create a change in propagation time which might be
+  // detected as an overuse. Using separate overuse detectors for audio and
+  // video avoids the issue.
+  std::string audio_twcc_trials("WebRTC-Audio-AlrProbing/Disabled/");
+  std::string separate_audio_video(
+      "WebRTC-Bwe-SeparateAudioPackets/"
+      "enabled:true,packet_threshold:15,time_threshold:1000ms/");
+  ScopedFieldTrials field_trial(audio_twcc_trials + separate_audio_video);
+  PeerScenario s(*test_info_);
+  auto* caller = s.CreateClient(PeerScenarioClient::Config());
+  auto* callee = s.CreateClient(PeerScenarioClient::Config());
+
+  // Emulated link: 350 kbps with 50 ms queueing delay in each direction.
+  BuiltInNetworkBehaviorConfig net_conf;
+  net_conf.link_capacity_kbps = 350;
+  net_conf.queue_delay_ms = 50;
+  auto send_node = s.net()->CreateEmulatedNode(net_conf);
+  auto ret_node = s.net()->CreateEmulatedNode(net_conf);
+
+  PeerScenarioClient::VideoSendTrackConfig video_conf;
+  video_conf.generator.squares_video->framerate = 15;
+  auto video = caller->CreateVideo("VIDEO", video_conf);
+  auto audio = caller->CreateAudio("AUDIO", cricket::AudioOptions());
+
+  // Start ICE and exchange SDP.
+  s.SimpleConnection(caller, callee, {send_node}, {ret_node});
+
+  // Limit the encoder bitrate to ensure that there are no actual BWE overuses.
+  ASSERT_EQ(caller->pc()->GetSenders().size(), 2u); // 2 senders.
+  int num_video_streams = 0;
+  for (auto& rtp_sender : caller->pc()->GetSenders()) {
+    auto parameters = rtp_sender->GetParameters();
+    ASSERT_EQ(parameters.encodings.size(), 1u); // 1 stream per sender.
+    for (auto& encoding_parameters : parameters.encodings) {
+      if (encoding_parameters.ssrc == video.sender->ssrc()) {
+        num_video_streams++;
+        // Cap video below the link rate so overuse cannot be triggered by
+        // the encoder itself.
+        encoding_parameters.max_bitrate_bps = 220000;
+        encoding_parameters.max_framerate = 15;
+      }
+    }
+    rtp_sender->SetParameters(parameters);
+  }
+  ASSERT_EQ(num_video_streams, 1); // Exactly 1 video stream.
+
+  // Polls getStats (blocking on the scenario's time controller) and returns
+  // the available outgoing bitrate from the ICE candidate-pair stats.
+  auto get_bwe = [&] {
+    auto callback =
+        rtc::make_ref_counted<webrtc::MockRTCStatsCollectorCallback>();
+    caller->pc()->GetStats(callback.get());
+    s.net()->time_controller()->Wait([&] { return callback->called(); });
+    auto stats =
+        callback->report()->GetStatsOfType<RTCIceCandidatePairStats>()[0];
+    return DataRate::BitsPerSec(*stats->available_outgoing_bitrate);
+  };
+
+  // Warm-up: let BWE converge before taking the baseline sample.
+  s.ProcessMessages(TimeDelta::Seconds(15));
+  const DataRate initial_bwe = get_bwe();
+  EXPECT_GE(initial_bwe, DataRate::KilobitsPerSec(300));
+
+  // 10 seconds audio only. Bandwidth should not drop.
+  video.capturer->Stop();
+  s.ProcessMessages(TimeDelta::Seconds(10));
+  EXPECT_GE(get_bwe(), initial_bwe);
+
+  // Resume video but stop audio. Bandwidth should not drop.
+  video.capturer->Start();
+  RTCError status = caller->pc()->RemoveTrackOrError(audio.sender);
+  ASSERT_TRUE(status.ok());
+  audio.track->set_enabled(false);
+  for (int i = 0; i < 10; i++) {
+    s.ProcessMessages(TimeDelta::Seconds(1));
+    EXPECT_GE(get_bwe(), initial_bwe);
+  }
+
+  caller->pc()->Close();
+  callee->pc()->Close();
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/sctp_data_channel.cc b/third_party/libwebrtc/pc/sctp_data_channel.cc
new file mode 100644
index 0000000000..8fdbf4cb92
--- /dev/null
+++ b/third_party/libwebrtc/pc/sctp_data_channel.cc
@@ -0,0 +1,1002 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/sctp_data_channel.h"
+
+#include <limits>
+#include <memory>
+#include <string>
+#include <utility>
+
+#include "media/sctp/sctp_transport_internal.h"
+#include "pc/proxy.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/system/unused.h"
+#include "rtc_base/thread.h"
+
+namespace webrtc {
+
+namespace {
+
+// Upper bound on data buffered for delivery before an observer is attached.
+// NOTE(review): could be constexpr; kept as-is to match upstream.
+static size_t kMaxQueuedReceivedDataBytes = 16 * 1024 * 1024;
+
+// Process-wide counter backing GenerateUniqueId(); atomic because data
+// channels can be created from multiple threads.
+static std::atomic<int> g_unique_id{0};
+
+// Returns a new process-unique internal id (starting at 1).
+int GenerateUniqueId() {
+  return ++g_unique_id;
+}
+
+// Define proxy for DataChannelInterface.
+// BYPASS_* methods run directly on the caller's thread (they only touch
+// immutable or internally synchronized state); PROXY_SECONDARY_* methods are
+// marshalled to the secondary (network) thread.
+BEGIN_PROXY_MAP(DataChannel)
+PROXY_PRIMARY_THREAD_DESTRUCTOR()
+BYPASS_PROXY_METHOD1(void, RegisterObserver, DataChannelObserver*)
+BYPASS_PROXY_METHOD0(void, UnregisterObserver)
+BYPASS_PROXY_CONSTMETHOD0(std::string, label)
+BYPASS_PROXY_CONSTMETHOD0(bool, reliable)
+BYPASS_PROXY_CONSTMETHOD0(bool, ordered)
+BYPASS_PROXY_CONSTMETHOD0(uint16_t, maxRetransmitTime)
+BYPASS_PROXY_CONSTMETHOD0(uint16_t, maxRetransmits)
+BYPASS_PROXY_CONSTMETHOD0(absl::optional<int>, maxRetransmitsOpt)
+BYPASS_PROXY_CONSTMETHOD0(absl::optional<int>, maxPacketLifeTime)
+BYPASS_PROXY_CONSTMETHOD0(std::string, protocol)
+BYPASS_PROXY_CONSTMETHOD0(bool, negotiated)
+// Can't bypass the proxy since the id may change.
+PROXY_SECONDARY_CONSTMETHOD0(int, id)
+BYPASS_PROXY_CONSTMETHOD0(Priority, priority)
+BYPASS_PROXY_CONSTMETHOD0(DataState, state)
+BYPASS_PROXY_CONSTMETHOD0(RTCError, error)
+PROXY_SECONDARY_CONSTMETHOD0(uint32_t, messages_sent)
+PROXY_SECONDARY_CONSTMETHOD0(uint64_t, bytes_sent)
+PROXY_SECONDARY_CONSTMETHOD0(uint32_t, messages_received)
+PROXY_SECONDARY_CONSTMETHOD0(uint64_t, bytes_received)
+PROXY_SECONDARY_CONSTMETHOD0(uint64_t, buffered_amount)
+PROXY_SECONDARY_METHOD0(void, Close)
+PROXY_SECONDARY_METHOD1(bool, Send, const DataBuffer&)
+BYPASS_PROXY_METHOD2(void,
+                     SendAsync,
+                     DataBuffer,
+                     absl::AnyInvocable<void(RTCError) &&>)
+END_PROXY_MAP(DataChannel)
+} // namespace
+
+// Normalizes a user-supplied DataChannelInit: decides the DCEP handshake
+// role, clears the id for in-band negotiation, and clamps legacy
+// maxRetransmits / maxRetransmitTime values into uint16_t range.
+InternalDataChannelInit::InternalDataChannelInit(const DataChannelInit& base)
+    : DataChannelInit(base), open_handshake_role(kOpener) {
+  // If the channel is externally negotiated, do not send the OPEN message.
+  if (base.negotiated) {
+    open_handshake_role = kNone;
+  } else {
+    // Datachannel is not externally negotiated. Ignore the id value.
+    // Specified in createDataChannel, WebRTC spec section 6.1 bullet 13.
+    id = -1;
+  }
+  // Backwards compatibility: If maxRetransmits or maxRetransmitTime
+  // are negative, the feature is not enabled.
+  // Values are clamped to a 16bit range.
+  if (maxRetransmits) {
+    if (*maxRetransmits < 0) {
+      RTC_LOG(LS_ERROR)
+          << "Accepting maxRetransmits < 0 for backwards compatibility";
+      maxRetransmits = absl::nullopt;
+    } else if (*maxRetransmits > std::numeric_limits<uint16_t>::max()) {
+      maxRetransmits = std::numeric_limits<uint16_t>::max();
+    }
+  }
+
+  if (maxRetransmitTime) {
+    if (*maxRetransmitTime < 0) {
+      RTC_LOG(LS_ERROR)
+          << "Accepting maxRetransmitTime < 0 for backwards compatibility";
+      maxRetransmitTime = absl::nullopt;
+    } else if (*maxRetransmitTime > std::numeric_limits<uint16_t>::max()) {
+      maxRetransmitTime = std::numeric_limits<uint16_t>::max();
+    }
+  }
+}
+
+// Validates the normalized config: id must be -1 (unset) or non-negative,
+// reliability parameters must be non-negative, and at most one of
+// maxRetransmits / maxRetransmitTime may be set (they are mutually
+// exclusive per the WebRTC spec).
+bool InternalDataChannelInit::IsValid() const {
+  if (id < -1)
+    return false;
+
+  if (maxRetransmits.has_value() && maxRetransmits.value() < 0)
+    return false;
+
+  if (maxRetransmitTime.has_value() && maxRetransmitTime.value() < 0)
+    return false;
+
+  // Only one of these can be set.
+  if (maxRetransmits.has_value() && maxRetransmitTime.has_value())
+    return false;
+
+  return true;
+}
+
+// Allocates the lowest free SCTP stream id with the parity mandated by the
+// DTLS role (even ids for the client, odd for the server) and records it as
+// used. Returns an invalid StreamId if all ids of that parity are taken.
+StreamId SctpSidAllocator::AllocateSid(rtc::SSLRole role) {
+  RTC_DCHECK_RUN_ON(&sequence_checker_);
+  int potential_sid = (role == rtc::SSL_CLIENT) ? 0 : 1;
+  while (potential_sid <= static_cast<int>(cricket::kMaxSctpSid)) {
+    StreamId sid(potential_sid);
+    // insert() returns {it, true} only if the sid was not already used.
+    if (used_sids_.insert(sid).second)
+      return sid;
+    potential_sid += 2;
+  }
+  RTC_LOG(LS_ERROR) << "SCTP sid allocation pool exhausted.";
+  return StreamId();
+}
+
+// Marks a caller-chosen sid (externally negotiated channels) as in use.
+// Returns false if the sid is invalid, out of range, or already taken.
+bool SctpSidAllocator::ReserveSid(StreamId sid) {
+  RTC_DCHECK_RUN_ON(&sequence_checker_);
+  if (!sid.HasValue() || sid.stream_id_int() > cricket::kMaxSctpSid)
+    return false;
+  return used_sids_.insert(sid).second;
+}
+
+// Returns a sid to the pool so it can be allocated or reserved again.
+// Releasing a sid that was never reserved is a harmless no-op.
+void SctpSidAllocator::ReleaseSid(StreamId sid) {
+  RTC_DCHECK_RUN_ON(&sequence_checker_);
+  used_sids_.erase(sid);
+}
+
+// A DataChannelObserver implementation that offers backwards compatibility with
+// implementations that aren't yet ready to be called back on the network
+// thread. This implementation posts events to the signaling thread where
+// events are delivered.
+// In the class, and together with the `SctpDataChannel` implementation, there's
+// special handling for the `state()` property whereby if that property is
+// queried on the channel object while inside an event callback, we return
+// the state that was active at the time the event was issued. This is to avoid
+// a problem with calling the `state()` getter on the proxy, which would do
+// a blocking call to the network thread, effectively flushing operations on
+// the network thread that could cause the state to change and eventually return
+// a misleading or arguably, wrong, state value to the callback implementation.
+// As a future improvement to the ObserverAdapter, we could do the same for
+// other properties that need to be read on the network thread. Eventually
+// all implementations should expect to be called on the network thread though
+// and the ObserverAdapter no longer be necessary.
+class SctpDataChannel::ObserverAdapter : public DataChannelObserver {
+ public:
+  explicit ObserverAdapter(
+      SctpDataChannel* channel,
+      rtc::scoped_refptr<PendingTaskSafetyFlag> signaling_safety)
+      : channel_(channel), signaling_safety_(std::move(signaling_safety)) {}
+
+  // True while a delegate callback is executing on the signaling thread
+  // (i.e. a CachedGetters instance is installed).
+  bool IsInsideCallback() const {
+    RTC_DCHECK_RUN_ON(signaling_thread());
+    return cached_getters_ != nullptr;
+  }
+
+  // State snapshot taken at event-issue time; only valid inside a callback.
+  DataChannelInterface::DataState cached_state() const {
+    RTC_DCHECK_RUN_ON(signaling_thread());
+    RTC_DCHECK(IsInsideCallback());
+    return cached_getters_->state();
+  }
+
+  RTCError cached_error() const {
+    RTC_DCHECK_RUN_ON(signaling_thread());
+    RTC_DCHECK(IsInsideCallback());
+    return cached_getters_->error();
+  }
+
+  // Swaps in a new delegate and resets the task-safety flag so tasks queued
+  // for a previous delegate are dropped.
+  void SetDelegate(DataChannelObserver* delegate) {
+    RTC_DCHECK_RUN_ON(signaling_thread());
+    delegate_ = delegate;
+    safety_.reset(PendingTaskSafetyFlag::CreateDetached());
+  }
+
+  // Ensures the adapter is destroyed on the signaling thread; if already
+  // there, the unique_ptr simply goes out of scope, otherwise destruction is
+  // deferred via a posted no-op task that owns the adapter.
+  static void DeleteOnSignalingThread(
+      std::unique_ptr<ObserverAdapter> observer) {
+    auto* signaling_thread = observer->signaling_thread();
+    if (!signaling_thread->IsCurrent())
+      signaling_thread->PostTask([observer = std::move(observer)]() {});
+  }
+
+ private:
+  // RAII snapshot of state()/error() captured on the network thread when an
+  // event is issued, then installed on the signaling thread for the duration
+  // of the delegate callback (see class-level comment above).
+  class CachedGetters {
+   public:
+    explicit CachedGetters(ObserverAdapter* adapter)
+        : adapter_(adapter),
+          cached_state_(adapter_->channel_->state()),
+          cached_error_(adapter_->channel_->error()) {
+      RTC_DCHECK_RUN_ON(adapter->network_thread());
+    }
+
+    ~CachedGetters() {
+      if (!was_dropped_) {
+        RTC_DCHECK_RUN_ON(adapter_->signaling_thread());
+        RTC_DCHECK_EQ(adapter_->cached_getters_, this);
+        adapter_->cached_getters_ = nullptr;
+      }
+    }
+
+    // Installs this snapshot as the active one; returns whether the delegate
+    // callback should actually run (delegate set and channel still alive).
+    bool PrepareForCallback() {
+      RTC_DCHECK_RUN_ON(adapter_->signaling_thread());
+      RTC_DCHECK(was_dropped_);
+      was_dropped_ = false;
+      adapter_->cached_getters_ = this;
+      return adapter_->delegate_ && adapter_->signaling_safety_->alive();
+    }
+
+    RTCError error() { return cached_error_; }
+    DataChannelInterface::DataState state() { return cached_state_; }
+
+   private:
+    ObserverAdapter* const adapter_;
+    bool was_dropped_ = true;
+    const DataChannelInterface::DataState cached_state_;
+    const RTCError cached_error_;
+  };
+
+  // The three DataChannelObserver events all follow the same pattern: snap
+  // state on the network thread, post to the signaling thread, and invoke the
+  // delegate there if it is still registered.
+  void OnStateChange() override {
+    RTC_DCHECK_RUN_ON(network_thread());
+    signaling_thread()->PostTask(
+        SafeTask(safety_.flag(),
+                 [this, cached_state = std::make_unique<CachedGetters>(this)] {
+                   RTC_DCHECK_RUN_ON(signaling_thread());
+                   if (cached_state->PrepareForCallback())
+                     delegate_->OnStateChange();
+                 }));
+  }
+
+  void OnMessage(const DataBuffer& buffer) override {
+    RTC_DCHECK_RUN_ON(network_thread());
+    signaling_thread()->PostTask(SafeTask(
+        safety_.flag(), [this, buffer = buffer,
+                         cached_state = std::make_unique<CachedGetters>(this)] {
+          RTC_DCHECK_RUN_ON(signaling_thread());
+          if (cached_state->PrepareForCallback())
+            delegate_->OnMessage(buffer);
+        }));
+  }
+
+  void OnBufferedAmountChange(uint64_t sent_data_size) override {
+    RTC_DCHECK_RUN_ON(network_thread());
+    signaling_thread()->PostTask(SafeTask(
+        safety_.flag(), [this, sent_data_size,
+                         cached_state = std::make_unique<CachedGetters>(this)] {
+          RTC_DCHECK_RUN_ON(signaling_thread());
+          if (cached_state->PrepareForCallback())
+            delegate_->OnBufferedAmountChange(sent_data_size);
+        }));
+  }
+
+  // The adapter itself is always safe to call on the network thread; it is
+  // the marshalling layer for delegates that are not.
+  bool IsOkToCallOnTheNetworkThread() override { return true; }
+
+  rtc::Thread* signaling_thread() const { return signaling_thread_; }
+  rtc::Thread* network_thread() const { return channel_->network_thread_; }
+
+  DataChannelObserver* delegate_ RTC_GUARDED_BY(signaling_thread()) = nullptr;
+  SctpDataChannel* const channel_;
+  // Make sure to keep our own signaling_thread_ pointer to avoid dereferencing
+  // `channel_` in the `RTC_DCHECK_RUN_ON` checks on the signaling thread.
+  rtc::Thread* const signaling_thread_{channel_->signaling_thread_};
+  ScopedTaskSafety safety_;
+  rtc::scoped_refptr<PendingTaskSafetyFlag> signaling_safety_;
+  CachedGetters* cached_getters_ RTC_GUARDED_BY(signaling_thread()) = nullptr;
+};
+
+// static
+// Factory for a ref-counted SctpDataChannel. DCHECKs that the (already
+// normalized) config is valid before constructing.
+rtc::scoped_refptr<SctpDataChannel> SctpDataChannel::Create(
+    rtc::WeakPtr<SctpDataChannelControllerInterface> controller,
+    const std::string& label,
+    bool connected_to_transport,
+    const InternalDataChannelInit& config,
+    rtc::Thread* signaling_thread,
+    rtc::Thread* network_thread) {
+  RTC_DCHECK(config.IsValid());
+  return rtc::make_ref_counted<SctpDataChannel>(
+      config, std::move(controller), label, connected_to_transport,
+      signaling_thread, network_thread);
+}
+
+// static
+// Wraps the channel in a thread-marshalling DataChannel proxy and installs
+// the ObserverAdapter used for observers that must run on the signaling
+// thread (see RegisterObserver).
+rtc::scoped_refptr<DataChannelInterface> SctpDataChannel::CreateProxy(
+    rtc::scoped_refptr<SctpDataChannel> channel,
+    rtc::scoped_refptr<PendingTaskSafetyFlag> signaling_safety) {
+  // Copy thread params to local variables before `std::move()`.
+  auto* signaling_thread = channel->signaling_thread_;
+  auto* network_thread = channel->network_thread_;
+  channel->observer_adapter_ = std::make_unique<ObserverAdapter>(
+      channel.get(), std::move(signaling_safety));
+  return DataChannelProxy::Create(signaling_thread, network_thread,
+                                  std::move(channel));
+}
+
+// Constructed on the network thread. Copies the (validated) config into
+// members, marks the transport-bound safety flag alive if a transport is
+// already attached, and derives the initial DCEP handshake state from the
+// configured role.
+SctpDataChannel::SctpDataChannel(
+    const InternalDataChannelInit& config,
+    rtc::WeakPtr<SctpDataChannelControllerInterface> controller,
+    const std::string& label,
+    bool connected_to_transport,
+    rtc::Thread* signaling_thread,
+    rtc::Thread* network_thread)
+    : signaling_thread_(signaling_thread),
+      network_thread_(network_thread),
+      id_n_(config.id),
+      internal_id_(GenerateUniqueId()),
+      label_(label),
+      protocol_(config.protocol),
+      max_retransmit_time_(config.maxRetransmitTime),
+      max_retransmits_(config.maxRetransmits),
+      priority_(config.priority),
+      negotiated_(config.negotiated),
+      ordered_(config.ordered),
+      observer_(nullptr),
+      controller_(std::move(controller)) {
+  RTC_DCHECK_RUN_ON(network_thread_);
+  // Since we constructed on the network thread we can't (yet) check the
+  // `controller_` pointer since doing so will trigger a thread check.
+  RTC_UNUSED(network_thread_);
+  RTC_DCHECK(config.IsValid());
+
+  if (connected_to_transport)
+    network_safety_->SetAlive();
+
+  switch (config.open_handshake_role) {
+    case InternalDataChannelInit::kNone:  // pre-negotiated
+      handshake_state_ = kHandshakeReady;
+      break;
+    case InternalDataChannelInit::kOpener:
+      handshake_state_ = kHandshakeShouldSendOpen;
+      break;
+    case InternalDataChannelInit::kAcker:
+      handshake_state_ = kHandshakeShouldSendAck;
+      break;
+  }
+}
+
+// The observer adapter (if created via CreateProxy) must die on the
+// signaling thread; hand it off rather than deleting it here.
+SctpDataChannel::~SctpDataChannel() {
+  if (observer_adapter_)
+    ObserverAdapter::DeleteOnSignalingThread(std::move(observer_adapter_));
+}
+
+// Registers the observer that receives data-channel events. May be called
+// from any thread (the proxy is bypassed). Observers that cannot run on the
+// network thread are wrapped in the ObserverAdapter so their callbacks are
+// delivered on the signaling thread; registration itself always completes
+// on the network thread, followed by delivery of any queued messages.
+void SctpDataChannel::RegisterObserver(DataChannelObserver* observer) {
+  // Note: at this point, we do not know on which thread we're being called
+  // from since this method bypasses the proxy. On Android in particular,
+  // registration methods are called from unknown threads.
+
+  // Check if we should set up an observer adapter that will make sure that
+  // callbacks are delivered on the signaling thread rather than directly
+  // on the network thread.
+  const auto* current_thread = rtc::Thread::Current();
+  // TODO(webrtc:11547): Eventually all DataChannelObserver implementations
+  // should be called on the network thread and IsOkToCallOnTheNetworkThread().
+  if (!observer->IsOkToCallOnTheNetworkThread()) {
+    RTC_LOG(LS_WARNING) << "DataChannelObserver - adapter needed";
+    auto prepare_observer = [&]() {
+      RTC_DCHECK(observer_adapter_) << "CreateProxy hasn't been called";
+      observer_adapter_->SetDelegate(observer);
+      return observer_adapter_.get();
+    };
+    // Instantiate the adapter in the right context and then substitute the
+    // observer pointer the SctpDataChannel will call back on, with the adapter.
+    if (signaling_thread_ == current_thread) {
+      observer = prepare_observer();
+    } else {
+      observer = signaling_thread_->BlockingCall(std::move(prepare_observer));
+    }
+  }
+
+  // Now do the observer registration on the network thread. In the common case,
+  // we'll do this asynchronously via `PostTask()`. For that reason we grab
+  // a reference to ourselves while the task is in flight. We can't use
+  // `SafeTask(network_safety_, ...)` for this since we can't assume that we
+  // have a transport (network_safety_ represents the transport connection).
+  rtc::scoped_refptr<SctpDataChannel> me(this);
+  auto register_observer = [me = std::move(me), observer = observer] {
+    RTC_DCHECK_RUN_ON(me->network_thread_);
+    me->observer_ = observer;
+    me->DeliverQueuedReceivedData();
+  };
+
+  if (network_thread_ == current_thread) {
+    register_observer();
+  } else {
+    network_thread_->BlockingCall(std::move(register_observer));
+  }
+}
+
+// Detaches the current observer. Clears `observer_` on the network thread and,
+// if an adapter exists, clears its delegate on the signaling thread so no
+// further callbacks are delivered after this call returns.
+void SctpDataChannel::UnregisterObserver() {
+  // Note: As with `RegisterObserver`, the proxy is being bypassed.
+  const auto* current_thread = rtc::Thread::Current();
+  // Callers must not be invoking the unregistration from the network thread
+  // (assuming a multi-threaded environment where we have a dedicated network
+  // thread). That would indicate non-network related work happening on the
+  // network thread or that unregistration is being done from within a callback
+  // (without unwinding the stack, which is a requirement).
+  // The network thread is not allowed to make blocking calls to the signaling
+  // thread, so that would blow up if attempted. Since we support an adapter
+  // for observers that are not safe to call on the network thread, we do
+  // need to check+free it on the signaling thread.
+  RTC_DCHECK(current_thread != network_thread_ ||
+             network_thread_ == signaling_thread_);
+
+  auto unregister_observer = [&] {
+    RTC_DCHECK_RUN_ON(network_thread_);
+    observer_ = nullptr;
+  };
+
+  if (current_thread == network_thread_) {
+    unregister_observer();
+  } else {
+    network_thread_->BlockingCall(std::move(unregister_observer));
+  }
+
+  // The adapter's delegate is cleared separately, on the signaling thread,
+  // since that is the thread the adapter delivers callbacks on.
+  auto clear_observer = [&]() {
+    if (observer_adapter_)
+      observer_adapter_->SetDelegate(nullptr);
+  };
+
+  if (current_thread != signaling_thread_) {
+    signaling_thread_->BlockingCall(std::move(clear_observer));
+  } else {
+    clear_observer();
+  }
+}
+
+// Accessors below that only read const members set at construction (label,
+// protocol, ordered, negotiated, retransmit limits, priority) are safe to
+// call from any thread. Accessors guarded by RTC_DCHECK_RUN_ON read mutable
+// network-thread state.
+std::string SctpDataChannel::label() const {
+  return label_;
+}
+
+bool SctpDataChannel::reliable() const {
+  // May be called on any thread.
+  return !max_retransmits_ && !max_retransmit_time_;
+}
+
+bool SctpDataChannel::ordered() const {
+  return ordered_;
+}
+
+// Legacy accessor: returns 0xFFFF as an "unset" sentinel when no limit was
+// configured (see maxPacketLifeTime() for the optional-based API).
+uint16_t SctpDataChannel::maxRetransmitTime() const {
+  return max_retransmit_time_ ? *max_retransmit_time_
+                              : static_cast<uint16_t>(-1);
+}
+
+// Legacy accessor: same sentinel convention as maxRetransmitTime().
+uint16_t SctpDataChannel::maxRetransmits() const {
+  return max_retransmits_ ? *max_retransmits_ : static_cast<uint16_t>(-1);
+}
+
+absl::optional<int> SctpDataChannel::maxPacketLifeTime() const {
+  return max_retransmit_time_;
+}
+
+absl::optional<int> SctpDataChannel::maxRetransmitsOpt() const {
+  return max_retransmits_;
+}
+
+std::string SctpDataChannel::protocol() const {
+  return protocol_;
+}
+
+bool SctpDataChannel::negotiated() const {
+  return negotiated_;
+}
+
+// Returns the SCTP stream id; network thread only since the id may be
+// assigned after construction via SetSctpSid_n().
+int SctpDataChannel::id() const {
+  RTC_DCHECK_RUN_ON(network_thread_);
+  return id_n_.stream_id_int();
+}
+
+Priority SctpDataChannel::priority() const {
+  return priority_ ? *priority_ : Priority::kLow;
+}
+
+// Bytes queued for sending but not yet handed to the transport.
+uint64_t SctpDataChannel::buffered_amount() const {
+  RTC_DCHECK_RUN_ON(network_thread_);
+  return queued_send_data_.byte_count();
+}
+
+// Begins the graceful closing procedure: transitions to kClosing and lets
+// UpdateState() drain queued data before resetting the stream. No-op if the
+// channel is already closing or closed.
+void SctpDataChannel::Close() {
+  RTC_DCHECK_RUN_ON(network_thread_);
+  if (state_ == kClosing || state_ == kClosed)
+    return;
+  SetState(kClosing);
+  // Will send queued data before beginning the underlying closing procedure.
+  UpdateState();
+}
+
+SctpDataChannel::DataState SctpDataChannel::state() const {
+  // Note: The proxy is bypassed for the `state()` accessor. This is to allow
+  // observer callbacks to query what the new state is from within a state
+  // update notification without having to do a blocking call to the network
+  // thread from within a callback. This also makes it so that the returned
+  // state is guaranteed to be the new state that provoked the state change
+  // notification, whereby a blocking call to the network thread might end up
+  // getting put behind other messages on the network thread and eventually
+  // fetch a different state value (since pending messages might cause the
+  // state to change in the meantime).
+  const auto* current_thread = rtc::Thread::Current();
+  if (current_thread == signaling_thread_ && observer_adapter_ &&
+      observer_adapter_->IsInsideCallback()) {
+    return observer_adapter_->cached_state();
+  }
+
+  auto return_state = [&] {
+    RTC_DCHECK_RUN_ON(network_thread_);
+    return state_;
+  };
+
+  return current_thread == network_thread_
+             ? return_state()
+             : network_thread_->BlockingCall(std::move(return_state));
+}
+
+// Same thread-handling strategy as `state()`: serves a cached value when
+// queried from inside a signaling-thread observer callback, otherwise reads
+// the live value on the network thread.
+RTCError SctpDataChannel::error() const {
+  const auto* current_thread = rtc::Thread::Current();
+  if (current_thread == signaling_thread_ && observer_adapter_ &&
+      observer_adapter_->IsInsideCallback()) {
+    return observer_adapter_->cached_error();
+  }
+
+  auto return_error = [&] {
+    RTC_DCHECK_RUN_ON(network_thread_);
+    return error_;
+  };
+
+  return current_thread == network_thread_
+             ? return_error()
+             : network_thread_->BlockingCall(std::move(return_error));
+}
+
+// Simple stat counters; all maintained and read on the network thread.
+uint32_t SctpDataChannel::messages_sent() const {
+  RTC_DCHECK_RUN_ON(network_thread_);
+  return messages_sent_;
+}
+
+uint64_t SctpDataChannel::bytes_sent() const {
+  RTC_DCHECK_RUN_ON(network_thread_);
+  return bytes_sent_;
+}
+
+uint32_t SctpDataChannel::messages_received() const {
+  RTC_DCHECK_RUN_ON(network_thread_);
+  return messages_received_;
+}
+
+uint64_t SctpDataChannel::bytes_received() const {
+  RTC_DCHECK_RUN_ON(network_thread_);
+  return bytes_received_;
+}
+
+// Legacy synchronous send. Returns false only for "not open" or "send buffer
+// full"; any other failure closes the channel internally but still reports
+// success here, matching spec behavior for SCTP data channels.
+bool SctpDataChannel::Send(const DataBuffer& buffer) {
+  RTC_DCHECK_RUN_ON(network_thread_);
+  RTCError err = SendImpl(buffer);
+  if (err.type() == RTCErrorType::INVALID_STATE ||
+      err.type() == RTCErrorType::RESOURCE_EXHAUSTED) {
+    return false;
+  }
+
+  // Always return true for SCTP DataChannel per the spec.
+  return true;
+}
+
+// RTC_RUN_ON(network_thread_);
+// Shared send path for Send() and SendAsync(). Records the outcome in
+// `error_` so that it is observable via error(). If data is already queued,
+// new data is appended to the queue to preserve ordering.
+RTCError SctpDataChannel::SendImpl(DataBuffer buffer) {
+  if (state_ != kOpen) {
+    error_ = RTCError(RTCErrorType::INVALID_STATE);
+    return error_;
+  }
+
+  // If the queue is non-empty, we're waiting for SignalReadyToSend,
+  // so just add to the end of the queue and keep waiting.
+  if (!queued_send_data_.Empty()) {
+    error_ = QueueSendDataMessage(buffer)
+                 ? RTCError::OK()
+                 : RTCError(RTCErrorType::RESOURCE_EXHAUSTED);
+    return error_;
+  }
+
+  return SendDataMessage(buffer, true);
+}
+
+// Asynchronous send: always posts to the network thread (guarded by
+// `network_safety_`, so the task is dropped if the transport goes away) and
+// reports the result through `on_complete` if one is provided.
+void SctpDataChannel::SendAsync(
+    DataBuffer buffer,
+    absl::AnyInvocable<void(RTCError) &&> on_complete) {
+  // Note: at this point, we do not know on which thread we're being called
+  // since this method bypasses the proxy. On Android the thread might be VM
+  // owned, on other platforms it might be the signaling thread, or in Chrome
+  // it can be the JS thread. We also don't know if it's consistently the same
+  // thread. So we always post to the network thread (even if the current thread
+  // might be the network thread - in theory a call could even come from within
+  // the `on_complete` callback).
+  network_thread_->PostTask(SafeTask(
+      network_safety_, [this, buffer = std::move(buffer),
+                        on_complete = std::move(on_complete)]() mutable {
+        RTC_DCHECK_RUN_ON(network_thread_);
+        RTCError err = SendImpl(std::move(buffer));
+        if (on_complete)
+          std::move(on_complete)(err);
+      }));
+}
+
+// Assigns the SCTP stream id. Must only be called once, before the channel
+// leaves the kConnecting state.
+void SctpDataChannel::SetSctpSid_n(StreamId sid) {
+  RTC_DCHECK_RUN_ON(network_thread_);
+  RTC_DCHECK(!id_n_.HasValue());
+  RTC_DCHECK(sid.HasValue());
+  RTC_DCHECK_NE(handshake_state_, kHandshakeWaitingForAck);
+  RTC_DCHECK_EQ(state_, kConnecting);
+  id_n_ = sid;
+}
+
+// Peer reset its outgoing stream (our incoming stream): step 4 of the SCTP
+// closing procedure described in the class comment.
+void SctpDataChannel::OnClosingProcedureStartedRemotely() {
+  RTC_DCHECK_RUN_ON(network_thread_);
+  if (state_ != kClosing && state_ != kClosed) {
+    // Don't bother sending queued data since the side that initiated the
+    // closure wouldn't receive it anyway. See crbug.com/559394 for a lengthy
+    // discussion about this.
+    queued_send_data_.Clear();
+    queued_control_data_.Clear();
+    // Just need to change state to kClosing, SctpTransport will handle the
+    // rest of the closing procedure and OnClosingProcedureComplete will be
+    // called later.
+    started_closing_procedure_ = true;
+    SetState(kClosing);
+  }
+}
+
+void SctpDataChannel::OnClosingProcedureComplete() {
+  RTC_DCHECK_RUN_ON(network_thread_);
+  // If the closing procedure is complete, we should have finished sending
+  // all pending data and transitioned to kClosing already.
+  RTC_DCHECK_EQ(state_, kClosing);
+  RTC_DCHECK(queued_send_data_.Empty());
+  SetState(kClosed);
+}
+
+// Marks the channel as connected to a transport (see network_safety_).
+void SctpDataChannel::OnTransportChannelCreated() {
+  RTC_DCHECK_RUN_ON(network_thread_);
+  network_safety_->SetAlive();
+}
+
+void SctpDataChannel::OnTransportChannelClosed(RTCError error) {
+  RTC_DCHECK_RUN_ON(network_thread_);
+  // The SctpTransport is unusable, which could come from multiple reasons:
+  // - the SCTP m= section was rejected
+  // - the DTLS transport is closed
+  // - the SCTP transport is closed
+  CloseAbruptlyWithError(std::move(error));
+}
+
+// Snapshot of the channel's identity and counters for stats reporting.
+DataChannelStats SctpDataChannel::GetStats() const {
+  RTC_DCHECK_RUN_ON(network_thread_);
+  DataChannelStats stats{internal_id_,        id(),         label(),
+                         protocol(),          state(),      messages_sent(),
+                         messages_received(), bytes_sent(), bytes_received()};
+  return stats;
+}
+
+// Handles an inbound SCTP message: control messages advance the OPEN/ACK
+// handshake; data messages are delivered to the observer, or queued (up to
+// kMaxQueuedReceivedDataBytes) if no observer is registered yet or the
+// channel isn't open.
+void SctpDataChannel::OnDataReceived(DataMessageType type,
+                                     const rtc::CopyOnWriteBuffer& payload) {
+  RTC_DCHECK_RUN_ON(network_thread_);
+
+  if (type == DataMessageType::kControl) {
+    if (handshake_state_ != kHandshakeWaitingForAck) {
+      // Ignore it if we are not expecting an ACK message.
+      RTC_LOG(LS_WARNING)
+          << "DataChannel received unexpected CONTROL message, sid = "
+          << id_n_.stream_id_int();
+      return;
+    }
+    if (ParseDataChannelOpenAckMessage(payload)) {
+      // We can send unordered as soon as we receive the ACK message.
+      handshake_state_ = kHandshakeReady;
+      RTC_LOG(LS_INFO) << "DataChannel received OPEN_ACK message, sid = "
+                       << id_n_.stream_id_int();
+    } else {
+      RTC_LOG(LS_WARNING)
+          << "DataChannel failed to parse OPEN_ACK message, sid = "
+          << id_n_.stream_id_int();
+    }
+    return;
+  }
+
+  RTC_DCHECK(type == DataMessageType::kBinary ||
+             type == DataMessageType::kText);
+
+  RTC_DLOG(LS_VERBOSE) << "DataChannel received DATA message, sid = "
+                       << id_n_.stream_id_int();
+  // We can send unordered as soon as we receive any DATA message since the
+  // remote side must have received the OPEN (and old clients do not send
+  // OPEN_ACK).
+  if (handshake_state_ == kHandshakeWaitingForAck) {
+    handshake_state_ = kHandshakeReady;
+  }
+
+  bool binary = (type == DataMessageType::kBinary);
+  auto buffer = std::make_unique<DataBuffer>(payload, binary);
+  if (state_ == kOpen && observer_) {
+    ++messages_received_;
+    bytes_received_ += buffer->size();
+    observer_->OnMessage(*buffer.get());
+  } else {
+    // No observer yet (or not open): buffer for later delivery, but close
+    // abruptly rather than buffering without bound.
+    if (queued_received_data_.byte_count() + payload.size() >
+        kMaxQueuedReceivedDataBytes) {
+      RTC_LOG(LS_ERROR) << "Queued received data exceeds the max buffer size.";
+
+      queued_received_data_.Clear();
+      CloseAbruptlyWithError(
+          RTCError(RTCErrorType::RESOURCE_EXHAUSTED,
+                   "Queued received data exceeds the max buffer size."));
+
+      return;
+    }
+    queued_received_data_.PushBack(std::move(buffer));
+  }
+}
+
+// Called once the transport can carry data for this channel: flushes queued
+// control and data messages, then re-evaluates the state machine.
+void SctpDataChannel::OnTransportReady() {
+  RTC_DCHECK_RUN_ON(network_thread_);
+  RTC_DCHECK(connected_to_transport());
+  RTC_DCHECK(id_n_.HasValue());
+
+  SendQueuedControlMessages();
+  SendQueuedDataMessages();
+
+  UpdateState();
+}
+
+// Immediate teardown: discards all queued data, records `error` and walks the
+// observable state through kClosing to kClosed (observers expect to see both
+// transitions). Also deactivates `network_safety_` so pending SafeTasks are
+// cancelled.
+void SctpDataChannel::CloseAbruptlyWithError(RTCError error) {
+  RTC_DCHECK_RUN_ON(network_thread_);
+
+  if (state_ == kClosed) {
+    return;
+  }
+
+  network_safety_->SetNotAlive();
+
+  // Closing abruptly means any queued data gets thrown away.
+  queued_send_data_.Clear();
+  queued_control_data_.Clear();
+
+  // Still go to "kClosing" before "kClosed", since observers may be expecting
+  // that.
+  SetState(kClosing);
+  error_ = std::move(error);
+  SetState(kClosed);
+}
+
+// Convenience wrapper that tags the error as a DATA_CHANNEL_FAILURE.
+void SctpDataChannel::CloseAbruptlyWithDataChannelFailure(
+    const std::string& message) {
+  RTC_DCHECK_RUN_ON(network_thread_);
+  RTCError error(RTCErrorType::OPERATION_ERROR_WITH_DATA, message);
+  error.set_error_detail(RTCErrorDetailType::DATA_CHANNEL_FAILURE);
+  CloseAbruptlyWithError(std::move(error));
+}
+
+// RTC_RUN_ON(network_thread_).
+void SctpDataChannel::UpdateState() {
+  // UpdateState determines what to do from a few state variables. Include
+  // all conditions required for each state transition here for
+  // clarity. OnTransportReady() will send any queued data and then invoke
+  // UpdateState().
+
+  switch (state_) {
+    case kConnecting: {
+      if (connected_to_transport() && controller_) {
+        // Drive the OPEN/ACK handshake (RFC 8832) according to the role
+        // chosen at construction, then open once the handshake no longer
+        // blocks sending.
+        if (handshake_state_ == kHandshakeShouldSendOpen) {
+          rtc::CopyOnWriteBuffer payload;
+          WriteDataChannelOpenMessage(label_, protocol_, priority_, ordered_,
+                                      max_retransmits_, max_retransmit_time_,
+                                      &payload);
+          SendControlMessage(payload);
+        } else if (handshake_state_ == kHandshakeShouldSendAck) {
+          rtc::CopyOnWriteBuffer payload;
+          WriteDataChannelOpenAckMessage(&payload);
+          SendControlMessage(payload);
+        }
+        if (handshake_state_ == kHandshakeReady ||
+            handshake_state_ == kHandshakeWaitingForAck) {
+          SetState(kOpen);
+          // If we have received buffers before the channel got writable.
+          // Deliver them now.
+          DeliverQueuedReceivedData();
+        }
+      } else {
+        RTC_DCHECK(!id_n_.HasValue());
+      }
+      break;
+    }
+    case kOpen: {
+      break;
+    }
+    case kClosing: {
+      if (connected_to_transport() && controller_) {
+        // Wait for all queued data to be sent before beginning the closing
+        // procedure.
+        if (queued_send_data_.Empty() && queued_control_data_.Empty()) {
+          // For SCTP data channels, we need to wait for the closing procedure
+          // to complete; after calling RemoveSctpDataStream,
+          // OnClosingProcedureComplete will end up called asynchronously
+          // afterwards.
+          if (!started_closing_procedure_ && id_n_.HasValue()) {
+            started_closing_procedure_ = true;
+            controller_->RemoveSctpDataStream(id_n_);
+          }
+        }
+      } else {
+        // When we're not connected to a transport, we'll transition
+        // directly to the `kClosed` state from here.
+        queued_send_data_.Clear();
+        queued_control_data_.Clear();
+        SetState(kClosed);
+      }
+      break;
+    }
+    case kClosed:
+      break;
+  }
+}
+
+// RTC_RUN_ON(network_thread_).
+// Transitions the state machine and notifies both the registered observer
+// and the controller. No-op if the state is unchanged.
+void SctpDataChannel::SetState(DataState state) {
+  if (state_ == state) {
+    return;
+  }
+
+  state_ = state;
+  if (observer_) {
+    observer_->OnStateChange();
+  }
+
+  if (controller_)
+    controller_->OnChannelStateChanged(this, state_);
+}
+
+// RTC_RUN_ON(network_thread_).
+// Drains messages that arrived before an observer was registered (or before
+// the channel opened), updating the receive counters as they're delivered.
+void SctpDataChannel::DeliverQueuedReceivedData() {
+  if (!observer_ || state_ != kOpen) {
+    return;
+  }
+
+  while (!queued_received_data_.Empty()) {
+    std::unique_ptr<DataBuffer> buffer = queued_received_data_.PopFront();
+    ++messages_received_;
+    bytes_received_ += buffer->size();
+    observer_->OnMessage(*buffer);
+  }
+}
+
+// RTC_RUN_ON(network_thread_).
+// Flushes the outbound queue in order; stops (and re-queues the failed
+// message at the front) as soon as a send doesn't succeed.
+void SctpDataChannel::SendQueuedDataMessages() {
+  if (queued_send_data_.Empty()) {
+    return;
+  }
+
+  RTC_DCHECK(state_ == kOpen || state_ == kClosing);
+
+  while (!queued_send_data_.Empty()) {
+    std::unique_ptr<DataBuffer> buffer = queued_send_data_.PopFront();
+    if (!SendDataMessage(*buffer, false).ok()) {
+      // Return the message to the front of the queue if sending is aborted.
+      queued_send_data_.PushFront(std::move(buffer));
+      break;
+    }
+  }
+}
+
+// RTC_RUN_ON(network_thread_).
+// Hands one message to the controller. On RESOURCE_EXHAUSTED the message may
+// be queued (if `queue_if_blocked`); any other failure — or a failed queue
+// attempt — closes the channel abruptly. The outcome is stored in `error_`.
+RTCError SctpDataChannel::SendDataMessage(const DataBuffer& buffer,
+                                          bool queue_if_blocked) {
+  SendDataParams send_params;
+  if (!controller_) {
+    error_ = RTCError(RTCErrorType::INVALID_STATE);
+    return error_;
+  }
+
+  send_params.ordered = ordered_;
+  // Send as ordered if it is still going through OPEN/ACK signaling.
+  if (handshake_state_ != kHandshakeReady && !ordered_) {
+    send_params.ordered = true;
+    RTC_DLOG(LS_VERBOSE)
+        << "Sending data as ordered for unordered DataChannel "
+           "because the OPEN_ACK message has not been received.";
+  }
+
+  send_params.max_rtx_count = max_retransmits_;
+  send_params.max_rtx_ms = max_retransmit_time_;
+  send_params.type =
+      buffer.binary ? DataMessageType::kBinary : DataMessageType::kText;
+
+  error_ = controller_->SendData(id_n_, send_params, buffer.data);
+  if (error_.ok()) {
+    ++messages_sent_;
+    bytes_sent_ += buffer.size();
+
+    if (observer_ && buffer.size() > 0) {
+      observer_->OnBufferedAmountChange(buffer.size());
+    }
+    return error_;
+  }
+
+  if (error_.type() == RTCErrorType::RESOURCE_EXHAUSTED) {
+    if (!queue_if_blocked)
+      return error_;
+
+    if (QueueSendDataMessage(buffer)) {
+      error_ = RTCError::OK();
+      return error_;
+    }
+  }
+  // Close the channel if the error is not SDR_BLOCK, or if queuing the
+  // message failed.
+  RTC_LOG(LS_ERROR) << "Closing the DataChannel due to a failure to send data, "
+                       "send_result = "
+                    << ToString(error_.type()) << ":" << error_.message();
+  CloseAbruptlyWithError(
+      RTCError(RTCErrorType::NETWORK_ERROR, "Failure to send data"));
+
+  return error_;
+}
+
+// RTC_RUN_ON(network_thread_).
+// Appends `buffer` to the send queue, enforcing MaxSendQueueSize(). Returns
+// false (and records RESOURCE_EXHAUSTED) if the message doesn't fit.
+bool SctpDataChannel::QueueSendDataMessage(const DataBuffer& buffer) {
+  size_t start_buffered_amount = queued_send_data_.byte_count();
+  if (start_buffered_amount + buffer.size() >
+      DataChannelInterface::MaxSendQueueSize()) {
+    RTC_LOG(LS_ERROR) << "Can't buffer any more data for the data channel.";
+    error_ = RTCError(RTCErrorType::RESOURCE_EXHAUSTED);
+    return false;
+  }
+  queued_send_data_.PushBack(std::make_unique<DataBuffer>(buffer));
+  return true;
+}
+
+// RTC_RUN_ON(network_thread_).
+// Re-sends control messages that were previously blocked. The queue is
+// swapped out first so messages re-queued by SendControlMessage (on another
+// RESOURCE_EXHAUSTED) land in a fresh queue rather than looping forever.
+void SctpDataChannel::SendQueuedControlMessages() {
+  PacketQueue control_packets;
+  control_packets.Swap(&queued_control_data_);
+
+  while (!control_packets.Empty()) {
+    std::unique_ptr<DataBuffer> buf = control_packets.PopFront();
+    SendControlMessage(buf->data);
+  }
+}
+
+// RTC_RUN_ON(network_thread_).
+// Sends one OPEN or OPEN_ACK handshake message and advances
+// `handshake_state_` accordingly. A blocked send is queued for retry; any
+// other failure closes the channel abruptly.
+bool SctpDataChannel::SendControlMessage(const rtc::CopyOnWriteBuffer& buffer) {
+  RTC_DCHECK(connected_to_transport());
+  RTC_DCHECK(id_n_.HasValue());
+  RTC_DCHECK(controller_);
+
+  bool is_open_message = handshake_state_ == kHandshakeShouldSendOpen;
+  RTC_DCHECK(!is_open_message || !negotiated_);
+
+  SendDataParams send_params;
+  // Send data as ordered before we receive any message from the remote peer to
+  // make sure the remote peer will not receive any data before it receives the
+  // OPEN message.
+  send_params.ordered = ordered_ || is_open_message;
+  send_params.type = DataMessageType::kControl;
+
+  RTCError err = controller_->SendData(id_n_, send_params, buffer);
+  if (err.ok()) {
+    RTC_DLOG(LS_VERBOSE) << "Sent CONTROL message on channel "
+                         << id_n_.stream_id_int();
+
+    if (handshake_state_ == kHandshakeShouldSendAck) {
+      handshake_state_ = kHandshakeReady;
+    } else if (handshake_state_ == kHandshakeShouldSendOpen) {
+      handshake_state_ = kHandshakeWaitingForAck;
+    }
+  } else if (err.type() == RTCErrorType::RESOURCE_EXHAUSTED) {
+    queued_control_data_.PushBack(std::make_unique<DataBuffer>(buffer, true));
+  } else {
+    RTC_LOG(LS_ERROR) << "Closing the DataChannel due to a failure to send"
+                         " the CONTROL message, send_result = "
+                      << ToString(err.type());
+    err.set_message("Failed to send a CONTROL message");
+    CloseAbruptlyWithError(err);
+  }
+  return err.ok();
+}
+
+// static
+// Test-only hook: resets the global internal-id counter so that the ids
+// produced by GenerateUniqueId() are predictable in tests.
+void SctpDataChannel::ResetInternalIdAllocatorForTesting(int new_value) {
+  g_unique_id = new_value;
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/sctp_data_channel.h b/third_party/libwebrtc/pc/sctp_data_channel.h
new file mode 100644
index 0000000000..13bebd4612
--- /dev/null
+++ b/third_party/libwebrtc/pc/sctp_data_channel.h
@@ -0,0 +1,305 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_SCTP_DATA_CHANNEL_H_
+#define PC_SCTP_DATA_CHANNEL_H_
+
+#include <stdint.h>
+
+#include <memory>
+#include <set>
+#include <string>
+
+#include "absl/types/optional.h"
+#include "api/data_channel_interface.h"
+#include "api/priority.h"
+#include "api/rtc_error.h"
+#include "api/scoped_refptr.h"
+#include "api/sequence_checker.h"
+#include "api/task_queue/pending_task_safety_flag.h"
+#include "api/transport/data_channel_transport_interface.h"
+#include "pc/data_channel_utils.h"
+#include "pc/sctp_utils.h"
+#include "rtc_base/containers/flat_set.h"
+#include "rtc_base/copy_on_write_buffer.h"
+#include "rtc_base/ssl_stream_adapter.h" // For SSLRole
+#include "rtc_base/system/no_unique_address.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/thread_annotations.h"
+#include "rtc_base/weak_ptr.h"
+
+namespace webrtc {
+
+class SctpDataChannel;
+
+// Interface that acts as a bridge from the data channel to the transport.
+// All methods in this interface need to be invoked on the network thread.
+// SctpDataChannel holds this via rtc::WeakPtr, so implementations may be
+// destroyed independently of outstanding channels.
+class SctpDataChannelControllerInterface {
+ public:
+  // Sends the data to the transport. The returned error is recorded by the
+  // channel and surfaced through DataChannelInterface::error().
+  virtual RTCError SendData(StreamId sid,
+                            const SendDataParams& params,
+                            const rtc::CopyOnWriteBuffer& payload) = 0;
+  // Adds the data channel SID to the transport for SCTP.
+  virtual void AddSctpDataStream(StreamId sid) = 0;
+  // Begins the closing procedure by sending an outgoing stream reset. Still
+  // need to wait for callbacks to tell when this completes.
+  virtual void RemoveSctpDataStream(StreamId sid) = 0;
+  // Notifies the controller of state changes.
+  virtual void OnChannelStateChanged(SctpDataChannel* data_channel,
+                                     DataChannelInterface::DataState state) = 0;
+
+ protected:
+  virtual ~SctpDataChannelControllerInterface() {}
+};
+
+// DataChannelInit extended with the in-band (RFC 8832) handshake role and an
+// optional SSL-role fallback used for stream id allocation.
+struct InternalDataChannelInit : public DataChannelInit {
+  enum OpenHandshakeRole { kOpener, kAcker, kNone };
+  // The default role is kOpener because the default `negotiated` is false.
+  InternalDataChannelInit() : open_handshake_role(kOpener) {}
+  explicit InternalDataChannelInit(const DataChannelInit& base);
+
+  // Does basic validation to determine if a data channel instance can be
+  // constructed using the configuration.
+  bool IsValid() const;
+
+  OpenHandshakeRole open_handshake_role;
+  // Optional fallback or backup flag from PC that's used for non-prenegotiated
+  // stream ids in situations where we cannot determine the SSL role from the
+  // transport for purposes of generating a stream ID.
+  // See: https://www.rfc-editor.org/rfc/rfc8832.html#name-protocol-overview
+  absl::optional<rtc::SSLRole> fallback_ssl_role;
+};
+
+// Helper class to allocate unique IDs for SCTP DataChannels.
+class SctpSidAllocator {
+ public:
+  SctpSidAllocator() = default;
+  // Gets the first unused odd/even id based on the DTLS role. If `role` is
+  // SSL_CLIENT, the allocated id starts from 0 and takes even numbers;
+  // otherwise, the id starts from 1 and takes odd numbers.
+  // If a `StreamId` cannot be allocated, `StreamId::HasValue()` will be false.
+  StreamId AllocateSid(rtc::SSLRole role);
+
+  // Attempts to reserve a specific sid. Returns false if it's unavailable.
+  bool ReserveSid(StreamId sid);
+
+  // Indicates that `sid` isn't in use any more, and is thus available again.
+  void ReleaseSid(StreamId sid);
+
+ private:
+  flat_set<StreamId> used_sids_ RTC_GUARDED_BY(&sequence_checker_);
+  // Detached so the checker binds to whichever sequence uses the allocator
+  // first rather than the constructing sequence.
+  RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_{
+      SequenceChecker::kDetached};
+};
+
+// SctpDataChannel is an implementation of the DataChannelInterface based on
+// SctpTransport. It provides an implementation of unreliable or
+// reliable data channels.
+
+// DataChannel states:
+// kConnecting: The channel has been created but the transport might not yet
+// be ready.
+// kOpen: The open handshake has been performed (if relevant) and the data
+// channel is able to send messages.
+// kClosing: DataChannelInterface::Close has been called, or the remote side
+// initiated the closing procedure, but the closing procedure has not
+// yet finished.
+// kClosed: The closing handshake is finished (possibly initiated from this
+//          side, possibly from the peer).
+//
+// How the closing procedure works for SCTP:
+// 1. Alice calls Close(), state changes to kClosing.
+// 2. Alice finishes sending any queued data.
+// 3. Alice calls RemoveSctpDataStream, sends outgoing stream reset.
+// 4. Bob receives incoming stream reset; OnClosingProcedureStartedRemotely
+// called.
+// 5. Bob sends outgoing stream reset.
+// 6. Alice receives incoming reset, Bob receives acknowledgement. Both receive
+// OnClosingProcedureComplete callback and transition to kClosed.
+class SctpDataChannel : public DataChannelInterface {
+ public:
+  static rtc::scoped_refptr<SctpDataChannel> Create(
+      rtc::WeakPtr<SctpDataChannelControllerInterface> controller,
+      const std::string& label,
+      bool connected_to_transport,
+      const InternalDataChannelInit& config,
+      rtc::Thread* signaling_thread,
+      rtc::Thread* network_thread);
+
+  // Instantiates an API proxy for a SctpDataChannel instance that will be
+  // handed out to external callers.
+  // The `signaling_safety` flag is used for the ObserverAdapter callback proxy
+  // which delivers callbacks on the signaling thread but must not deliver such
+  // callbacks after the peerconnection has been closed. The data controller
+  // will update the flag when closed, which will cancel any pending event
+  // notifications.
+  static rtc::scoped_refptr<DataChannelInterface> CreateProxy(
+      rtc::scoped_refptr<SctpDataChannel> channel,
+      rtc::scoped_refptr<PendingTaskSafetyFlag> signaling_safety);
+
+  void RegisterObserver(DataChannelObserver* observer) override;
+  void UnregisterObserver() override;
+
+  std::string label() const override;
+  bool reliable() const override;
+  bool ordered() const override;
+
+  // Backwards compatible accessors
+  uint16_t maxRetransmitTime() const override;
+  uint16_t maxRetransmits() const override;
+
+  absl::optional<int> maxPacketLifeTime() const override;
+  absl::optional<int> maxRetransmitsOpt() const override;
+  std::string protocol() const override;
+  bool negotiated() const override;
+  int id() const override;
+  Priority priority() const override;
+
+  uint64_t buffered_amount() const override;
+  void Close() override;
+  DataState state() const override;
+  RTCError error() const override;
+  uint32_t messages_sent() const override;
+  uint64_t bytes_sent() const override;
+  uint32_t messages_received() const override;
+  uint64_t bytes_received() const override;
+  bool Send(const DataBuffer& buffer) override;
+  void SendAsync(DataBuffer buffer,
+                 absl::AnyInvocable<void(RTCError) &&> on_complete) override;
+
+  // Close immediately, ignoring any queued data or closing procedure.
+  // This is called when the underlying SctpTransport is being destroyed.
+  // It is also called by the PeerConnection if SCTP ID assignment fails.
+  void CloseAbruptlyWithError(RTCError error);
+  // Specializations of CloseAbruptlyWithError
+  void CloseAbruptlyWithDataChannelFailure(const std::string& message);
+
+  // Called when the SctpTransport's ready to use. That can happen when we've
+  // finished negotiation, or if the channel was created after negotiation has
+  // already finished.
+  void OnTransportReady();
+
+  void OnDataReceived(DataMessageType type,
+                      const rtc::CopyOnWriteBuffer& payload);
+
+  // Sets the SCTP sid and adds to transport layer if not set yet. Should only
+  // be called once.
+  void SetSctpSid_n(StreamId sid);
+
+  // The remote side started the closing procedure by resetting its outgoing
+  // stream (our incoming stream). Sets state to kClosing.
+  void OnClosingProcedureStartedRemotely();
+  // The closing procedure is complete; both incoming and outgoing stream
+  // resets are done and the channel can transition to kClosed. Called
+  // asynchronously after RemoveSctpDataStream.
+  void OnClosingProcedureComplete();
+  // Called when the transport channel is created.
+  void OnTransportChannelCreated();
+  // Called when the transport channel is unusable.
+  // This method makes sure the DataChannel is disconnected and changes state
+  // to kClosed.
+  void OnTransportChannelClosed(RTCError error);
+
+  DataChannelStats GetStats() const;
+
+  // Returns a unique identifier that's guaranteed to always be available,
+  // doesn't change throughout SctpDataChannel's lifetime and is used for
+  // stats purposes (see also `GetStats()`).
+  int internal_id() const { return internal_id_; }
+
+  StreamId sid_n() const {
+    RTC_DCHECK_RUN_ON(network_thread_);
+    return id_n_;
+  }
+
+  // Reset the allocator for internal ID values for testing, so that
+  // the internal IDs generated are predictable. Test only.
+  static void ResetInternalIdAllocatorForTesting(int new_value);
+
+ protected:
+  SctpDataChannel(const InternalDataChannelInit& config,
+                  rtc::WeakPtr<SctpDataChannelControllerInterface> controller,
+                  const std::string& label,
+                  bool connected_to_transport,
+                  rtc::Thread* signaling_thread,
+                  rtc::Thread* network_thread);
+  ~SctpDataChannel() override;
+
+ private:
+  class ObserverAdapter;
+
+  // The OPEN(_ACK) signaling state.
+  enum HandshakeState {
+    kHandshakeInit,
+    kHandshakeShouldSendOpen,
+    kHandshakeShouldSendAck,
+    kHandshakeWaitingForAck,
+    kHandshakeReady
+  };
+
+  RTCError SendImpl(DataBuffer buffer) RTC_RUN_ON(network_thread_);
+  void UpdateState() RTC_RUN_ON(network_thread_);
+  void SetState(DataState state) RTC_RUN_ON(network_thread_);
+
+  void DeliverQueuedReceivedData() RTC_RUN_ON(network_thread_);
+
+  void SendQueuedDataMessages() RTC_RUN_ON(network_thread_);
+  RTCError SendDataMessage(const DataBuffer& buffer, bool queue_if_blocked)
+      RTC_RUN_ON(network_thread_);
+  bool QueueSendDataMessage(const DataBuffer& buffer)
+      RTC_RUN_ON(network_thread_);
+
+  void SendQueuedControlMessages() RTC_RUN_ON(network_thread_);
+  bool SendControlMessage(const rtc::CopyOnWriteBuffer& buffer)
+      RTC_RUN_ON(network_thread_);
+
+  // True while `network_safety_` is alive, i.e. while the channel is attached
+  // to a usable transport.
+  bool connected_to_transport() const RTC_RUN_ON(network_thread_) {
+    return network_safety_->alive();
+  }
+
+  rtc::Thread* const signaling_thread_;
+  rtc::Thread* const network_thread_;
+  StreamId id_n_ RTC_GUARDED_BY(network_thread_);
+  const int internal_id_;
+  const std::string label_;
+  const std::string protocol_;
+  const absl::optional<int> max_retransmit_time_;
+  const absl::optional<int> max_retransmits_;
+  const absl::optional<Priority> priority_;
+  const bool negotiated_;
+  const bool ordered_;
+
+  // Either the registered observer or the ObserverAdapter standing in for it.
+  DataChannelObserver* observer_ RTC_GUARDED_BY(network_thread_) = nullptr;
+  std::unique_ptr<ObserverAdapter> observer_adapter_;
+  DataState state_ RTC_GUARDED_BY(network_thread_) = kConnecting;
+  // Last send/transport error; surfaced through error().
+  RTCError error_ RTC_GUARDED_BY(network_thread_);
+  uint32_t messages_sent_ RTC_GUARDED_BY(network_thread_) = 0;
+  uint64_t bytes_sent_ RTC_GUARDED_BY(network_thread_) = 0;
+  uint32_t messages_received_ RTC_GUARDED_BY(network_thread_) = 0;
+  uint64_t bytes_received_ RTC_GUARDED_BY(network_thread_) = 0;
+  rtc::WeakPtr<SctpDataChannelControllerInterface> controller_
+      RTC_GUARDED_BY(network_thread_);
+  HandshakeState handshake_state_ RTC_GUARDED_BY(network_thread_) =
+      kHandshakeInit;
+  // Did we already start the graceful SCTP closing procedure?
+  bool started_closing_procedure_ RTC_GUARDED_BY(network_thread_) = false;
+  // Control messages that always have to get sent out before any queued
+  // data.
+  PacketQueue queued_control_data_ RTC_GUARDED_BY(network_thread_);
+  PacketQueue queued_received_data_ RTC_GUARDED_BY(network_thread_);
+  PacketQueue queued_send_data_ RTC_GUARDED_BY(network_thread_);
+  // Alive while connected to a transport; used to cancel in-flight SafeTasks
+  // when the transport goes away or the channel closes abruptly.
+  rtc::scoped_refptr<PendingTaskSafetyFlag> network_safety_ =
+      PendingTaskSafetyFlag::CreateDetachedInactive();
+};
+
+} // namespace webrtc
+
+#endif // PC_SCTP_DATA_CHANNEL_H_
diff --git a/third_party/libwebrtc/pc/sctp_transport.cc b/third_party/libwebrtc/pc/sctp_transport.cc
new file mode 100644
index 0000000000..7f55e39d9e
--- /dev/null
+++ b/third_party/libwebrtc/pc/sctp_transport.cc
@@ -0,0 +1,203 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/sctp_transport.h"
+
+#include <algorithm>
+#include <utility>
+
+#include "absl/types/optional.h"
+#include "api/dtls_transport_interface.h"
+#include "api/sequence_checker.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+
+// Takes ownership of the internal (cricket) SCTP transport and binds this
+// wrapper to the thread it is constructed on (the network thread when used
+// from PeerConnection).
+SctpTransport::SctpTransport(
+    std::unique_ptr<cricket::SctpTransportInternal> internal)
+    : owner_thread_(rtc::Thread::Current()),
+      info_(SctpTransportState::kNew),
+      internal_sctp_transport_(std::move(internal)) {
+  RTC_DCHECK(internal_sctp_transport_.get());
+  // Forward the internal transport's "association communication up" signal
+  // into this wrapper so the state change reaches the registered observer.
+  internal_sctp_transport_->SetOnConnectedCallback(
+      [this]() { OnAssociationChangeCommunicationUp(); });
+
+  // NOTE(review): `dtls_transport_` is only ever assigned in
+  // SetDtlsTransport(), so it is always null here and the kConnecting branch
+  // looks unreachable from the constructor — confirm before simplifying.
+  if (dtls_transport_) {
+    UpdateInformation(SctpTransportState::kConnecting);
+  } else {
+    UpdateInformation(SctpTransportState::kNew);
+  }
+}
+
+// Destruction is only safe on the owner thread unless Clear() has already
+// released the internal transport (see the DCHECK below).
+SctpTransport::~SctpTransport() {
+  // We depend on the network thread to call Clear() before dropping
+  // its last reference to this object.
+  RTC_DCHECK(owner_thread_->IsCurrent() || !internal_sctp_transport_);
+}
+
+// Returns a snapshot of the current transport state. Callable from any
+// thread: off-thread callers are marshalled to `owner_thread_` with a
+// blocking re-entrant call.
+SctpTransportInformation SctpTransport::Information() const {
+  // TODO(tommi): Update PeerConnection::GetSctpTransport to hand out a proxy
+  // to the transport so that we can be sure that methods get called on the
+  // expected thread. Chromium currently calls this method from
+  // TransceiverStateSurfacer.
+  if (!owner_thread_->IsCurrent()) {
+    return owner_thread_->BlockingCall([this] { return Information(); });
+  }
+  RTC_DCHECK_RUN_ON(owner_thread_);
+  return info_;
+}
+
+// Registers the single state-change observer. The observer is not owned;
+// re-registering without UnregisterObserver() first is a programming error
+// (enforced by the DCHECK).
+void SctpTransport::RegisterObserver(SctpTransportObserverInterface* observer) {
+  RTC_DCHECK_RUN_ON(owner_thread_);
+  RTC_DCHECK(observer);
+  RTC_DCHECK(!observer_);
+  observer_ = observer;
+}
+
+// Drops the observer pointer; subsequent state changes are not reported.
+void SctpTransport::UnregisterObserver() {
+  RTC_DCHECK_RUN_ON(owner_thread_);
+  observer_ = nullptr;
+}
+
+// Opens an SCTP stream for the given channel id. OpenStream()'s boolean
+// result is ignored here; this always reports success.
+RTCError SctpTransport::OpenChannel(int channel_id) {
+  RTC_DCHECK_RUN_ON(owner_thread_);
+  RTC_DCHECK(internal_sctp_transport_);
+  internal_sctp_transport_->OpenStream(channel_id);
+  return RTCError::OK();
+}
+
+// Forwards an outgoing message to the internal SCTP transport and returns
+// its result. Must be called on the owner (network) thread.
+RTCError SctpTransport::SendData(int channel_id,
+                                 const SendDataParams& params,
+                                 const rtc::CopyOnWriteBuffer& buffer) {
+  RTC_DCHECK_RUN_ON(owner_thread_);
+  // Guard against use after Clear(), consistent with the other
+  // DataChannelTransportInterface methods (OpenChannel, CloseChannel, ...),
+  // which all DCHECK the internal transport before dereferencing it.
+  RTC_DCHECK(internal_sctp_transport_);
+  return internal_sctp_transport_->SendData(channel_id, params, buffer);
+}
+
+// Initiates the SCTP stream-reset (close) procedure for the channel.
+// ResetStream()'s boolean result is ignored; this always reports success.
+RTCError SctpTransport::CloseChannel(int channel_id) {
+  RTC_DCHECK_RUN_ON(owner_thread_);
+  RTC_DCHECK(internal_sctp_transport_);
+  internal_sctp_transport_->ResetStream(channel_id);
+  return RTCError::OK();
+}
+
+// Installs (or clears, with nullptr) the sink that receives incoming data
+// channel messages from the internal transport.
+void SctpTransport::SetDataSink(DataChannelSink* sink) {
+  RTC_DCHECK_RUN_ON(owner_thread_);
+  RTC_DCHECK(internal_sctp_transport_);
+  internal_sctp_transport_->SetDataChannelSink(sink);
+}
+
+// Reports whether the internal transport currently accepts outgoing data.
+bool SctpTransport::IsReadyToSend() const {
+  RTC_DCHECK_RUN_ON(owner_thread_);
+  RTC_DCHECK(internal_sctp_transport_);
+  return internal_sctp_transport_->ReadyToSendData();
+}
+
+// Returns the associated DTLS transport wrapper (may be null before
+// SetDtlsTransport() has been called).
+rtc::scoped_refptr<DtlsTransportInterface> SctpTransport::dtls_transport()
+    const {
+  RTC_DCHECK_RUN_ON(owner_thread_);
+  return dtls_transport_;
+}
+
+// Internal functions
+// Tears down the transport on the owner thread: deletes the internal SCTP
+// transport, drops (but does not delete) the DTLS transport reference, and
+// moves the reported state to kClosed. Must run before the last reference
+// to this object is released (see the destructor DCHECK).
+void SctpTransport::Clear() {
+  RTC_DCHECK_RUN_ON(owner_thread_);
+  RTC_DCHECK(internal());
+  // Note that we delete internal_sctp_transport_, but
+  // only drop the reference to dtls_transport_.
+  dtls_transport_ = nullptr;
+  internal_sctp_transport_ = nullptr;
+  UpdateInformation(SctpTransportState::kClosed);
+}
+
+// Attaches (or detaches, with null) the DTLS transport that carries the SCTP
+// association, wires DTLS state changes into OnDtlsStateChange(), and bumps
+// the reported state from kNew to kConnecting on first attach.
+void SctpTransport::SetDtlsTransport(
+    rtc::scoped_refptr<DtlsTransport> transport) {
+  RTC_DCHECK_RUN_ON(owner_thread_);
+  SctpTransportState next_state = info_.state();
+  dtls_transport_ = transport;
+  if (internal_sctp_transport_) {
+    if (transport) {
+      internal_sctp_transport_->SetDtlsTransport(transport->internal());
+
+      // NOTE(review): each attach subscribes again; if the same transport can
+      // be passed twice this would add a second subscription — confirm callers
+      // only attach once.
+      transport->internal()->SubscribeDtlsTransportState(
+          [this](cricket::DtlsTransportInternal* transport,
+                 DtlsTransportState state) {
+            OnDtlsStateChange(transport, state);
+          });
+      if (info_.state() == SctpTransportState::kNew) {
+        next_state = SctpTransportState::kConnecting;
+      }
+    } else {
+      internal_sctp_transport_->SetDtlsTransport(nullptr);
+    }
+  }
+
+  UpdateInformation(next_state);
+}
+
+// Pushes the negotiated SCTP parameters down to the internal transport,
+// recording `max_message_size` in the published information first. On
+// failure the transport is moved to kClosed.
+void SctpTransport::Start(int local_port,
+                          int remote_port,
+                          int max_message_size) {
+  RTC_DCHECK_RUN_ON(owner_thread_);
+  info_ = SctpTransportInformation(info_.state(), info_.dtls_transport(),
+                                   max_message_size, info_.MaxChannels());
+
+  if (!internal()->Start(local_port, remote_port, max_message_size)) {
+    RTC_LOG(LS_ERROR) << "Failed to push down SCTP parameters, closing.";
+    UpdateInformation(SctpTransportState::kClosed);
+  }
+}
+
+// Rebuilds `info_` with the given state (preserving max message size and max
+// channels) and notifies the observer iff the state actually changed.
+void SctpTransport::UpdateInformation(SctpTransportState state) {
+  RTC_DCHECK_RUN_ON(owner_thread_);
+  bool must_send_update = (state != info_.state());
+  // TODO(https://bugs.webrtc.org/10358): Update max channels from internal
+  // SCTP transport when available.
+  // The previous `if (internal_sctp_transport_)` branch was byte-identical
+  // to its else branch, so the dead conditional has been collapsed.
+  info_ = SctpTransportInformation(
+      state, dtls_transport_, info_.MaxMessageSize(), info_.MaxChannels());
+
+  if (observer_ && must_send_update) {
+    observer_->OnStateChange(info_);
+  }
+}
+
+// Invoked (via the callback installed in the constructor) when the SCTP
+// association reaches "communication up". Records the negotiated channel
+// limit — the minimum of outbound and inbound stream counts, when both are
+// known — and transitions to kConnected.
+void SctpTransport::OnAssociationChangeCommunicationUp() {
+  RTC_DCHECK_RUN_ON(owner_thread_);
+  RTC_DCHECK(internal_sctp_transport_);
+  if (internal_sctp_transport_->max_outbound_streams() &&
+      internal_sctp_transport_->max_inbound_streams()) {
+    int max_channels =
+        std::min(*(internal_sctp_transport_->max_outbound_streams()),
+                 *(internal_sctp_transport_->max_inbound_streams()));
+    // Record max channels.
+    info_ = SctpTransportInformation(info_.state(), info_.dtls_transport(),
+                                     info_.MaxMessageSize(), max_channels);
+  }
+
+  UpdateInformation(SctpTransportState::kConnected);
+}
+
+// DTLS state subscription callback (installed in SetDtlsTransport). A closed
+// or failed DTLS transport closes this SCTP transport. The CHECK verifies the
+// notification comes from the transport we are currently attached to; note it
+// dereferences `dtls_transport_`, which is non-null while the subscription is
+// active.
+void SctpTransport::OnDtlsStateChange(cricket::DtlsTransportInternal* transport,
+                                      DtlsTransportState state) {
+  RTC_DCHECK_RUN_ON(owner_thread_);
+  RTC_CHECK(transport == dtls_transport_->internal());
+  if (state == DtlsTransportState::kClosed ||
+      state == DtlsTransportState::kFailed) {
+    UpdateInformation(SctpTransportState::kClosed);
+    // TODO(http://bugs.webrtc.org/11090): Close all the data channels
+  }
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/sctp_transport.h b/third_party/libwebrtc/pc/sctp_transport.h
new file mode 100644
index 0000000000..35e7656100
--- /dev/null
+++ b/third_party/libwebrtc/pc/sctp_transport.h
@@ -0,0 +1,101 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_SCTP_TRANSPORT_H_
+#define PC_SCTP_TRANSPORT_H_
+
+#include <memory>
+
+#include "api/dtls_transport_interface.h"
+#include "api/scoped_refptr.h"
+#include "api/sctp_transport_interface.h"
+#include "api/sequence_checker.h"
+#include "api/transport/data_channel_transport_interface.h"
+#include "media/sctp/sctp_transport_internal.h"
+#include "p2p/base/dtls_transport_internal.h"
+#include "pc/dtls_transport.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/thread_annotations.h"
+
+namespace webrtc {
+
+// This implementation wraps a cricket::SctpTransport, and takes
+// ownership of it.
+// This object must be constructed and updated on the networking thread,
+// the same thread as the one the cricket::SctpTransportInternal object
+// lives on.
+class SctpTransport : public SctpTransportInterface,
+                      public DataChannelTransportInterface {
+ public:
+  // Takes ownership of `internal`; must be constructed on the thread the
+  // internal transport lives on (the network thread in PeerConnection).
+  explicit SctpTransport(
+      std::unique_ptr<cricket::SctpTransportInternal> internal);
+
+  // SctpTransportInterface
+  rtc::scoped_refptr<DtlsTransportInterface> dtls_transport() const override;
+  SctpTransportInformation Information() const override;
+  void RegisterObserver(SctpTransportObserverInterface* observer) override;
+  void UnregisterObserver() override;
+
+  // DataChannelTransportInterface
+  RTCError OpenChannel(int channel_id) override;
+  RTCError SendData(int channel_id,
+                    const SendDataParams& params,
+                    const rtc::CopyOnWriteBuffer& buffer) override;
+  RTCError CloseChannel(int channel_id) override;
+  void SetDataSink(DataChannelSink* sink) override;
+  bool IsReadyToSend() const override;
+
+  // Internal functions
+  // Deletes the internal transport and moves to kClosed; must be called on
+  // the owner thread before the last reference to this object is dropped.
+  void Clear();
+  void SetDtlsTransport(rtc::scoped_refptr<DtlsTransport>);
+  // Initialize the cricket::SctpTransport. This can be called from
+  // the signaling thread.
+  void Start(int local_port, int remote_port, int max_message_size);
+
+  // TODO(https://bugs.webrtc.org/10629): Move functions that need
+  // internal() to be functions on the webrtc::SctpTransport interface,
+  // and make the internal() function private.
+  cricket::SctpTransportInternal* internal() {
+    RTC_DCHECK_RUN_ON(owner_thread_);
+    return internal_sctp_transport_.get();
+  }
+
+  const cricket::SctpTransportInternal* internal() const {
+    RTC_DCHECK_RUN_ON(owner_thread_);
+    return internal_sctp_transport_.get();
+  }
+
+ protected:
+  // Protected: lifetime is managed by reference counting.
+  ~SctpTransport() override;
+
+ private:
+  void UpdateInformation(SctpTransportState state);
+  // NOTE(review): OnInternalReadyToSendData,
+  // OnInternalClosingProcedureStartedRemotely and
+  // OnInternalClosingProcedureComplete are declared here but have no
+  // definition in sctp_transport.cc — confirm they are vestigial and can be
+  // removed.
+  void OnInternalReadyToSendData();
+  void OnAssociationChangeCommunicationUp();
+  void OnInternalClosingProcedureStartedRemotely(int sid);
+  void OnInternalClosingProcedureComplete(int sid);
+  void OnDtlsStateChange(cricket::DtlsTransportInternal* transport,
+                         DtlsTransportState state);
+
+  // NOTE: `owner_thread_` is the thread that the SctpTransport object is
+  // constructed on. In the context of PeerConnection, it's the network thread.
+  rtc::Thread* const owner_thread_;
+  SctpTransportInformation info_ RTC_GUARDED_BY(owner_thread_);
+  std::unique_ptr<cricket::SctpTransportInternal> internal_sctp_transport_
+      RTC_GUARDED_BY(owner_thread_);
+  // Not owned; at most one observer at a time (see RegisterObserver).
+  SctpTransportObserverInterface* observer_ RTC_GUARDED_BY(owner_thread_) =
+      nullptr;
+  rtc::scoped_refptr<DtlsTransport> dtls_transport_
+      RTC_GUARDED_BY(owner_thread_);
+};
+
+} // namespace webrtc
+#endif // PC_SCTP_TRANSPORT_H_
diff --git a/third_party/libwebrtc/pc/sctp_transport_unittest.cc b/third_party/libwebrtc/pc/sctp_transport_unittest.cc
new file mode 100644
index 0000000000..d18543f20c
--- /dev/null
+++ b/third_party/libwebrtc/pc/sctp_transport_unittest.cc
@@ -0,0 +1,216 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/sctp_transport.h"
+
+#include <utility>
+#include <vector>
+
+#include "absl/memory/memory.h"
+#include "absl/types/optional.h"
+#include "api/dtls_transport_interface.h"
+#include "api/transport/data_channel_transport_interface.h"
+#include "media/base/media_channel.h"
+#include "p2p/base/fake_dtls_transport.h"
+#include "p2p/base/p2p_constants.h"
+#include "p2p/base/packet_transport_internal.h"
+#include "pc/dtls_transport.h"
+#include "rtc_base/copy_on_write_buffer.h"
+#include "rtc_base/gunit.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+constexpr int kDefaultTimeout = 1000; // milliseconds
+constexpr int kTestMaxSctpStreams = 1234;
+
+using cricket::FakeDtlsTransport;
+using ::testing::ElementsAre;
+
+namespace webrtc {
+
+namespace {
+
+// Test double for cricket::SctpTransportInternal: records the connected
+// callback so tests can fire it on demand, and lets tests stub the
+// max-stream counts used for MaxChannels computation.
+class FakeCricketSctpTransport : public cricket::SctpTransportInternal {
+ public:
+  void SetOnConnectedCallback(std::function<void()> callback) override {
+    on_connected_callback_ = std::move(callback);
+  }
+  void SetDataChannelSink(DataChannelSink* sink) override {}
+  void SetDtlsTransport(rtc::PacketTransportInternal* transport) override {}
+  bool Start(int local_port, int remote_port, int max_message_size) override {
+    return true;
+  }
+  bool OpenStream(int sid) override { return true; }
+  bool ResetStream(int sid) override { return true; }
+  RTCError SendData(int sid,
+                    const SendDataParams& params,
+                    const rtc::CopyOnWriteBuffer& payload) override {
+    return RTCError::OK();
+  }
+  bool ReadyToSendData() override { return true; }
+  void set_debug_name_for_testing(const char* debug_name) override {}
+  int max_message_size() const override { return 0; }
+  absl::optional<int> max_outbound_streams() const override {
+    return max_outbound_streams_;
+  }
+  absl::optional<int> max_inbound_streams() const override {
+    return max_inbound_streams_;
+  }
+
+  // Simulates the SCTP association reaching "communication up".
+  void SendSignalAssociationChangeCommunicationUp() {
+    ASSERT_TRUE(on_connected_callback_);
+    on_connected_callback_();
+  }
+
+  void set_max_outbound_streams(int streams) {
+    max_outbound_streams_ = streams;
+  }
+  void set_max_inbound_streams(int streams) { max_inbound_streams_ = streams; }
+
+ private:
+  absl::optional<int> max_outbound_streams_;
+  absl::optional<int> max_inbound_streams_;
+  std::function<void()> on_connected_callback_;
+};
+
+} // namespace
+
+// Observer that records every state transition it sees, for later
+// inspection by the tests.
+class TestSctpTransportObserver : public SctpTransportObserverInterface {
+ public:
+  TestSctpTransportObserver() : info_(SctpTransportState::kNew) {}
+
+  void OnStateChange(SctpTransportInformation info) override {
+    info_ = info;
+    states_.push_back(info.state());
+  }
+
+  // Most recently observed state, or kNew if no update has arrived yet.
+  SctpTransportState State() {
+    if (states_.size() > 0) {
+      return states_[states_.size() - 1];
+    } else {
+      return SctpTransportState::kNew;
+    }
+  }
+
+  const std::vector<SctpTransportState>& States() { return states_; }
+
+  const SctpTransportInformation LastReceivedInformation() { return info_; }
+
+ private:
+  std::vector<SctpTransportState> states_;
+  SctpTransportInformation info_;
+};
+
+// Fixture wiring a SctpTransport to a FakeCricketSctpTransport and a fake
+// DTLS transport, with helpers to drive the handshake.
+class SctpTransportTest : public ::testing::Test {
+ public:
+  SctpTransport* transport() { return transport_.get(); }
+  SctpTransportObserverInterface* observer() { return &observer_; }
+
+  void CreateTransport() {
+    auto cricket_sctp_transport =
+        absl::WrapUnique(new FakeCricketSctpTransport());
+    transport_ =
+        rtc::make_ref_counted<SctpTransport>(std::move(cricket_sctp_transport));
+  }
+
+  void AddDtlsTransport() {
+    std::unique_ptr<cricket::DtlsTransportInternal> cricket_transport =
+        std::make_unique<FakeDtlsTransport>(
+            "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP);
+    dtls_transport_ =
+        rtc::make_ref_counted<DtlsTransport>(std::move(cricket_transport));
+    transport_->SetDtlsTransport(dtls_transport_);
+  }
+
+  // Stubs asymmetric stream limits, then fires "communication up".
+  void CompleteSctpHandshake() {
+    // The computed MaxChannels shall be the minimum of the outgoing
+    // and incoming # of streams.
+    CricketSctpTransport()->set_max_outbound_streams(kTestMaxSctpStreams);
+    CricketSctpTransport()->set_max_inbound_streams(kTestMaxSctpStreams + 1);
+    CricketSctpTransport()->SendSignalAssociationChangeCommunicationUp();
+  }
+
+  FakeCricketSctpTransport* CricketSctpTransport() {
+    return static_cast<FakeCricketSctpTransport*>(transport_->internal());
+  }
+
+  rtc::AutoThread main_thread_;
+  rtc::scoped_refptr<SctpTransport> transport_;
+  rtc::scoped_refptr<DtlsTransport> dtls_transport_;
+  TestSctpTransportObserver observer_;
+};
+
+// Clear() must release the internal transport and move the state to kClosed.
+TEST(SctpTransportSimpleTest, CreateClearDelete) {
+  rtc::AutoThread main_thread;
+  std::unique_ptr<cricket::SctpTransportInternal> fake_cricket_sctp_transport =
+      absl::WrapUnique(new FakeCricketSctpTransport());
+  rtc::scoped_refptr<SctpTransport> sctp_transport =
+      rtc::make_ref_counted<SctpTransport>(
+          std::move(fake_cricket_sctp_transport));
+  ASSERT_TRUE(sctp_transport->internal());
+  ASSERT_EQ(SctpTransportState::kNew, sctp_transport->Information().state());
+  sctp_transport->Clear();
+  ASSERT_FALSE(sctp_transport->internal());
+  ASSERT_EQ(SctpTransportState::kClosed, sctp_transport->Information().state());
+}
+
+// The observer must see kConnecting (DTLS attach) then kConnected (handshake).
+TEST_F(SctpTransportTest, EventsObservedWhenConnecting) {
+  CreateTransport();
+  transport()->RegisterObserver(observer());
+  AddDtlsTransport();
+  CompleteSctpHandshake();
+  ASSERT_EQ_WAIT(SctpTransportState::kConnected, observer_.State(),
+                 kDefaultTimeout);
+  EXPECT_THAT(observer_.States(), ElementsAre(SctpTransportState::kConnecting,
+                                              SctpTransportState::kConnected));
+}
+
+// Clearing a connected transport must be reported to the observer as kClosed.
+TEST_F(SctpTransportTest, CloseWhenClearing) {
+  CreateTransport();
+  transport()->RegisterObserver(observer());
+  AddDtlsTransport();
+  CompleteSctpHandshake();
+  ASSERT_EQ_WAIT(SctpTransportState::kConnected, observer_.State(),
+                 kDefaultTimeout);
+  transport()->Clear();
+  ASSERT_EQ_WAIT(SctpTransportState::kClosed, observer_.State(),
+                 kDefaultTimeout);
+}
+
+// MaxChannels is unset until the handshake completes, then equals the minimum
+// of the outbound/inbound stream limits, both via polling and the observer.
+TEST_F(SctpTransportTest, MaxChannelsSignalled) {
+  CreateTransport();
+  transport()->RegisterObserver(observer());
+  AddDtlsTransport();
+  EXPECT_FALSE(transport()->Information().MaxChannels());
+  EXPECT_FALSE(observer_.LastReceivedInformation().MaxChannels());
+  CompleteSctpHandshake();
+  ASSERT_EQ_WAIT(SctpTransportState::kConnected, observer_.State(),
+                 kDefaultTimeout);
+  EXPECT_TRUE(transport()->Information().MaxChannels());
+  EXPECT_EQ(kTestMaxSctpStreams, *(transport()->Information().MaxChannels()));
+  EXPECT_TRUE(observer_.LastReceivedInformation().MaxChannels());
+  EXPECT_EQ(kTestMaxSctpStreams,
+            *(observer_.LastReceivedInformation().MaxChannels()));
+}
+
+// A DTLS transport entering kClosed must close the SCTP transport too.
+TEST_F(SctpTransportTest, CloseWhenTransportCloses) {
+  CreateTransport();
+  transport()->RegisterObserver(observer());
+  AddDtlsTransport();
+  CompleteSctpHandshake();
+  ASSERT_EQ_WAIT(SctpTransportState::kConnected, observer_.State(),
+                 kDefaultTimeout);
+  static_cast<cricket::FakeDtlsTransport*>(dtls_transport_->internal())
+      ->SetDtlsState(DtlsTransportState::kClosed);
+  ASSERT_EQ_WAIT(SctpTransportState::kClosed, observer_.State(),
+                 kDefaultTimeout);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/sctp_utils.cc b/third_party/libwebrtc/pc/sctp_utils.cc
new file mode 100644
index 0000000000..54742c27a7
--- /dev/null
+++ b/third_party/libwebrtc/pc/sctp_utils.cc
@@ -0,0 +1,244 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/sctp_utils.h"
+
+#include <stddef.h>
+
+#include <cstdint>
+
+#include "absl/types/optional.h"
+#include "api/priority.h"
+#include "rtc_base/byte_buffer.h"
+#include "rtc_base/copy_on_write_buffer.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+
+// Format defined at
+// http://tools.ietf.org/html/draft-ietf-rtcweb-data-protocol-01#section
+
+// DCEP message type bytes (first octet of each message on the control
+// stream); see RFC 8832.
+static const uint8_t DATA_CHANNEL_OPEN_MESSAGE_TYPE = 0x03;
+static const uint8_t DATA_CHANNEL_OPEN_ACK_MESSAGE_TYPE = 0x02;
+
+// Channel types from the DATA_CHANNEL_OPEN message; the high bit (0x80)
+// marks unordered delivery.
+enum DataChannelOpenMessageChannelType {
+  DCOMCT_ORDERED_RELIABLE = 0x00,
+  DCOMCT_ORDERED_PARTIAL_RTXS = 0x01,
+  DCOMCT_ORDERED_PARTIAL_TIME = 0x02,
+  DCOMCT_UNORDERED_RELIABLE = 0x80,
+  DCOMCT_UNORDERED_PARTIAL_RTXS = 0x81,
+  DCOMCT_UNORDERED_PARTIAL_TIME = 0x82,
+};
+
+// Values of priority in the DC open protocol message.
+// These are compared against an integer, so are enum, not enum class.
+enum DataChannelPriority {
+  DCO_PRIORITY_VERY_LOW = 128,
+  DCO_PRIORITY_LOW = 256,
+  DCO_PRIORITY_MEDIUM = 512,
+  DCO_PRIORITY_HIGH = 1024,
+};
+
+// Returns true iff the payload's first byte is the DATA_CHANNEL_OPEN message
+// type (0x03). Only the type byte is inspected; the rest is not validated.
+bool IsOpenMessage(const rtc::CopyOnWriteBuffer& payload) {
+  // Format defined at
+  // https://www.rfc-editor.org/rfc/rfc8832#section-5.1
+  if (payload.size() < 1) {
+    RTC_DLOG(LS_WARNING) << "Could not read OPEN message type.";
+    return false;
+  }
+
+  uint8_t message_type = payload[0];
+  return message_type == DATA_CHANNEL_OPEN_MESSAGE_TYPE;
+}
+
+// Parses a DATA_CHANNEL_OPEN message into `label` and `config`. Returns
+// false (with a warning logged) if any field cannot be read or the message
+// type is wrong; on failure `label`/`config` may be partially written.
+bool ParseDataChannelOpenMessage(const rtc::CopyOnWriteBuffer& payload,
+                                 std::string* label,
+                                 DataChannelInit* config) {
+  // Format defined at
+  // http://tools.ietf.org/html/draft-jesup-rtcweb-data-protocol-04
+
+  rtc::ByteBufferReader buffer(payload.data<char>(), payload.size());
+  uint8_t message_type;
+  if (!buffer.ReadUInt8(&message_type)) {
+    RTC_LOG(LS_WARNING) << "Could not read OPEN message type.";
+    return false;
+  }
+  if (message_type != DATA_CHANNEL_OPEN_MESSAGE_TYPE) {
+    RTC_LOG(LS_WARNING) << "Data Channel OPEN message of unexpected type: "
+                        << message_type;
+    return false;
+  }
+
+  uint8_t channel_type;
+  if (!buffer.ReadUInt8(&channel_type)) {
+    RTC_LOG(LS_WARNING) << "Could not read OPEN message channel type.";
+    return false;
+  }
+
+  uint16_t priority;
+  if (!buffer.ReadUInt16(&priority)) {
+    // Fixed typos in the warning text ("reliabilility prioirty").
+    RTC_LOG(LS_WARNING)
+        << "Could not read OPEN message reliability priority.";
+    return false;
+  }
+  // Parse priority as defined in
+  // https://w3c.github.io/webrtc-priority/#rtcdatachannel-processing-steps
+  if (priority <= DCO_PRIORITY_VERY_LOW) {
+    config->priority = Priority::kVeryLow;
+  } else if (priority <= DCO_PRIORITY_LOW) {
+    config->priority = Priority::kLow;
+  } else if (priority <= DCO_PRIORITY_MEDIUM) {
+    config->priority = Priority::kMedium;
+  } else {
+    config->priority = Priority::kHigh;
+  }
+
+  uint32_t reliability_param;
+  if (!buffer.ReadUInt32(&reliability_param)) {
+    // Fixed typo in the warning text ("reliabilility").
+    RTC_LOG(LS_WARNING) << "Could not read OPEN message reliability param.";
+    return false;
+  }
+  uint16_t label_length;
+  if (!buffer.ReadUInt16(&label_length)) {
+    RTC_LOG(LS_WARNING) << "Could not read OPEN message label length.";
+    return false;
+  }
+  uint16_t protocol_length;
+  if (!buffer.ReadUInt16(&protocol_length)) {
+    RTC_LOG(LS_WARNING) << "Could not read OPEN message protocol length.";
+    return false;
+  }
+  if (!buffer.ReadString(label, (size_t)label_length)) {
+    RTC_LOG(LS_WARNING) << "Could not read OPEN message label";
+    return false;
+  }
+  if (!buffer.ReadString(&config->protocol, protocol_length)) {
+    RTC_LOG(LS_WARNING) << "Could not read OPEN message protocol.";
+    return false;
+  }
+
+  // Ordered unless the channel type has the "unordered" bit set (0x8x).
+  config->ordered = true;
+  switch (channel_type) {
+    case DCOMCT_UNORDERED_RELIABLE:
+    case DCOMCT_UNORDERED_PARTIAL_RTXS:
+    case DCOMCT_UNORDERED_PARTIAL_TIME:
+      config->ordered = false;
+  }
+
+  // The single reliability parameter is interpreted according to the
+  // channel type: retransmit count vs. lifetime (ms); reliable types use
+  // neither.
+  config->maxRetransmits = absl::nullopt;
+  config->maxRetransmitTime = absl::nullopt;
+  switch (channel_type) {
+    case DCOMCT_ORDERED_PARTIAL_RTXS:
+    case DCOMCT_UNORDERED_PARTIAL_RTXS:
+      config->maxRetransmits = reliability_param;
+      break;
+    case DCOMCT_ORDERED_PARTIAL_TIME:
+    case DCOMCT_UNORDERED_PARTIAL_TIME:
+      config->maxRetransmitTime = reliability_param;
+      break;
+  }
+  return true;
+}
+
+// Validates that the payload is a DATA_CHANNEL_ACK message (type byte 0x02).
+// Bytes after the type byte, if any, are ignored.
+bool ParseDataChannelOpenAckMessage(const rtc::CopyOnWriteBuffer& payload) {
+  if (payload.size() < 1) {
+    RTC_LOG(LS_WARNING) << "Could not read OPEN_ACK message type.";
+    return false;
+  }
+
+  uint8_t message_type = payload[0];
+  if (message_type != DATA_CHANNEL_OPEN_ACK_MESSAGE_TYPE) {
+    RTC_LOG(LS_WARNING) << "Data Channel OPEN_ACK message of unexpected type: "
+                        << message_type;
+    return false;
+  }
+  return true;
+}
+
+// Convenience overload: serializes an OPEN message from a DataChannelInit by
+// unpacking its fields into the explicit-parameter overload below.
+bool WriteDataChannelOpenMessage(const std::string& label,
+                                 const DataChannelInit& config,
+                                 rtc::CopyOnWriteBuffer* payload) {
+  return WriteDataChannelOpenMessage(label, config.protocol, config.priority,
+                                     config.ordered, config.maxRetransmits,
+                                     config.maxRetransmitTime, payload);
+}
+
+// Serializes a DATA_CHANNEL_OPEN message into `payload`. The channel type
+// byte encodes ordered/unordered plus which reliability parameter (if any)
+// is carried; an unset priority is written as 0. Always returns true.
+bool WriteDataChannelOpenMessage(const std::string& label,
+                                 const std::string& protocol,
+                                 absl::optional<Priority> opt_priority,
+                                 bool ordered,
+                                 absl::optional<int> max_retransmits,
+                                 absl::optional<int> max_retransmit_time,
+                                 rtc::CopyOnWriteBuffer* payload) {
+  // Format defined at
+  // http://tools.ietf.org/html/draft-ietf-rtcweb-data-protocol-09#section-5.1
+  uint8_t channel_type = 0;
+  uint32_t reliability_param = 0;
+  uint16_t priority = 0;
+  // Set priority according to
+  // https://tools.ietf.org/html/draft-ietf-rtcweb-data-channel-12#section-6.4
+  if (opt_priority) {
+    switch (*opt_priority) {
+      case Priority::kVeryLow:
+        priority = DCO_PRIORITY_VERY_LOW;
+        break;
+      case Priority::kLow:
+        priority = DCO_PRIORITY_LOW;
+        break;
+      case Priority::kMedium:
+        priority = DCO_PRIORITY_MEDIUM;
+        break;
+      case Priority::kHigh:
+        priority = DCO_PRIORITY_HIGH;
+        break;
+    }
+  }
+  // Note: if both retransmit limits are set, max_retransmits wins here.
+  if (ordered) {
+    if (max_retransmits) {
+      channel_type = DCOMCT_ORDERED_PARTIAL_RTXS;
+      reliability_param = *max_retransmits;
+    } else if (max_retransmit_time) {
+      channel_type = DCOMCT_ORDERED_PARTIAL_TIME;
+      reliability_param = *max_retransmit_time;
+    } else {
+      channel_type = DCOMCT_ORDERED_RELIABLE;
+    }
+  } else {
+    if (max_retransmits) {
+      channel_type = DCOMCT_UNORDERED_PARTIAL_RTXS;
+      reliability_param = *max_retransmits;
+    } else if (max_retransmit_time) {
+      channel_type = DCOMCT_UNORDERED_PARTIAL_TIME;
+      reliability_param = *max_retransmit_time;
+    } else {
+      channel_type = DCOMCT_UNORDERED_RELIABLE;
+    }
+  }
+
+  // 12 bytes of fixed header + label + protocol (the 20 below over-reserves).
+  rtc::ByteBufferWriter buffer(NULL, 20 + label.length() + protocol.length());
+  // TODO(tommi): Add error handling and check resulting length.
+  buffer.WriteUInt8(DATA_CHANNEL_OPEN_MESSAGE_TYPE);
+  buffer.WriteUInt8(channel_type);
+  buffer.WriteUInt16(priority);
+  buffer.WriteUInt32(reliability_param);
+  buffer.WriteUInt16(static_cast<uint16_t>(label.length()));
+  buffer.WriteUInt16(static_cast<uint16_t>(protocol.length()));
+  buffer.WriteString(label);
+  buffer.WriteString(protocol);
+  payload->SetData(buffer.Data(), buffer.Length());
+  return true;
+}
+
+// Serializes a DATA_CHANNEL_ACK message: a single 0x02 type byte.
+void WriteDataChannelOpenAckMessage(rtc::CopyOnWriteBuffer* payload) {
+  uint8_t data = DATA_CHANNEL_OPEN_ACK_MESSAGE_TYPE;
+  payload->SetData(&data, sizeof(data));
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/sctp_utils.h b/third_party/libwebrtc/pc/sctp_utils.h
new file mode 100644
index 0000000000..868a8be826
--- /dev/null
+++ b/third_party/libwebrtc/pc/sctp_utils.h
@@ -0,0 +1,94 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_SCTP_UTILS_H_
+#define PC_SCTP_UTILS_H_
+
+#include <string>
+
+#include "api/data_channel_interface.h"
+#include "api/transport/data_channel_transport_interface.h"
+#include "media/base/media_channel.h"
+#include "media/sctp/sctp_transport_internal.h"
+#include "net/dcsctp/public/types.h"
+#include "rtc_base/copy_on_write_buffer.h"
+#include "rtc_base/ssl_stream_adapter.h" // For SSLRole
+
+namespace rtc {
+class CopyOnWriteBuffer;
+} // namespace rtc
+
+namespace webrtc {
+struct DataChannelInit;
+
+// Wraps the `uint16_t` sctp data channel stream id value and does range
+// checking. The class interface is `int` based to ease with DataChannelInit
+// compatibility and types used in `DataChannelController`'s interface. Going
+// forward, `int` compatibility won't be needed and we can either just use
+// this class or the internal dcsctp::StreamID type.
+class StreamId {
+ public:
+  StreamId() = default;
+  // Out-of-range ids (< kMinSctpSid or > kSpecMaxSctpSid) yield an empty
+  // (HasValue() == false) StreamId rather than clamping or asserting.
+  explicit StreamId(int id)
+      : id_(id >= cricket::kMinSctpSid && id <= cricket::kSpecMaxSctpSid
+                ? absl::optional<uint16_t>(static_cast<uint16_t>(id))
+                : absl::nullopt) {}
+  StreamId(const StreamId& sid) = default;
+  StreamId& operator=(const StreamId& sid) = default;
+
+  // Returns `true` if a valid stream id is contained, in the range of
+  // kMinSctpSid - kSpecMaxSctpSid ([0..0xffff]). Note that this
+  // is different than having `kMaxSctpSid` as the upper bound, which is
+  // the limit that is internally used by `SctpSidAllocator`. Sid values may
+  // be assigned to `StreamId` outside of `SctpSidAllocator` and have a higher
+  // id value than supplied by `SctpSidAllocator`, yet is still valid.
+  bool HasValue() const { return id_.has_value(); }
+
+  // Provided for compatibility with existing code that hasn't been updated
+  // to use `StreamId` directly. New code should not use 'int' for the stream
+  // id but rather `StreamId` directly. Returns -1 when no id is contained.
+  int stream_id_int() const {
+    return id_.has_value() ? static_cast<int>(id_.value().value()) : -1;
+  }
+
+  void reset() { id_ = absl::nullopt; }
+
+  // An empty StreamId compares equal only to another empty StreamId, and
+  // orders before any id-carrying one (absl::optional semantics).
+  bool operator==(const StreamId& sid) const { return id_ == sid.id_; }
+  bool operator<(const StreamId& sid) const { return id_ < sid.id_; }
+  bool operator!=(const StreamId& sid) const { return !(operator==(sid)); }
+
+ private:
+  absl::optional<dcsctp::StreamID> id_;
+};
+
+// Read the message type and return true if it's an OPEN message.
+bool IsOpenMessage(const rtc::CopyOnWriteBuffer& payload);
+
+bool ParseDataChannelOpenMessage(const rtc::CopyOnWriteBuffer& payload,
+ std::string* label,
+ DataChannelInit* config);
+
+bool ParseDataChannelOpenAckMessage(const rtc::CopyOnWriteBuffer& payload);
+
+bool WriteDataChannelOpenMessage(const std::string& label,
+ const std::string& protocol,
+ absl::optional<Priority> priority,
+ bool ordered,
+ absl::optional<int> max_retransmits,
+ absl::optional<int> max_retransmit_time,
+ rtc::CopyOnWriteBuffer* payload);
+bool WriteDataChannelOpenMessage(const std::string& label,
+ const DataChannelInit& config,
+ rtc::CopyOnWriteBuffer* payload);
+void WriteDataChannelOpenAckMessage(rtc::CopyOnWriteBuffer* payload);
+
+} // namespace webrtc
+
+#endif // PC_SCTP_UTILS_H_
diff --git a/third_party/libwebrtc/pc/sctp_utils_unittest.cc b/third_party/libwebrtc/pc/sctp_utils_unittest.cc
new file mode 100644
index 0000000000..3e49824b45
--- /dev/null
+++ b/third_party/libwebrtc/pc/sctp_utils_unittest.cc
@@ -0,0 +1,242 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/sctp_utils.h"
+
+#include <stdint.h>
+
+#include <limits>
+
+#include "absl/types/optional.h"
+#include "api/priority.h"
+#include "media/sctp/sctp_transport_internal.h"
+#include "rtc_base/byte_buffer.h"
+#include "rtc_base/copy_on_write_buffer.h"
+#include "test/gtest.h"
+
+using webrtc::StreamId;
+
+class SctpUtilsTest : public ::testing::Test {
+ public:
+ void VerifyOpenMessageFormat(const rtc::CopyOnWriteBuffer& packet,
+ const std::string& label,
+ const webrtc::DataChannelInit& config) {
+ uint8_t message_type;
+ uint8_t channel_type;
+ uint32_t reliability;
+ uint16_t priority;
+ uint16_t label_length;
+ uint16_t protocol_length;
+
+ rtc::ByteBufferReader buffer(packet.data<char>(), packet.size());
+ ASSERT_TRUE(buffer.ReadUInt8(&message_type));
+ EXPECT_EQ(0x03, message_type);
+
+ ASSERT_TRUE(buffer.ReadUInt8(&channel_type));
+ if (config.ordered) {
+ EXPECT_EQ(
+ config.maxRetransmits ? 0x01 : (config.maxRetransmitTime ? 0x02 : 0),
+ channel_type);
+ } else {
+ EXPECT_EQ(config.maxRetransmits
+ ? 0x81
+ : (config.maxRetransmitTime ? 0x82 : 0x80),
+ channel_type);
+ }
+
+ ASSERT_TRUE(buffer.ReadUInt16(&priority));
+ if (config.priority) {
+ // Exact values are checked by round-trip conversion, but
+ // all values defined are greater than zero.
+ EXPECT_GT(priority, 0);
+ } else {
+ EXPECT_EQ(priority, 0);
+ }
+
+ ASSERT_TRUE(buffer.ReadUInt32(&reliability));
+ if (config.maxRetransmits || config.maxRetransmitTime) {
+ EXPECT_EQ(config.maxRetransmits ? *config.maxRetransmits
+ : *config.maxRetransmitTime,
+ static_cast<int>(reliability));
+ }
+
+ ASSERT_TRUE(buffer.ReadUInt16(&label_length));
+ ASSERT_TRUE(buffer.ReadUInt16(&protocol_length));
+ EXPECT_EQ(label.size(), label_length);
+ EXPECT_EQ(config.protocol.size(), protocol_length);
+
+ std::string label_output;
+ ASSERT_TRUE(buffer.ReadString(&label_output, label_length));
+ EXPECT_EQ(label, label_output);
+ std::string protocol_output;
+ ASSERT_TRUE(buffer.ReadString(&protocol_output, protocol_length));
+ EXPECT_EQ(config.protocol, protocol_output);
+ }
+};
+
+TEST_F(SctpUtilsTest, WriteParseOpenMessageWithOrderedReliable) {
+ webrtc::DataChannelInit config;
+ std::string label = "abc";
+ config.protocol = "y";
+
+ rtc::CopyOnWriteBuffer packet;
+ ASSERT_TRUE(webrtc::WriteDataChannelOpenMessage(label, config, &packet));
+
+ VerifyOpenMessageFormat(packet, label, config);
+
+ std::string output_label;
+ webrtc::DataChannelInit output_config;
+ ASSERT_TRUE(webrtc::ParseDataChannelOpenMessage(packet, &output_label,
+ &output_config));
+
+ EXPECT_EQ(label, output_label);
+ EXPECT_EQ(config.protocol, output_config.protocol);
+ EXPECT_EQ(config.ordered, output_config.ordered);
+ EXPECT_EQ(config.maxRetransmitTime, output_config.maxRetransmitTime);
+ EXPECT_EQ(config.maxRetransmits, output_config.maxRetransmits);
+}
+
+TEST_F(SctpUtilsTest, WriteParseOpenMessageWithMaxRetransmitTime) {
+ webrtc::DataChannelInit config;
+ std::string label = "abc";
+ config.ordered = false;
+ config.maxRetransmitTime = 10;
+ config.protocol = "y";
+
+ rtc::CopyOnWriteBuffer packet;
+ ASSERT_TRUE(webrtc::WriteDataChannelOpenMessage(label, config, &packet));
+
+ VerifyOpenMessageFormat(packet, label, config);
+
+ std::string output_label;
+ webrtc::DataChannelInit output_config;
+ ASSERT_TRUE(webrtc::ParseDataChannelOpenMessage(packet, &output_label,
+ &output_config));
+
+ EXPECT_EQ(label, output_label);
+ EXPECT_EQ(config.protocol, output_config.protocol);
+ EXPECT_EQ(config.ordered, output_config.ordered);
+ EXPECT_EQ(*config.maxRetransmitTime, *output_config.maxRetransmitTime);
+ EXPECT_FALSE(output_config.maxRetransmits);
+}
+
+TEST_F(SctpUtilsTest, WriteParseOpenMessageWithMaxRetransmits) {
+ webrtc::DataChannelInit config;
+ std::string label = "abc";
+ config.maxRetransmits = 10;
+ config.protocol = "y";
+
+ rtc::CopyOnWriteBuffer packet;
+ ASSERT_TRUE(webrtc::WriteDataChannelOpenMessage(label, config, &packet));
+
+ VerifyOpenMessageFormat(packet, label, config);
+
+ std::string output_label;
+ webrtc::DataChannelInit output_config;
+ ASSERT_TRUE(webrtc::ParseDataChannelOpenMessage(packet, &output_label,
+ &output_config));
+
+ EXPECT_EQ(label, output_label);
+ EXPECT_EQ(config.protocol, output_config.protocol);
+ EXPECT_EQ(config.ordered, output_config.ordered);
+ EXPECT_EQ(config.maxRetransmits, output_config.maxRetransmits);
+ EXPECT_FALSE(output_config.maxRetransmitTime);
+}
+
+TEST_F(SctpUtilsTest, WriteParseOpenMessageWithPriority) {
+ webrtc::DataChannelInit config;
+ std::string label = "abc";
+ config.protocol = "y";
+ config.priority = webrtc::Priority::kVeryLow;
+
+ rtc::CopyOnWriteBuffer packet;
+ ASSERT_TRUE(webrtc::WriteDataChannelOpenMessage(label, config, &packet));
+
+ VerifyOpenMessageFormat(packet, label, config);
+
+ std::string output_label;
+ webrtc::DataChannelInit output_config;
+ ASSERT_TRUE(webrtc::ParseDataChannelOpenMessage(packet, &output_label,
+ &output_config));
+
+ EXPECT_EQ(label, output_label);
+ ASSERT_TRUE(output_config.priority);
+ EXPECT_EQ(*config.priority, *output_config.priority);
+}
+
+TEST_F(SctpUtilsTest, WriteParseAckMessage) {
+ rtc::CopyOnWriteBuffer packet;
+ webrtc::WriteDataChannelOpenAckMessage(&packet);
+
+ uint8_t message_type;
+ rtc::ByteBufferReader buffer(packet.data<char>(), packet.size());
+ ASSERT_TRUE(buffer.ReadUInt8(&message_type));
+ EXPECT_EQ(0x02, message_type);
+
+ EXPECT_TRUE(webrtc::ParseDataChannelOpenAckMessage(packet));
+}
+
+TEST_F(SctpUtilsTest, TestIsOpenMessage) {
+ rtc::CopyOnWriteBuffer open(1);
+ open.MutableData()[0] = 0x03;
+ EXPECT_TRUE(webrtc::IsOpenMessage(open));
+
+ rtc::CopyOnWriteBuffer openAck(1);
+ openAck.MutableData()[0] = 0x02;
+ EXPECT_FALSE(webrtc::IsOpenMessage(openAck));
+
+ rtc::CopyOnWriteBuffer invalid(1);
+ invalid.MutableData()[0] = 0x01;
+ EXPECT_FALSE(webrtc::IsOpenMessage(invalid));
+
+ rtc::CopyOnWriteBuffer empty;
+ EXPECT_FALSE(webrtc::IsOpenMessage(empty));
+}
+
+TEST(SctpSidTest, Basics) {
+ // These static asserts are mostly here to aid with readability (i.e. knowing
+ // what these constants represent).
+ static_assert(cricket::kMinSctpSid == 0, "Min stream id should be 0");
+ static_assert(cricket::kMaxSctpSid <= cricket::kSpecMaxSctpSid, "");
+ static_assert(
+ cricket::kSpecMaxSctpSid == std::numeric_limits<uint16_t>::max(),
+ "Max legal sctp stream value should be 0xffff");
+
+ // cricket::kMaxSctpSid is a chosen value in the webrtc implementation,
+ // the highest generated `sid` value chosen for resource reservation reasons.
+ // It's one less than kMaxSctpStreams (1024) or 1023 since sid values are
+ // zero based.
+
+ EXPECT_TRUE(!StreamId(-1).HasValue());
+ EXPECT_TRUE(!StreamId(-2).HasValue());
+ EXPECT_TRUE(StreamId(cricket::kMinSctpSid).HasValue());
+ EXPECT_TRUE(StreamId(cricket::kMinSctpSid + 1).HasValue());
+ EXPECT_TRUE(StreamId(cricket::kSpecMaxSctpSid).HasValue());
+ EXPECT_TRUE(StreamId(cricket::kMaxSctpSid).HasValue());
+
+ // Two illegal values are equal (both not valid).
+ EXPECT_EQ(StreamId(-1), StreamId(-2));
+ // Two different, but legal, values, are not equal.
+ EXPECT_NE(StreamId(1), StreamId(2));
+ // Test operator<() for container compatibility.
+ EXPECT_LT(StreamId(1), StreamId(2));
+
+ // Test assignment, value() and reset().
+ StreamId sid1;
+ StreamId sid2(cricket::kMaxSctpSid);
+ EXPECT_NE(sid1, sid2);
+ sid1 = sid2;
+ EXPECT_EQ(sid1, sid2);
+
+ EXPECT_EQ(sid1.stream_id_int(), cricket::kMaxSctpSid);
+ EXPECT_TRUE(sid1.HasValue());
+ sid1.reset();
+ EXPECT_FALSE(sid1.HasValue());
+}
diff --git a/third_party/libwebrtc/pc/sdp_offer_answer.cc b/third_party/libwebrtc/pc/sdp_offer_answer.cc
new file mode 100644
index 0000000000..0261195bb0
--- /dev/null
+++ b/third_party/libwebrtc/pc/sdp_offer_answer.cc
@@ -0,0 +1,5503 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/sdp_offer_answer.h"
+
+#include <algorithm>
+#include <cstddef>
+#include <iterator>
+#include <map>
+#include <memory>
+#include <queue>
+#include <string>
+#include <utility>
+
+#include "absl/algorithm/container.h"
+#include "absl/memory/memory.h"
+#include "absl/strings/match.h"
+#include "absl/strings/string_view.h"
+#include "api/array_view.h"
+#include "api/crypto/crypto_options.h"
+#include "api/dtls_transport_interface.h"
+#include "api/field_trials_view.h"
+#include "api/rtp_parameters.h"
+#include "api/rtp_receiver_interface.h"
+#include "api/rtp_sender_interface.h"
+#include "api/video/builtin_video_bitrate_allocator_factory.h"
+#include "media/base/codec.h"
+#include "media/base/rid_description.h"
+#include "p2p/base/ice_transport_internal.h"
+#include "p2p/base/p2p_constants.h"
+#include "p2p/base/p2p_transport_channel.h"
+#include "p2p/base/port.h"
+#include "p2p/base/transport_description.h"
+#include "p2p/base/transport_description_factory.h"
+#include "p2p/base/transport_info.h"
+#include "pc/channel_interface.h"
+#include "pc/dtls_transport.h"
+#include "pc/legacy_stats_collector.h"
+#include "pc/media_stream.h"
+#include "pc/media_stream_proxy.h"
+#include "pc/peer_connection_internal.h"
+#include "pc/peer_connection_message_handler.h"
+#include "pc/rtp_media_utils.h"
+#include "pc/rtp_receiver_proxy.h"
+#include "pc/rtp_sender.h"
+#include "pc/rtp_sender_proxy.h"
+#include "pc/simulcast_description.h"
+#include "pc/usage_pattern.h"
+#include "pc/used_ids.h"
+#include "pc/webrtc_session_description_factory.h"
+#include "rtc_base/helpers.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/rtc_certificate.h"
+#include "rtc_base/ssl_stream_adapter.h"
+#include "rtc_base/string_encode.h"
+#include "rtc_base/strings/string_builder.h"
+#include "rtc_base/trace_event.h"
+#include "system_wrappers/include/metrics.h"
+
+using cricket::ContentInfo;
+using cricket::ContentInfos;
+using cricket::MediaContentDescription;
+using cricket::MediaProtocolType;
+using cricket::RidDescription;
+using cricket::RidDirection;
+using cricket::SessionDescription;
+using cricket::SimulcastDescription;
+using cricket::SimulcastLayer;
+using cricket::SimulcastLayerList;
+using cricket::StreamParams;
+using cricket::TransportInfo;
+
+using cricket::LOCAL_PORT_TYPE;
+using cricket::PRFLX_PORT_TYPE;
+using cricket::RELAY_PORT_TYPE;
+using cricket::STUN_PORT_TYPE;
+
+namespace webrtc {
+
+namespace {
+
+typedef webrtc::PeerConnectionInterface::RTCOfferAnswerOptions
+ RTCOfferAnswerOptions;
+
+// Error messages
+const char kInvalidSdp[] = "Invalid session description.";
+const char kInvalidCandidates[] = "Description contains invalid candidates.";
+const char kBundleWithoutRtcpMux[] =
+ "rtcp-mux must be enabled when BUNDLE "
+ "is enabled.";
+const char kMlineMismatchInAnswer[] =
+ "The order of m-lines in answer doesn't match order in offer. Rejecting "
+ "answer.";
+const char kMlineMismatchInSubsequentOffer[] =
+ "The order of m-lines in subsequent offer doesn't match order from "
+ "previous offer/answer.";
+const char kSdpWithoutIceUfragPwd[] =
+ "Called with SDP without ice-ufrag and ice-pwd.";
+const char kSdpWithoutDtlsFingerprint[] =
+ "Called with SDP without DTLS fingerprint.";
+const char kSdpWithoutSdesCrypto[] = "Called with SDP without SDES crypto.";
+
+const char kSessionError[] = "Session error code: ";
+const char kSessionErrorDesc[] = "Session error description: ";
+
+// The length of RTCP CNAMEs.
+static const int kRtcpCnameLength = 16;
+
+// The maximum length of the MID attribute.
+static constexpr size_t kMidMaxSize = 16;
+
+const char kDefaultStreamId[] = "default";
+// NOTE: Duplicated in peer_connection.cc:
+static const char kDefaultAudioSenderId[] = "defaulta0";
+static const char kDefaultVideoSenderId[] = "defaultv0";
+
+void NoteAddIceCandidateResult(int result) {
+ RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.AddIceCandidate", result,
+ kAddIceCandidateMax);
+}
+
+std::map<std::string, const cricket::ContentGroup*> GetBundleGroupsByMid(
+ const SessionDescription* desc) {
+ std::vector<const cricket::ContentGroup*> bundle_groups =
+ desc->GetGroupsByName(cricket::GROUP_TYPE_BUNDLE);
+ std::map<std::string, const cricket::ContentGroup*> bundle_groups_by_mid;
+ for (const cricket::ContentGroup* bundle_group : bundle_groups) {
+ for (const std::string& content_name : bundle_group->content_names()) {
+ bundle_groups_by_mid[content_name] = bundle_group;
+ }
+ }
+ return bundle_groups_by_mid;
+}
+
+// Returns true if `new_desc` requests an ICE restart (i.e., new ufrag/pwd).
+bool CheckForRemoteIceRestart(const SessionDescriptionInterface* old_desc,
+ const SessionDescriptionInterface* new_desc,
+ const std::string& content_name) {
+ if (!old_desc) {
+ return false;
+ }
+ const SessionDescription* new_sd = new_desc->description();
+ const SessionDescription* old_sd = old_desc->description();
+ const ContentInfo* cinfo = new_sd->GetContentByName(content_name);
+ if (!cinfo || cinfo->rejected) {
+ return false;
+ }
+ // If the content isn't rejected, check if ufrag and password has changed.
+ const cricket::TransportDescription* new_transport_desc =
+ new_sd->GetTransportDescriptionByName(content_name);
+ const cricket::TransportDescription* old_transport_desc =
+ old_sd->GetTransportDescriptionByName(content_name);
+ if (!new_transport_desc || !old_transport_desc) {
+ // No transport description exists. This is not an ICE restart.
+ return false;
+ }
+ if (cricket::IceCredentialsChanged(
+ old_transport_desc->ice_ufrag, old_transport_desc->ice_pwd,
+ new_transport_desc->ice_ufrag, new_transport_desc->ice_pwd)) {
+ RTC_LOG(LS_INFO) << "Remote peer requests ICE restart for " << content_name
+ << ".";
+ return true;
+ }
+ return false;
+}
+
+// Generates a string error message for SetLocalDescription/SetRemoteDescription
+// from an RTCError.
+std::string GetSetDescriptionErrorMessage(cricket::ContentSource source,
+ SdpType type,
+ const RTCError& error) {
+ rtc::StringBuilder oss;
+ oss << "Failed to set " << (source == cricket::CS_LOCAL ? "local" : "remote")
+ << " " << SdpTypeToString(type) << " sdp: ";
+ RTC_DCHECK(!absl::StartsWith(error.message(), oss.str())) << error.message();
+ oss << error.message();
+ return oss.Release();
+}
+
+std::string GetStreamIdsString(rtc::ArrayView<const std::string> stream_ids) {
+ std::string output = "streams=[";
+ const char* separator = "";
+ for (const auto& stream_id : stream_ids) {
+ output.append(separator).append(stream_id);
+ separator = ", ";
+ }
+ output.append("]");
+ return output;
+}
+
+const ContentInfo* FindTransceiverMSection(
+ RtpTransceiver* transceiver,
+ const SessionDescriptionInterface* session_description) {
+ return transceiver->mid()
+ ? session_description->description()->GetContentByName(
+ *transceiver->mid())
+ : nullptr;
+}
+
+// If the direction is "recvonly" or "inactive", treat the description
+// as containing no streams.
+// See: https://code.google.com/p/webrtc/issues/detail?id=5054
+std::vector<cricket::StreamParams> GetActiveStreams(
+ const cricket::MediaContentDescription* desc) {
+ return RtpTransceiverDirectionHasSend(desc->direction())
+ ? desc->streams()
+ : std::vector<cricket::StreamParams>();
+}
+
+// Logic to decide if an m= section can be recycled. This means that the new
+// m= section is not rejected, but the old local or remote m= section is
+// rejected. `old_content_one` and `old_content_two` refer to the m= section
+// of the old remote and old local descriptions in no particular order.
+// We need to check both the old local and remote because either
+// could be the most current from the latest negotiation.
+bool IsMediaSectionBeingRecycled(SdpType type,
+ const ContentInfo& content,
+ const ContentInfo* old_content_one,
+ const ContentInfo* old_content_two) {
+ return type == SdpType::kOffer && !content.rejected &&
+ ((old_content_one && old_content_one->rejected) ||
+ (old_content_two && old_content_two->rejected));
+}
+
+// Verify that the order of media sections in `new_desc` matches
+// `current_desc`. The number of m= sections in `new_desc` should be no
+// less than `current_desc`. In the case of checking an answer's
+// `new_desc`, the `current_desc` is the last offer that was set as the
+// local or remote. In the case of checking an offer's `new_desc` we
+// check against the local and remote descriptions stored from the last
+// negotiation, because either of these could be the most up to date for
+// possible rejected m sections. These are the `current_desc` and
+// `secondary_current_desc`.
+bool MediaSectionsInSameOrder(const SessionDescription& current_desc,
+ const SessionDescription* secondary_current_desc,
+ const SessionDescription& new_desc,
+ const SdpType type) {
+ if (current_desc.contents().size() > new_desc.contents().size()) {
+ return false;
+ }
+
+ for (size_t i = 0; i < current_desc.contents().size(); ++i) {
+ const cricket::ContentInfo* secondary_content_info = nullptr;
+ if (secondary_current_desc &&
+ i < secondary_current_desc->contents().size()) {
+ secondary_content_info = &secondary_current_desc->contents()[i];
+ }
+ if (IsMediaSectionBeingRecycled(type, new_desc.contents()[i],
+ &current_desc.contents()[i],
+ secondary_content_info)) {
+ // For new offer descriptions, if the media section can be recycled, it's
+ // valid for the MID and media type to change.
+ continue;
+ }
+ if (new_desc.contents()[i].name != current_desc.contents()[i].name) {
+ return false;
+ }
+ const MediaContentDescription* new_desc_mdesc =
+ new_desc.contents()[i].media_description();
+ const MediaContentDescription* current_desc_mdesc =
+ current_desc.contents()[i].media_description();
+ if (new_desc_mdesc->type() != current_desc_mdesc->type()) {
+ return false;
+ }
+ }
+ return true;
+}
+
+bool MediaSectionsHaveSameCount(const SessionDescription& desc1,
+ const SessionDescription& desc2) {
+ return desc1.contents().size() == desc2.contents().size();
+}
+// Checks that each non-rejected content has SDES crypto keys or a DTLS
+// fingerprint, unless it's in a BUNDLE group, in which case only the
+// BUNDLE-tag section (first media section/description in the BUNDLE group)
+// needs a ufrag and pwd. Mismatches, such as replying with a DTLS fingerprint
+// to SDES keys, will be caught in JsepTransport negotiation, and backstopped
+// by Channel's `srtp_required` check.
+RTCError VerifyCrypto(const SessionDescription* desc,
+ bool dtls_enabled,
+ const std::map<std::string, const cricket::ContentGroup*>&
+ bundle_groups_by_mid) {
+ for (const cricket::ContentInfo& content_info : desc->contents()) {
+ if (content_info.rejected) {
+ continue;
+ }
+#if !defined(WEBRTC_FUCHSIA)
+ RTC_CHECK(dtls_enabled) << "SDES protocol is only allowed in Fuchsia";
+#endif
+ const std::string& mid = content_info.name;
+ auto it = bundle_groups_by_mid.find(mid);
+ const cricket::ContentGroup* bundle =
+ it != bundle_groups_by_mid.end() ? it->second : nullptr;
+ if (bundle && mid != *(bundle->FirstContentName())) {
+ // This isn't the first media section in the BUNDLE group, so it's not
+ // required to have crypto attributes, since only the crypto attributes
+ // from the first section actually get used.
+ continue;
+ }
+
+ // If the content isn't rejected or bundled into another m= section, crypto
+ // must be present.
+ const MediaContentDescription* media = content_info.media_description();
+ const TransportInfo* tinfo = desc->GetTransportInfoByName(mid);
+ if (!media || !tinfo) {
+ // Something is not right.
+ LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, kInvalidSdp);
+ }
+ if (dtls_enabled) {
+ if (!tinfo->description.identity_fingerprint) {
+ LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
+ kSdpWithoutDtlsFingerprint);
+ }
+ } else {
+ if (media->cryptos().empty()) {
+ LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
+ kSdpWithoutSdesCrypto);
+ }
+ }
+ }
+ return RTCError::OK();
+}
+
+// Checks that each non-rejected content has ice-ufrag and ice-pwd set, unless
+// it's in a BUNDLE group, in which case only the BUNDLE-tag section (first
+// media section/description in the BUNDLE group) needs a ufrag and pwd.
+bool VerifyIceUfragPwdPresent(
+ const SessionDescription* desc,
+ const std::map<std::string, const cricket::ContentGroup*>&
+ bundle_groups_by_mid) {
+ for (const cricket::ContentInfo& content_info : desc->contents()) {
+ if (content_info.rejected) {
+ continue;
+ }
+ const std::string& mid = content_info.name;
+ auto it = bundle_groups_by_mid.find(mid);
+ const cricket::ContentGroup* bundle =
+ it != bundle_groups_by_mid.end() ? it->second : nullptr;
+ if (bundle && mid != *(bundle->FirstContentName())) {
+ // This isn't the first media section in the BUNDLE group, so it's not
+ // required to have ufrag/password, since only the ufrag/password from
+ // the first section actually get used.
+ continue;
+ }
+
+ // If the content isn't rejected or bundled into another m= section,
+ // ice-ufrag and ice-pwd must be present.
+ const TransportInfo* tinfo = desc->GetTransportInfoByName(mid);
+ if (!tinfo) {
+ // Something is not right.
+ RTC_LOG(LS_ERROR) << kInvalidSdp;
+ return false;
+ }
+ if (tinfo->description.ice_ufrag.empty() ||
+ tinfo->description.ice_pwd.empty()) {
+ RTC_LOG(LS_ERROR) << "Session description must have ice ufrag and pwd.";
+ return false;
+ }
+ }
+ return true;
+}
+
+RTCError ValidateMids(const cricket::SessionDescription& description) {
+ std::set<std::string> mids;
+ for (const cricket::ContentInfo& content : description.contents()) {
+ if (content.name.empty()) {
+ LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
+ "A media section is missing a MID attribute.");
+ }
+ if (content.name.size() > kMidMaxSize) {
+ LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
+ "The MID attribute exceeds the maximum supported "
+ "length of 16 characters.");
+ }
+ if (!mids.insert(content.name).second) {
+ LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
+ "Duplicate a=mid value '" + content.name + "'.");
+ }
+ }
+ return RTCError::OK();
+}
+
+RTCError FindDuplicateCodecParameters(
+ const RtpCodecParameters codec_parameters,
+ std::map<int, RtpCodecParameters>& payload_to_codec_parameters) {
+ auto existing_codec_parameters =
+ payload_to_codec_parameters.find(codec_parameters.payload_type);
+ if (existing_codec_parameters != payload_to_codec_parameters.end() &&
+ codec_parameters != existing_codec_parameters->second) {
+ LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
+ "A BUNDLE group contains a codec collision for "
+ "payload_type='" +
+ rtc::ToString(codec_parameters.payload_type) +
+ ". All codecs must share the same type, "
+ "encoding name, clock rate and parameters.");
+ }
+ payload_to_codec_parameters.insert(
+ std::make_pair(codec_parameters.payload_type, codec_parameters));
+ return RTCError::OK();
+}
+
+RTCError ValidateBundledPayloadTypes(
+ const cricket::SessionDescription& description) {
+ // https://www.rfc-editor.org/rfc/rfc8843#name-payload-type-pt-value-reuse
+ // ... all codecs associated with the payload type number MUST share an
+ // identical codec configuration. This means that the codecs MUST share
+ // the same media type, encoding name, clock rate, and any parameter
+ // that can affect the codec configuration and packetization.
+ std::vector<const cricket::ContentGroup*> bundle_groups =
+ description.GetGroupsByName(cricket::GROUP_TYPE_BUNDLE);
+ for (const cricket::ContentGroup* bundle_group : bundle_groups) {
+ std::map<int, RtpCodecParameters> payload_to_codec_parameters;
+ for (const std::string& content_name : bundle_group->content_names()) {
+ const ContentInfo* content_description =
+ description.GetContentByName(content_name);
+ if (!content_description) {
+ LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
+ "A BUNDLE group contains a MID='" + content_name +
+ "' matching no m= section.");
+ }
+ const cricket::MediaContentDescription* media_description =
+ content_description->media_description();
+ RTC_DCHECK(media_description);
+ if (content_description->rejected || !media_description ||
+ !media_description->has_codecs()) {
+ continue;
+ }
+ const auto type = media_description->type();
+ if (type == cricket::MEDIA_TYPE_AUDIO ||
+ type == cricket::MEDIA_TYPE_VIDEO) {
+ for (const auto& c : media_description->codecs()) {
+ auto error = FindDuplicateCodecParameters(
+ c.ToCodecParameters(), payload_to_codec_parameters);
+ if (!error.ok()) {
+ return error;
+ }
+ }
+ }
+ }
+ }
+ return RTCError::OK();
+}
+
+RTCError FindDuplicateHeaderExtensionIds(
+ const RtpExtension extension,
+ std::map<int, RtpExtension>& id_to_extension) {
+ auto existing_extension = id_to_extension.find(extension.id);
+ if (existing_extension != id_to_extension.end() &&
+ !(extension.uri == existing_extension->second.uri &&
+ extension.encrypt == existing_extension->second.encrypt)) {
+ LOG_AND_RETURN_ERROR(
+ RTCErrorType::INVALID_PARAMETER,
+ "A BUNDLE group contains a codec collision for "
+ "header extension id=" +
+ rtc::ToString(extension.id) +
+ ". The id must be the same across all bundled media descriptions");
+ }
+ id_to_extension.insert(std::make_pair(extension.id, extension));
+ return RTCError::OK();
+}
+
+RTCError ValidateBundledRtpHeaderExtensions(
+ const cricket::SessionDescription& description) {
+ // https://www.rfc-editor.org/rfc/rfc8843#name-rtp-header-extensions-consi
+ // ... the identifier used for a given extension MUST identify the same
+ // extension across all the bundled media descriptions.
+ std::vector<const cricket::ContentGroup*> bundle_groups =
+ description.GetGroupsByName(cricket::GROUP_TYPE_BUNDLE);
+ for (const cricket::ContentGroup* bundle_group : bundle_groups) {
+ std::map<int, RtpExtension> id_to_extension;
+ for (const std::string& content_name : bundle_group->content_names()) {
+ const ContentInfo* content_description =
+ description.GetContentByName(content_name);
+ if (!content_description) {
+ LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
+ "A BUNDLE group contains a MID='" + content_name +
+ "' matching no m= section.");
+ }
+ const cricket::MediaContentDescription* media_description =
+ content_description->media_description();
+ RTC_DCHECK(media_description);
+ if (content_description->rejected || !media_description ||
+ !media_description->has_codecs()) {
+ continue;
+ }
+
+ for (const auto& extension : media_description->rtp_header_extensions()) {
+ auto error =
+ FindDuplicateHeaderExtensionIds(extension, id_to_extension);
+ if (!error.ok()) {
+ return error;
+ }
+ }
+ }
+ }
+ return RTCError::OK();
+}
+
+RTCError ValidateRtpHeaderExtensionsForSpecSimulcast(
+ const cricket::SessionDescription& description) {
+ for (const ContentInfo& content : description.contents()) {
+ if (content.type != MediaProtocolType::kRtp || content.rejected) {
+ continue;
+ }
+ const auto media_description = content.media_description();
+ if (!media_description->HasSimulcast()) {
+ continue;
+ }
+ auto extensions = media_description->rtp_header_extensions();
+ auto it = absl::c_find_if(extensions, [](const RtpExtension& ext) {
+ return ext.uri == RtpExtension::kRidUri;
+ });
+ if (it == extensions.end()) {
+ LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
+ "The media section with MID='" + content.mid() +
+ "' negotiates simulcast but does not negotiate "
+ "the RID RTP header extension.");
+ }
+ }
+ return RTCError::OK();
+}
+
+RTCError ValidateSsrcGroups(const cricket::SessionDescription& description) {
+ for (const ContentInfo& content : description.contents()) {
+ if (content.type != MediaProtocolType::kRtp) {
+ continue;
+ }
+ for (const StreamParams& stream : content.media_description()->streams()) {
+ for (const cricket::SsrcGroup& group : stream.ssrc_groups) {
+ // Validate the number of SSRCs for standard SSRC group semantics such
+ // as FID and FEC-FR and the non-standard SIM group.
+ if ((group.semantics == cricket::kFidSsrcGroupSemantics &&
+ group.ssrcs.size() != 2) ||
+ (group.semantics == cricket::kFecFrSsrcGroupSemantics &&
+ group.ssrcs.size() != 2) ||
+ (group.semantics == cricket::kSimSsrcGroupSemantics &&
+ group.ssrcs.size() > kMaxSimulcastStreams)) {
+ LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
+ "The media section with MID='" + content.mid() +
+ "' has a ssrc-group with semantics " +
+ group.semantics +
+ " and an unexpected number of SSRCs.");
+ }
+ }
+ }
+ }
+ return RTCError::OK();
+}
+
+RTCError ValidatePayloadTypes(const cricket::SessionDescription& description) {
+ for (const ContentInfo& content : description.contents()) {
+ if (content.type != MediaProtocolType::kRtp) {
+ continue;
+ }
+ const auto media_description = content.media_description();
+ RTC_DCHECK(media_description);
+ if (content.rejected || !media_description ||
+ !media_description->has_codecs()) {
+ continue;
+ }
+ const auto type = media_description->type();
+ if (type == cricket::MEDIA_TYPE_AUDIO) {
+ RTC_DCHECK(media_description->as_audio());
+ for (const auto& codec : media_description->as_audio()->codecs()) {
+ if (!cricket::UsedPayloadTypes::IsIdValid(
+ codec, media_description->rtcp_mux())) {
+ LOG_AND_RETURN_ERROR(
+ RTCErrorType::INVALID_PARAMETER,
+ "The media section with MID='" + content.mid() +
+ "' used an invalid payload type " + rtc::ToString(codec.id) +
+ " for codec '" + codec.name + ", rtcp-mux:" +
+ (media_description->rtcp_mux() ? "enabled" : "disabled"));
+ }
+ }
+ } else if (type == cricket::MEDIA_TYPE_VIDEO) {
+ RTC_DCHECK(media_description->as_video());
+ for (const auto& codec : media_description->as_video()->codecs()) {
+ if (!cricket::UsedPayloadTypes::IsIdValid(
+ codec, media_description->rtcp_mux())) {
+ LOG_AND_RETURN_ERROR(
+ RTCErrorType::INVALID_PARAMETER,
+ "The media section with MID='" + content.mid() +
+ "' used an invalid payload type " + rtc::ToString(codec.id) +
+ " for codec '" + codec.name + ", rtcp-mux:" +
+ (media_description->rtcp_mux() ? "enabled" : "disabled"));
+ }
+ }
+ }
+ }
+ return RTCError::OK();
+}
+
+bool IsValidOfferToReceiveMedia(int value) {
+ typedef PeerConnectionInterface::RTCOfferAnswerOptions Options;
+ return (value >= Options::kUndefined) &&
+ (value <= Options::kMaxOfferToReceiveMedia);
+}
+
+bool ValidateOfferAnswerOptions(
+ const PeerConnectionInterface::RTCOfferAnswerOptions& rtc_options) {
+ return IsValidOfferToReceiveMedia(rtc_options.offer_to_receive_audio) &&
+ IsValidOfferToReceiveMedia(rtc_options.offer_to_receive_video);
+}
+
+// This method will extract any send encodings that were sent by the remote
+// connection. This is currently only relevant for Simulcast scenario (where
+// the number of layers may be communicated by the server).
+std::vector<RtpEncodingParameters> GetSendEncodingsFromRemoteDescription(
+ const MediaContentDescription& desc) {
+ if (!desc.HasSimulcast()) {
+ return {};
+ }
+ std::vector<RtpEncodingParameters> result;
+ const SimulcastDescription& simulcast = desc.simulcast_description();
+
+ // This is a remote description, the parameters we are after should appear
+ // as receive streams.
+ for (const auto& alternatives : simulcast.receive_layers()) {
+ RTC_DCHECK(!alternatives.empty());
+ // There is currently no way to specify or choose from alternatives.
+ // We will always use the first alternative, which is the most preferred.
+ const SimulcastLayer& layer = alternatives[0];
+ RtpEncodingParameters parameters;
+ parameters.rid = layer.rid;
+ parameters.active = !layer.is_paused;
+ result.push_back(parameters);
+ }
+
+ return result;
+}
+
+RTCError UpdateSimulcastLayerStatusInSender(
+ const std::vector<SimulcastLayer>& layers,
+ rtc::scoped_refptr<RtpSenderInternal> sender) {
+ RTC_DCHECK(sender);
+ RtpParameters parameters = sender->GetParametersInternalWithAllLayers();
+ std::vector<std::string> disabled_layers;
+
+ // The simulcast envelope cannot be changed, only the status of the streams.
+ // So we will iterate over the send encodings rather than the layers.
+ for (RtpEncodingParameters& encoding : parameters.encodings) {
+ auto iter = std::find_if(layers.begin(), layers.end(),
+ [&encoding](const SimulcastLayer& layer) {
+ return layer.rid == encoding.rid;
+ });
+ // A layer that cannot be found may have been removed by the remote party.
+ if (iter == layers.end()) {
+ disabled_layers.push_back(encoding.rid);
+ continue;
+ }
+
+ encoding.active = !iter->is_paused;
+ }
+
+ RTCError result = sender->SetParametersInternalWithAllLayers(parameters);
+ if (result.ok()) {
+ result = sender->DisableEncodingLayers(disabled_layers);
+ }
+
+ return result;
+}
+
+bool SimulcastIsRejected(const ContentInfo* local_content,
+ const MediaContentDescription& answer_media_desc,
+ bool enable_encrypted_rtp_header_extensions) {
+ bool simulcast_offered = local_content &&
+ local_content->media_description() &&
+ local_content->media_description()->HasSimulcast();
+ bool simulcast_answered = answer_media_desc.HasSimulcast();
+ bool rids_supported = RtpExtension::FindHeaderExtensionByUri(
+ answer_media_desc.rtp_header_extensions(), RtpExtension::kRidUri,
+ enable_encrypted_rtp_header_extensions
+ ? RtpExtension::Filter::kPreferEncryptedExtension
+ : RtpExtension::Filter::kDiscardEncryptedExtension);
+ return simulcast_offered && (!simulcast_answered || !rids_supported);
+}
+
+RTCError DisableSimulcastInSender(
+ rtc::scoped_refptr<RtpSenderInternal> sender) {
+ RTC_DCHECK(sender);
+ RtpParameters parameters = sender->GetParametersInternalWithAllLayers();
+ if (parameters.encodings.size() <= 1) {
+ return RTCError::OK();
+ }
+
+ std::vector<std::string> disabled_layers;
+ std::transform(
+ parameters.encodings.begin() + 1, parameters.encodings.end(),
+ std::back_inserter(disabled_layers),
+ [](const RtpEncodingParameters& encoding) { return encoding.rid; });
+ return sender->DisableEncodingLayers(disabled_layers);
+}
+
// The SDP parser used to populate these values by default for the 'content
// name' if an a=mid line was absent.
absl::string_view GetDefaultMidForPlanB(cricket::MediaType media_type) {
  switch (media_type) {
    case cricket::MEDIA_TYPE_AUDIO:
      return cricket::CN_AUDIO;
    case cricket::MEDIA_TYPE_VIDEO:
      return cricket::CN_VIDEO;
    case cricket::MEDIA_TYPE_DATA:
      return cricket::CN_DATA;
    case cricket::MEDIA_TYPE_UNSUPPORTED:
      return "not supported";
  }
  // Every enumerator is handled above; reaching here means a new MediaType
  // value was added without updating this switch.
  RTC_DCHECK_NOTREACHED();
  return "";
}
+
+// Add options to |[audio/video]_media_description_options| from `senders`.
+void AddPlanBRtpSenderOptions(
+ const std::vector<rtc::scoped_refptr<
+ RtpSenderProxyWithInternal<RtpSenderInternal>>>& senders,
+ cricket::MediaDescriptionOptions* audio_media_description_options,
+ cricket::MediaDescriptionOptions* video_media_description_options,
+ int num_sim_layers) {
+ for (const auto& sender : senders) {
+ if (sender->media_type() == cricket::MEDIA_TYPE_AUDIO) {
+ if (audio_media_description_options) {
+ audio_media_description_options->AddAudioSender(
+ sender->id(), sender->internal()->stream_ids());
+ }
+ } else {
+ RTC_DCHECK(sender->media_type() == cricket::MEDIA_TYPE_VIDEO);
+ if (video_media_description_options) {
+ video_media_description_options->AddVideoSender(
+ sender->id(), sender->internal()->stream_ids(), {},
+ SimulcastLayerList(), num_sim_layers);
+ }
+ }
+ }
+}
+
// Builds the MediaDescriptionOptions for `transceiver`'s m= section: media
// type, mid, direction, codec preferences, header extensions to negotiate,
// and (when sending) the sender's track/stream ids, RIDs and simulcast
// layers.
cricket::MediaDescriptionOptions GetMediaDescriptionOptionsForTransceiver(
    RtpTransceiver* transceiver,
    const std::string& mid,
    bool is_create_offer) {
  // NOTE: a stopping transceiver should be treated as a stopped one in
  // createOffer as specified in
  // https://w3c.github.io/webrtc-pc/#dom-rtcpeerconnection-createoffer.
  bool stopped =
      is_create_offer ? transceiver->stopping() : transceiver->stopped();
  cricket::MediaDescriptionOptions media_description_options(
      transceiver->media_type(), mid, transceiver->direction(), stopped);
  media_description_options.codec_preferences =
      transceiver->codec_preferences();
  media_description_options.header_extensions =
      transceiver->GetHeaderExtensionsToNegotiate();
  // This behavior is specified in JSEP. The gist is that:
  // 1. The MSID is included if the RtpTransceiver's direction is sendonly or
  //    sendrecv.
  // 2. If the MSID is included, then it must be included in any subsequent
  //    offer/answer exactly the same until the RtpTransceiver is stopped.
  if (stopped || (!RtpTransceiverDirectionHasSend(transceiver->direction()) &&
                  !transceiver->has_ever_been_used_to_send())) {
    return media_description_options;
  }

  cricket::SenderOptions sender_options;
  sender_options.track_id = transceiver->sender()->id();
  sender_options.stream_ids = transceiver->sender()->stream_ids();

  // The following sets up RIDs and Simulcast.
  // RIDs are included if Simulcast is requested or if any RID was specified.
  RtpParameters send_parameters =
      transceiver->sender_internal()->GetParametersInternalWithAllLayers();
  bool has_rids = std::any_of(send_parameters.encodings.begin(),
                              send_parameters.encodings.end(),
                              [](const RtpEncodingParameters& encoding) {
                                return !encoding.rid.empty();
                              });

  std::vector<RidDescription> send_rids;
  SimulcastLayerList send_layers;
  for (const RtpEncodingParameters& encoding : send_parameters.encodings) {
    if (encoding.rid.empty()) {
      continue;
    }
    send_rids.push_back(RidDescription(encoding.rid, RidDirection::kSend));
    // An inactive encoding is signaled as a paused simulcast layer.
    send_layers.AddLayer(SimulcastLayer(encoding.rid, !encoding.active));
  }

  if (has_rids) {
    sender_options.rids = send_rids;
  }

  sender_options.simulcast_layers = send_layers;
  // When RIDs are configured, we must set num_sim_layers to 0.
  // Otherwise, num_sim_layers must be 1 because either there is no
  // simulcast, or simulcast is achieved by munging the SDP.
  sender_options.num_sim_layers = has_rids ? 0 : 1;
  media_description_options.sender_options.push_back(sender_options);

  return media_description_options;
}
+
+// Returns the ContentInfo at mline index `i`, or null if none exists.
+const ContentInfo* GetContentByIndex(const SessionDescriptionInterface* sdesc,
+ size_t i) {
+ if (!sdesc) {
+ return nullptr;
+ }
+ const ContentInfos& contents = sdesc->description()->contents();
+ return (i < contents.size() ? &contents[i] : nullptr);
+}
+
// From `rtc_options`, fill parts of `session_options` shared by all generated
// m= sections (in other words, nothing that involves a map/array).
void ExtractSharedMediaSessionOptions(
    const PeerConnectionInterface::RTCOfferAnswerOptions& rtc_options,
    cricket::MediaSessionOptions* session_options) {
  session_options->vad_enabled = rtc_options.voice_activity_detection;
  session_options->bundle_enabled = rtc_options.use_rtp_mux;
  session_options->raw_packetization_for_video =
      rtc_options.raw_packetization_for_video;
}
+
// Generate a RTCP CNAME when a PeerConnection is created.
std::string GenerateRtcpCname() {
  std::string cname;
  if (!rtc::CreateRandomString(kRtcpCnameLength, &cname)) {
    // Generation failure is not expected: crash debug builds, log in release
    // and fall through returning whatever `cname` holds.
    RTC_LOG(LS_ERROR) << "Failed to generate CNAME.";
    RTC_DCHECK_NOTREACHED();
  }
  return cname;
}
+
+// Check if we can send `new_stream` on a PeerConnection.
+bool CanAddLocalMediaStream(webrtc::StreamCollectionInterface* current_streams,
+ webrtc::MediaStreamInterface* new_stream) {
+ if (!new_stream || !current_streams) {
+ return false;
+ }
+ if (current_streams->find(new_stream->id()) != nullptr) {
+ RTC_LOG(LS_ERROR) << "MediaStream with ID " << new_stream->id()
+ << " is already added.";
+ return false;
+ }
+ return true;
+}
+
// Looks up the DTLS transport for `mid` via `controller`, hopping to the
// network thread (synchronously) because JsepTransportController must be
// queried there.
rtc::scoped_refptr<webrtc::DtlsTransport> LookupDtlsTransportByMid(
    rtc::Thread* network_thread,
    JsepTransportController* controller,
    const std::string& mid) {
  // TODO(tommi): Can we post this (and associated operations where this
  // function is called) to the network thread and avoid this BlockingCall?
  // We might be able to simplify a few things if we set the transport on
  // the network thread and then update the implementation to check that
  // the set_ and relevant get methods are always called on the network
  // thread (we'll need to update proxy maps).
  return network_thread->BlockingCall(
      [controller, &mid] { return controller->LookupDtlsTransportByMid(mid); });
}
+
+bool ContentHasHeaderExtension(const cricket::ContentInfo& content_info,
+ absl::string_view header_extension_uri) {
+ for (const RtpExtension& rtp_header_extension :
+ content_info.media_description()->rtp_header_extensions()) {
+ if (rtp_header_extension.uri == header_extension_uri) {
+ return true;
+ }
+ }
+ return false;
+}
+
+} // namespace
+
+void UpdateRtpHeaderExtensionPreferencesFromSdpMunging(
+ const cricket::SessionDescription* description,
+ TransceiverList* transceivers) {
+ // This integrates the RTP Header Extension Control API and local SDP munging
+ // for backward compability reasons. If something was enabled in the local
+ // description via SDP munging, consider it non-stopped in the API as well
+ // so that is shows up in subsequent offers/answers.
+ RTC_DCHECK(description);
+ RTC_DCHECK(transceivers);
+ for (const auto& content : description->contents()) {
+ auto transceiver = transceivers->FindByMid(content.name);
+ if (!transceiver) {
+ continue;
+ }
+ auto extension_capabilities = transceiver->GetHeaderExtensionsToNegotiate();
+ // Set the capability of every extension we see here to "sendrecv".
+ for (auto& ext : content.media_description()->rtp_header_extensions()) {
+ auto it = absl::c_find_if(extension_capabilities,
+ [&ext](const RtpHeaderExtensionCapability c) {
+ return ext.uri == c.uri;
+ });
+ if (it != extension_capabilities.end()) {
+ it->direction = RtpTransceiverDirection::kSendRecv;
+ }
+ }
+ transceiver->SetHeaderExtensionsToNegotiate(extension_capabilities);
+ }
+}
+
// This class stores state related to a SetRemoteDescription operation, captures
// and reports potential errors that might occur and makes sure to notify the
// observer of the operation and the operations chain of completion.
class SdpOfferAnswerHandler::RemoteDescriptionOperation {
 public:
  RemoteDescriptionOperation(
      SdpOfferAnswerHandler* handler,
      std::unique_ptr<SessionDescriptionInterface> desc,
      rtc::scoped_refptr<SetRemoteDescriptionObserverInterface> observer,
      std::function<void()> operations_chain_callback)
      : handler_(handler),
        desc_(std::move(desc)),
        observer_(std::move(observer)),
        operations_chain_callback_(std::move(operations_chain_callback)),
        unified_plan_(handler_->IsUnifiedPlan()) {
    if (!desc_) {
      // SdpType has no "invalid" enumerator; static_cast<SdpType>(-1) is used
      // as a sentinel for "no description" and checked in SignalCompletion().
      type_ = static_cast<SdpType>(-1);
      InvalidParam("SessionDescription is NULL.");
    } else {
      type_ = desc_->GetType();
    }
  }

  // Destruction notifies the observer (if it hasn't been notified yet) and
  // then unblocks the operations chain.
  ~RemoteDescriptionOperation() {
    RTC_DCHECK_RUN_ON(handler_->signaling_thread());
    SignalCompletion();
    operations_chain_callback_();
  }

  // True while no error has been captured.
  bool ok() const { return error_.ok(); }

  // Notifies the observer that the operation is complete and releases the
  // reference to the observer.
  void SignalCompletion() {
    if (!observer_)
      return;

    if (!error_.ok() && type_ != static_cast<SdpType>(-1)) {
      // Prefix the captured error with a standard "Failed to set remote ..."
      // style message before reporting it.
      std::string error_message =
          GetSetDescriptionErrorMessage(cricket::CS_REMOTE, type_, error_);
      RTC_LOG(LS_ERROR) << error_message;
      error_.set_message(error_message);
    }

    observer_->OnSetRemoteDescriptionComplete(error_);
    observer_ = nullptr;  // Only fire the notification once.
  }

  // If a session error has occurred the PeerConnection is in a possibly
  // inconsistent state so fail right away.
  bool HaveSessionError() {
    RTC_DCHECK(ok());
    if (handler_->session_error() != SessionError::kNone)
      InternalError(handler_->GetSessionErrorMsg());
    return !ok();
  }

  // Returns true if the operation was a rollback operation. If this function
  // returns true, the caller should consider the operation complete. Otherwise
  // proceed to the next step.
  bool MaybeRollback() {
    RTC_DCHECK_RUN_ON(handler_->signaling_thread());
    RTC_DCHECK(ok());
    if (type_ != SdpType::kRollback) {
      // Check if we can do an implicit rollback.
      if (type_ == SdpType::kOffer && unified_plan_ &&
          handler_->pc_->configuration()->enable_implicit_rollback &&
          handler_->signaling_state() ==
              PeerConnectionInterface::kHaveLocalOffer) {
        handler_->Rollback(type_);
      }
      return false;
    }

    // type_ is known to be kRollback past this point; explicit rollback is
    // only supported with Unified Plan.
    if (unified_plan_) {
      error_ = handler_->Rollback(type_);
    } else if (type_ == SdpType::kRollback) {
      Unsupported("Rollback not supported in Plan B");
    }

    return true;
  }

  // Report to UMA the format of the received offer or answer.
  void ReportOfferAnswerUma() {
    RTC_DCHECK(ok());
    if (type_ == SdpType::kOffer || type_ == SdpType::kAnswer) {
      handler_->pc_->ReportSdpBundleUsage(*desc_.get());
    }
  }

  // Checks if the session description for the operation is valid. If not, the
  // function captures error information and returns false. Note that if the
  // return value is false, the operation should be considered done.
  bool IsDescriptionValid() {
    RTC_DCHECK_RUN_ON(handler_->signaling_thread());
    RTC_DCHECK(ok());
    RTC_DCHECK(bundle_groups_by_mid_.empty()) << "Already called?";
    // Cache the bundle groups; later steps read them via
    // bundle_groups_by_mid().
    bundle_groups_by_mid_ = GetBundleGroupsByMid(description());
    error_ = handler_->ValidateSessionDescription(
        desc_.get(), cricket::CS_REMOTE, bundle_groups_by_mid_);
    return ok();
  }

  // Transfers ownership of the session description object over to `handler_`.
  bool ReplaceRemoteDescriptionAndCheckError() {
    RTC_DCHECK_RUN_ON(handler_->signaling_thread());
    RTC_DCHECK(ok());
    RTC_DCHECK(desc_);
    RTC_DCHECK(!replaced_remote_description_);
#if RTC_DCHECK_IS_ON
    const auto* existing_remote_description = handler_->remote_description();
#endif

    error_ = handler_->ReplaceRemoteDescription(std::move(desc_), type_,
                                                &replaced_remote_description_);

    if (ok()) {
#if RTC_DCHECK_IS_ON
      // Sanity check that our `old_remote_description()` method always returns
      // the same value as `remote_description()` did before the call to
      // ReplaceRemoteDescription.
      RTC_DCHECK_EQ(existing_remote_description, old_remote_description());
#endif
    } else {
      SetAsSessionError();
    }

    return ok();
  }

  // Creates/updates transceivers (Unified Plan) or media channels (Plan B)
  // from the newly applied remote description.
  bool UpdateChannels() {
    RTC_DCHECK(ok());
    RTC_DCHECK(!desc_) << "ReplaceRemoteDescription hasn't been called";

    const auto* remote_description = handler_->remote_description();

    const cricket::SessionDescription* session_desc =
        remote_description->description();

    // Transport and Media channels will be created only when offer is set.
    if (unified_plan_) {
      error_ = handler_->UpdateTransceiversAndDataChannels(
          cricket::CS_REMOTE, *remote_description,
          handler_->local_description(), old_remote_description(),
          bundle_groups_by_mid_);
    } else {
      // Media channels will be created only when offer is set. These may use
      // new transports just created by PushdownTransportDescription.
      if (type_ == SdpType::kOffer) {
        // TODO(mallinath) - Handle CreateChannel failure, as new local
        // description is applied. Restore back to old description.
        error_ = handler_->CreateChannels(*session_desc);
      }
      // Remove unused channels if MediaContentDescription is rejected.
      handler_->RemoveUnusedChannels(session_desc);
    }

    return ok();
  }

  // Advances the signaling state machine for the applied description.
  bool UpdateSessionState() {
    RTC_DCHECK(ok());
    error_ = handler_->UpdateSessionState(
        type_, cricket::CS_REMOTE,
        handler_->remote_description()->description(), bundle_groups_by_mid_);
    if (!ok())
      SetAsSessionError();
    return ok();
  }

  // Applies any ICE candidates embedded in the remote description.
  bool UseCandidatesInRemoteDescription() {
    RTC_DCHECK(ok());
    if (handler_->local_description() &&
        !handler_->UseCandidatesInRemoteDescription()) {
      InvalidParam(kInvalidCandidates);
    }
    return ok();
  }

  // Convenience getter for desc_->GetType().
  SdpType type() const { return type_; }
  bool unified_plan() const { return unified_plan_; }
  cricket::SessionDescription* description() { return desc_->description(); }

  // Returns the remote description that was in effect before
  // ReplaceRemoteDescription was called. Only valid after that call.
  const SessionDescriptionInterface* old_remote_description() const {
    RTC_DCHECK(!desc_) << "Called before replacing the remote description";
    if (type_ == SdpType::kAnswer)
      return replaced_remote_description_.get();
    return replaced_remote_description_
               ? replaced_remote_description_.get()
               : handler_->current_remote_description();
  }

  // Returns a reference to a cached map of bundle groups ordered by mid.
  // Note that this will only be valid after a successful call to
  // `IsDescriptionValid`.
  const std::map<std::string, const cricket::ContentGroup*>&
  bundle_groups_by_mid() const {
    RTC_DCHECK(ok());
    return bundle_groups_by_mid_;
  }

 private:
  // Convenience methods for populating the embedded `error_` object.
  void Unsupported(std::string message) {
    SetError(RTCErrorType::UNSUPPORTED_OPERATION, std::move(message));
  }

  void InvalidParam(std::string message) {
    SetError(RTCErrorType::INVALID_PARAMETER, std::move(message));
  }

  void InternalError(std::string message) {
    SetError(RTCErrorType::INTERNAL_ERROR, std::move(message));
  }

  void SetError(RTCErrorType type, std::string message) {
    RTC_DCHECK(ok()) << "Overwriting an existing error?";
    error_ = RTCError(type, std::move(message));
  }

  // Called when the PeerConnection could be in an inconsistent state and we set
  // the session error so that future calls to
  // SetLocalDescription/SetRemoteDescription fail.
  void SetAsSessionError() {
    RTC_DCHECK(!ok());
    handler_->SetSessionError(SessionError::kContent, error_.message());
  }

  SdpOfferAnswerHandler* const handler_;
  // The incoming description; moved into `handler_` by
  // ReplaceRemoteDescriptionAndCheckError().
  std::unique_ptr<SessionDescriptionInterface> desc_;
  // Keeps the replaced session description object alive while the operation
  // is taking place since methods that depend on `old_remote_description()`
  // for updating the state, need it.
  std::unique_ptr<SessionDescriptionInterface> replaced_remote_description_;
  rtc::scoped_refptr<SetRemoteDescriptionObserverInterface> observer_;
  std::function<void()> operations_chain_callback_;
  RTCError error_ = RTCError::OK();
  std::map<std::string, const cricket::ContentGroup*> bundle_groups_by_mid_;
  SdpType type_;
  const bool unified_plan_;
};
// Used by parameterless SetLocalDescription() to create an offer or answer.
// Upon completion of creating the session description, SetLocalDescription() is
// invoked with the result.
class SdpOfferAnswerHandler::ImplicitCreateSessionDescriptionObserver
    : public CreateSessionDescriptionObserver {
 public:
  ImplicitCreateSessionDescriptionObserver(
      rtc::WeakPtr<SdpOfferAnswerHandler> sdp_handler,
      rtc::scoped_refptr<SetLocalDescriptionObserverInterface>
          set_local_description_observer)
      : sdp_handler_(std::move(sdp_handler)),
        set_local_description_observer_(
            std::move(set_local_description_observer)) {}
  ~ImplicitCreateSessionDescriptionObserver() override {
    // Exactly one of OnSuccess/OnFailure must have fired before destruction,
    // otherwise the operations chain would stall.
    RTC_DCHECK(was_called_);
  }

  // Installs the callback that unblocks the operations chain; must be set
  // before OnSuccess/OnFailure can fire.
  void SetOperationCompleteCallback(
      std::function<void()> operation_complete_callback) {
    operation_complete_callback_ = std::move(operation_complete_callback);
  }

  bool was_called() const { return was_called_; }

  void OnSuccess(SessionDescriptionInterface* desc_ptr) override {
    RTC_DCHECK(!was_called_);
    // Take ownership of the raw pointer handed over by the factory.
    std::unique_ptr<SessionDescriptionInterface> desc(desc_ptr);
    was_called_ = true;

    // Abort early if `pc_` is no longer valid.
    if (!sdp_handler_) {
      operation_complete_callback_();
      return;
    }
    // DoSetLocalDescription() is a synchronous operation that invokes
    // `set_local_description_observer_` with the result.
    sdp_handler_->DoSetLocalDescription(
        std::move(desc), std::move(set_local_description_observer_));
    operation_complete_callback_();
  }

  void OnFailure(RTCError error) override {
    RTC_DCHECK(!was_called_);
    was_called_ = true;
    // Forward the creation failure to the SetLocalDescription observer with
    // added context.
    set_local_description_observer_->OnSetLocalDescriptionComplete(RTCError(
        error.type(), std::string("SetLocalDescription failed to create "
                                  "session description - ") +
                          error.message()));
    operation_complete_callback_();
  }

 private:
  bool was_called_ = false;
  rtc::WeakPtr<SdpOfferAnswerHandler> sdp_handler_;
  rtc::scoped_refptr<SetLocalDescriptionObserverInterface>
      set_local_description_observer_;
  std::function<void()> operation_complete_callback_;
};
+
// Wraps a CreateSessionDescriptionObserver and an OperationsChain operation
// complete callback. When the observer is invoked, the wrapped observer is
// invoked followed by invoking the completion callback.
class CreateSessionDescriptionObserverOperationWrapper
    : public CreateSessionDescriptionObserver {
 public:
  CreateSessionDescriptionObserverOperationWrapper(
      rtc::scoped_refptr<CreateSessionDescriptionObserver> observer,
      std::function<void()> operation_complete_callback)
      : observer_(std::move(observer)),
        operation_complete_callback_(std::move(operation_complete_callback)) {
    RTC_DCHECK(observer_);
  }
  ~CreateSessionDescriptionObserverOperationWrapper() override {
#if RTC_DCHECK_IS_ON
    // One of OnSuccess/OnFailure must have fired, otherwise the operations
    // chain was never unblocked.
    RTC_DCHECK(was_called_);
#endif
  }

  void OnSuccess(SessionDescriptionInterface* desc) override {
#if RTC_DCHECK_IS_ON
    RTC_DCHECK(!was_called_);
    was_called_ = true;
#endif  // RTC_DCHECK_IS_ON
    // Completing the operation before invoking the observer allows the observer
    // to execute SetLocalDescription() without delay.
    operation_complete_callback_();
    // NOTE: ownership of `desc` is passed on to the wrapped observer.
    observer_->OnSuccess(desc);
  }

  void OnFailure(RTCError error) override {
#if RTC_DCHECK_IS_ON
    RTC_DCHECK(!was_called_);
    was_called_ = true;
#endif  // RTC_DCHECK_IS_ON
    operation_complete_callback_();
    observer_->OnFailure(std::move(error));
  }

 private:
#if RTC_DCHECK_IS_ON
  // Debug-only guard that each observer instance fires exactly once.
  bool was_called_ = false;
#endif  // RTC_DCHECK_IS_ON
  rtc::scoped_refptr<CreateSessionDescriptionObserver> observer_;
  std::function<void()> operation_complete_callback_;
};
+
// Wrapper for SetSessionDescriptionObserver that invokes the success or failure
// callback in a posted message handled by the peer connection. This introduces
// a delay that prevents recursive API calls by the observer, but this also
// means that the PeerConnection can be modified before the observer sees the
// result of the operation. This is ill-advised for synchronizing states.
//
// Implements both the SetLocalDescriptionObserverInterface and the
// SetRemoteDescriptionObserverInterface.
class SdpOfferAnswerHandler::SetSessionDescriptionObserverAdapter
    : public SetLocalDescriptionObserverInterface,
      public SetRemoteDescriptionObserverInterface {
 public:
  SetSessionDescriptionObserverAdapter(
      rtc::WeakPtr<SdpOfferAnswerHandler> handler,
      rtc::scoped_refptr<SetSessionDescriptionObserver> inner_observer)
      : handler_(std::move(handler)),
        inner_observer_(std::move(inner_observer)) {}

  // SetLocalDescriptionObserverInterface implementation.
  void OnSetLocalDescriptionComplete(RTCError error) override {
    OnSetDescriptionComplete(std::move(error));
  }
  // SetRemoteDescriptionObserverInterface implementation.
  void OnSetRemoteDescriptionComplete(RTCError error) override {
    OnSetDescriptionComplete(std::move(error));
  }

 private:
  // Posts the result to `inner_observer_` via the PC's message handler; a
  // dead weak handler means the PC is gone and the result is dropped.
  void OnSetDescriptionComplete(RTCError error) {
    if (!handler_)
      return;
    if (error.ok()) {
      handler_->pc_->message_handler()->PostSetSessionDescriptionSuccess(
          inner_observer_.get());
    } else {
      handler_->pc_->message_handler()->PostSetSessionDescriptionFailure(
          inner_observer_.get(), std::move(error));
    }
  }

  rtc::WeakPtr<SdpOfferAnswerHandler> handler_;
  rtc::scoped_refptr<SetSessionDescriptionObserver> inner_observer_;
};
+
+class SdpOfferAnswerHandler::LocalIceCredentialsToReplace {
+ public:
+ // Sets the ICE credentials that need restarting to the ICE credentials of
+ // the current and pending descriptions.
+ void SetIceCredentialsFromLocalDescriptions(
+ const SessionDescriptionInterface* current_local_description,
+ const SessionDescriptionInterface* pending_local_description) {
+ ice_credentials_.clear();
+ if (current_local_description) {
+ AppendIceCredentialsFromSessionDescription(*current_local_description);
+ }
+ if (pending_local_description) {
+ AppendIceCredentialsFromSessionDescription(*pending_local_description);
+ }
+ }
+
+ void ClearIceCredentials() { ice_credentials_.clear(); }
+
+ // Returns true if we have ICE credentials that need restarting.
+ bool HasIceCredentials() const { return !ice_credentials_.empty(); }
+
+ // Returns true if `local_description` shares no ICE credentials with the
+ // ICE credentials that need restarting.
+ bool SatisfiesIceRestart(
+ const SessionDescriptionInterface& local_description) const {
+ for (const auto& transport_info :
+ local_description.description()->transport_infos()) {
+ if (ice_credentials_.find(std::make_pair(
+ transport_info.description.ice_ufrag,
+ transport_info.description.ice_pwd)) != ice_credentials_.end()) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ private:
+ void AppendIceCredentialsFromSessionDescription(
+ const SessionDescriptionInterface& desc) {
+ for (const auto& transport_info : desc.description()->transport_infos()) {
+ ice_credentials_.insert(
+ std::make_pair(transport_info.description.ice_ufrag,
+ transport_info.description.ice_pwd));
+ }
+ }
+
+ std::set<std::pair<std::string, std::string>> ice_credentials_;
+};
+
SdpOfferAnswerHandler::SdpOfferAnswerHandler(PeerConnectionSdpMethods* pc,
                                             ConnectionContext* context)
    : pc_(pc),
      context_(context),
      local_streams_(StreamCollection::Create()),
      remote_streams_(StreamCollection::Create()),
      operations_chain_(rtc::OperationsChain::Create()),
      rtcp_cname_(GenerateRtcpCname()),
      local_ice_credentials_to_replace_(new LocalIceCredentialsToReplace()),
      weak_ptr_factory_(this) {
  // Run OnOperationsChainEmpty() each time the chain drains. The weak pointer
  // guards against the callback firing after `this` is destroyed.
  operations_chain_->SetOnChainEmptyCallback(
      [this_weak_ptr = weak_ptr_factory_.GetWeakPtr()]() {
        if (!this_weak_ptr)
          return;
        this_weak_ptr->OnOperationsChainEmpty();
      });
}
+
+SdpOfferAnswerHandler::~SdpOfferAnswerHandler() {}
+
// Static
// Factory: constructs a handler and runs its Initialize() step before
// returning it.
std::unique_ptr<SdpOfferAnswerHandler> SdpOfferAnswerHandler::Create(
    PeerConnectionSdpMethods* pc,
    const PeerConnectionInterface::RTCConfiguration& configuration,
    PeerConnectionDependencies& dependencies,
    ConnectionContext* context) {
  auto handler = absl::WrapUnique(new SdpOfferAnswerHandler(pc, context));
  handler->Initialize(configuration, dependencies, context);
  return handler;
}
+
// Applies `configuration` (jitter buffer, screencast bitrate, certificates)
// and `dependencies` (certificate generator, video bitrate allocator
// factory), and builds the session description factory.
void SdpOfferAnswerHandler::Initialize(
    const PeerConnectionInterface::RTCConfiguration& configuration,
    PeerConnectionDependencies& dependencies,
    ConnectionContext* context) {
  RTC_DCHECK_RUN_ON(signaling_thread());
  // 100 kbps is used by default, but can be overridden by a non-standard
  // RTCConfiguration value (not available on Web).
  video_options_.screencast_min_bitrate_kbps =
      configuration.screencast_min_bitrate.value_or(100);

  audio_options_.audio_jitter_buffer_max_packets =
      configuration.audio_jitter_buffer_max_packets;

  audio_options_.audio_jitter_buffer_fast_accelerate =
      configuration.audio_jitter_buffer_fast_accelerate;

  audio_options_.audio_jitter_buffer_min_delay_ms =
      configuration.audio_jitter_buffer_min_delay_ms;

  // Obtain a certificate from RTCConfiguration if any were provided (optional).
  rtc::scoped_refptr<rtc::RTCCertificate> certificate;
  if (!configuration.certificates.empty()) {
    // TODO(hbos,torbjorng): Decide on certificate-selection strategy instead of
    // just picking the first one. The decision should be made based on the DTLS
    // handshake. The DTLS negotiations need to know about all certificates.
    certificate = configuration.certificates[0];
  }

  webrtc_session_desc_factory_ =
      std::make_unique<WebRtcSessionDescriptionFactory>(
          context, this, pc_->session_id(), pc_->dtls_enabled(),
          std::move(dependencies.cert_generator), std::move(certificate),
          // Invoked when a certificate becomes available; pushed to the
          // transport controller on the signaling thread.
          [this](const rtc::scoped_refptr<rtc::RTCCertificate>& certificate) {
            RTC_DCHECK_RUN_ON(signaling_thread());
            transport_controller_s()->SetLocalCertificate(certificate);
          },
          pc_->trials());

  if (pc_->options()->disable_encryption) {
    webrtc_session_desc_factory_->SetSdesPolicy(cricket::SEC_DISABLED);
  }

  webrtc_session_desc_factory_->set_enable_encrypted_rtp_header_extensions(
      pc_->GetCryptoOptions().srtp.enable_encrypted_rtp_header_extensions);
  webrtc_session_desc_factory_->set_is_unified_plan(IsUnifiedPlan());

  // Use the injected video bitrate allocator factory if one was provided,
  // otherwise fall back to the built-in one.
  if (dependencies.video_bitrate_allocator_factory) {
    video_bitrate_allocator_factory_ =
        std::move(dependencies.video_bitrate_allocator_factory);
  } else {
    video_bitrate_allocator_factory_ =
        CreateBuiltinVideoBitrateAllocatorFactory();
  }
}
+
// ==================================================================
// Access to pc_ variables
cricket::MediaEngineInterface* SdpOfferAnswerHandler::media_engine() const {
  RTC_DCHECK(context_);
  return context_->media_engine();
}

// May return null if the PeerConnection's RTP manager is not available.
TransceiverList* SdpOfferAnswerHandler::transceivers() {
  if (!pc_->rtp_manager()) {
    return nullptr;
  }
  return pc_->rtp_manager()->transceivers();
}

const TransceiverList* SdpOfferAnswerHandler::transceivers() const {
  if (!pc_->rtp_manager()) {
    return nullptr;
  }
  return pc_->rtp_manager()->transceivers();
}

// Thin forwarders to the PeerConnection. The _s/_n suffixes distinguish the
// signaling-thread and network-thread transport controller accessors.
JsepTransportController* SdpOfferAnswerHandler::transport_controller_s() {
  return pc_->transport_controller_s();
}
JsepTransportController* SdpOfferAnswerHandler::transport_controller_n() {
  return pc_->transport_controller_n();
}
const JsepTransportController* SdpOfferAnswerHandler::transport_controller_s()
    const {
  return pc_->transport_controller_s();
}
const JsepTransportController* SdpOfferAnswerHandler::transport_controller_n()
    const {
  return pc_->transport_controller_n();
}
DataChannelController* SdpOfferAnswerHandler::data_channel_controller() {
  return pc_->data_channel_controller();
}
const DataChannelController* SdpOfferAnswerHandler::data_channel_controller()
    const {
  return pc_->data_channel_controller();
}
cricket::PortAllocator* SdpOfferAnswerHandler::port_allocator() {
  return pc_->port_allocator();
}
const cricket::PortAllocator* SdpOfferAnswerHandler::port_allocator() const {
  return pc_->port_allocator();
}
RtpTransmissionManager* SdpOfferAnswerHandler::rtp_manager() {
  return pc_->rtp_manager();
}
const RtpTransmissionManager* SdpOfferAnswerHandler::rtp_manager() const {
  return pc_->rtp_manager();
}
+
+// ===================================================================
+
void SdpOfferAnswerHandler::PrepareForShutdown() {
  RTC_DCHECK_RUN_ON(signaling_thread());
  // Invalidate outstanding weak pointers so pending callbacks bound via
  // weak_ptr_factory_.GetWeakPtr() become no-ops.
  weak_ptr_factory_.InvalidateWeakPtrs();
}
+
void SdpOfferAnswerHandler::Close() {
  // Transition the signaling state machine to its terminal kClosed state.
  ChangeSignalingState(PeerConnectionInterface::kClosed);
}
+
void SdpOfferAnswerHandler::RestartIce() {
  RTC_DCHECK_RUN_ON(signaling_thread());
  // Record the current/pending local ICE credentials as needing replacement,
  // then re-evaluate whether negotiation is needed.
  local_ice_credentials_to_replace_->SetIceCredentialsFromLocalDescriptions(
      current_local_description(), pending_local_description());
  UpdateNegotiationNeeded();
}
+
// Thread accessors, forwarded from the connection context.
rtc::Thread* SdpOfferAnswerHandler::signaling_thread() const {
  return context_->signaling_thread();
}

rtc::Thread* SdpOfferAnswerHandler::network_thread() const {
  return context_->network_thread();
}
+
// Queues an offer creation on the operations chain and reports the result to
// `observer`.
void SdpOfferAnswerHandler::CreateOffer(
    CreateSessionDescriptionObserver* observer,
    const PeerConnectionInterface::RTCOfferAnswerOptions& options) {
  RTC_DCHECK_RUN_ON(signaling_thread());
  // Chain this operation. If asynchronous operations are pending on the chain,
  // this operation will be queued to be invoked, otherwise the contents of the
  // lambda will execute immediately.
  operations_chain_->ChainOperation(
      [this_weak_ptr = weak_ptr_factory_.GetWeakPtr(),
       // Hold a reference so the observer outlives the queued operation.
       observer_refptr =
           rtc::scoped_refptr<CreateSessionDescriptionObserver>(observer),
       options](std::function<void()> operations_chain_callback) {
        // Abort early if `this_weak_ptr` is no longer valid.
        if (!this_weak_ptr) {
          observer_refptr->OnFailure(
              RTCError(RTCErrorType::INTERNAL_ERROR,
                       "CreateOffer failed because the session was shut down"));
          operations_chain_callback();
          return;
        }
        // The operation completes asynchronously when the wrapper is invoked.
        auto observer_wrapper = rtc::make_ref_counted<
            CreateSessionDescriptionObserverOperationWrapper>(
            std::move(observer_refptr), std::move(operations_chain_callback));
        this_weak_ptr->DoCreateOffer(options, observer_wrapper);
      });
}
+
+// Legacy raw-pointer overload of SetLocalDescription. Takes ownership of
+// `desc_ptr` immediately (wrapping it in a unique_ptr inside the lambda
+// capture) and chains the actual work on the operations chain.
+void SdpOfferAnswerHandler::SetLocalDescription(
+    SetSessionDescriptionObserver* observer,
+    SessionDescriptionInterface* desc_ptr) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  // Chain this operation. If asynchronous operations are pending on the chain,
+  // this operation will be queued to be invoked, otherwise the contents of the
+  // lambda will execute immediately.
+  operations_chain_->ChainOperation(
+      [this_weak_ptr = weak_ptr_factory_.GetWeakPtr(),
+       observer_refptr =
+           rtc::scoped_refptr<SetSessionDescriptionObserver>(observer),
+       desc = std::unique_ptr<SessionDescriptionInterface>(desc_ptr)](
+          std::function<void()> operations_chain_callback) mutable {
+        // Abort early if `this_weak_ptr` is no longer valid.
+        if (!this_weak_ptr) {
+          // For consistency with SetSessionDescriptionObserverAdapter whose
+          // posted messages doesn't get processed when the PC is destroyed, we
+          // do not inform `observer_refptr` that the operation failed.
+          operations_chain_callback();
+          return;
+        }
+        // SetSessionDescriptionObserverAdapter takes care of making sure the
+        // `observer_refptr` is invoked in a posted message.
+        this_weak_ptr->DoSetLocalDescription(
+            std::move(desc),
+            rtc::make_ref_counted<SetSessionDescriptionObserverAdapter>(
+                this_weak_ptr, observer_refptr));
+        // For backwards-compatability reasons, we declare the operation as
+        // completed here (rather than in a post), so that the operation chain
+        // is not blocked by this operation when the observer is invoked. This
+        // allows the observer to trigger subsequent offer/answer operations
+        // synchronously if the operation chain is now empty.
+        operations_chain_callback();
+      });
+}
+
+// Modern overload of SetLocalDescription taking ownership via unique_ptr and
+// reporting completion through SetLocalDescriptionObserverInterface. The work
+// itself (DoSetLocalDescription) runs synchronously inside the chained lambda.
+void SdpOfferAnswerHandler::SetLocalDescription(
+    std::unique_ptr<SessionDescriptionInterface> desc,
+    rtc::scoped_refptr<SetLocalDescriptionObserverInterface> observer) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  // Chain this operation. If asynchronous operations are pending on the chain,
+  // this operation will be queued to be invoked, otherwise the contents of the
+  // lambda will execute immediately.
+  operations_chain_->ChainOperation(
+      [this_weak_ptr = weak_ptr_factory_.GetWeakPtr(), observer,
+       desc = std::move(desc)](
+          std::function<void()> operations_chain_callback) mutable {
+        // Abort early if `this_weak_ptr` is no longer valid.
+        if (!this_weak_ptr) {
+          observer->OnSetLocalDescriptionComplete(RTCError(
+              RTCErrorType::INTERNAL_ERROR,
+              "SetLocalDescription failed because the session was shut down"));
+          operations_chain_callback();
+          return;
+        }
+        this_weak_ptr->DoSetLocalDescription(std::move(desc), observer);
+        // DoSetLocalDescription() is implemented as a synchronous operation.
+        // The `observer` will already have been informed that it completed, and
+        // we can mark this operation as complete without any loose ends.
+        operations_chain_callback();
+      });
+}
+
+void SdpOfferAnswerHandler::SetLocalDescription(
+    SetSessionDescriptionObserver* observer) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  // Adapt the legacy observer to the new interface and defer to the
+  // implicit-description overload, which creates the offer/answer itself.
+  auto wrapped_observer =
+      rtc::make_ref_counted<SetSessionDescriptionObserverAdapter>(
+          weak_ptr_factory_.GetWeakPtr(),
+          rtc::scoped_refptr<SetSessionDescriptionObserver>(observer));
+  SetLocalDescription(std::move(wrapped_observer));
+}
+
+// Implicit SetLocalDescription (no description supplied): depending on the
+// current signaling state, an offer or an answer is created internally and
+// then applied via DoSetLocalDescription() by the create_sdp_observer.
+void SdpOfferAnswerHandler::SetLocalDescription(
+    rtc::scoped_refptr<SetLocalDescriptionObserverInterface> observer) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  // The `create_sdp_observer` handles performing DoSetLocalDescription() with
+  // the resulting description as well as completing the operation.
+  auto create_sdp_observer =
+      rtc::make_ref_counted<ImplicitCreateSessionDescriptionObserver>(
+          weak_ptr_factory_.GetWeakPtr(), observer);
+  // Chain this operation. If asynchronous operations are pending on the chain,
+  // this operation will be queued to be invoked, otherwise the contents of the
+  // lambda will execute immediately.
+  operations_chain_->ChainOperation(
+      [this_weak_ptr = weak_ptr_factory_.GetWeakPtr(),
+       create_sdp_observer](std::function<void()> operations_chain_callback) {
+        // The `create_sdp_observer` is responsible for completing the
+        // operation.
+        create_sdp_observer->SetOperationCompleteCallback(
+            std::move(operations_chain_callback));
+        // Abort early if `this_weak_ptr` is no longer valid. This triggers the
+        // same code path as if DoCreateOffer() or DoCreateAnswer() failed.
+        if (!this_weak_ptr) {
+          create_sdp_observer->OnFailure(RTCError(
+              RTCErrorType::INTERNAL_ERROR,
+              "SetLocalDescription failed because the session was shut down"));
+          return;
+        }
+        // Per the WebRTC spec, which SDP type is implied depends on the
+        // signaling state at the time the chained operation runs.
+        switch (this_weak_ptr->signaling_state()) {
+          case PeerConnectionInterface::kStable:
+          case PeerConnectionInterface::kHaveLocalOffer:
+          case PeerConnectionInterface::kHaveRemotePrAnswer:
+            // TODO(hbos): If [LastCreatedOffer] exists and still represents the
+            // current state of the system, use that instead of creating another
+            // offer.
+            this_weak_ptr->DoCreateOffer(
+                PeerConnectionInterface::RTCOfferAnswerOptions(),
+                create_sdp_observer);
+            break;
+          case PeerConnectionInterface::kHaveLocalPrAnswer:
+          case PeerConnectionInterface::kHaveRemoteOffer:
+            // TODO(hbos): If [LastCreatedAnswer] exists and still represents
+            // the current state of the system, use that instead of creating
+            // another answer.
+            this_weak_ptr->DoCreateAnswer(
+                PeerConnectionInterface::RTCOfferAnswerOptions(),
+                create_sdp_observer);
+            break;
+          case PeerConnectionInterface::kClosed:
+            create_sdp_observer->OnFailure(RTCError(
+                RTCErrorType::INVALID_STATE,
+                "SetLocalDescription called when PeerConnection is closed."));
+            break;
+        }
+      });
+}
+
+// Applies a validated local description: installs it in the current/pending
+// slots, pushes the transport description down, updates transceivers (Unified
+// Plan) or channels/senders (Plan B), updates session state, and performs
+// post-apply bookkeeping (remote candidates, SCTP sids, SSRC validation).
+// Returns the first error encountered; on error the PC may be left in an
+// inconsistent state (callers set a session error in that case).
+RTCError SdpOfferAnswerHandler::ApplyLocalDescription(
+    std::unique_ptr<SessionDescriptionInterface> desc,
+    const std::map<std::string, const cricket::ContentGroup*>&
+        bundle_groups_by_mid) {
+  TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::ApplyLocalDescription");
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  RTC_DCHECK(desc);
+
+  // Invalidate the stats caches to make sure that they get
+  // updated the next time getStats() gets called, as updating the session
+  // description affects the stats.
+  pc_->ClearStatsCache();
+
+  // Take a reference to the old local description since it's used below to
+  // compare against the new local description. When setting the new local
+  // description, grab ownership of the replaced session description in case it
+  // is the same as `old_local_description`, to keep it alive for the duration
+  // of the method.
+  const SessionDescriptionInterface* old_local_description =
+      local_description();
+  std::unique_ptr<SessionDescriptionInterface> replaced_local_description;
+  SdpType type = desc->GetType();
+  if (type == SdpType::kAnswer) {
+    // An answer finalizes the exchange: promote to "current" and clear the
+    // pending slots on both the local and remote sides.
+    replaced_local_description = pending_local_description_
+                                     ? std::move(pending_local_description_)
+                                     : std::move(current_local_description_);
+    current_local_description_ = std::move(desc);
+    pending_local_description_ = nullptr;
+    current_remote_description_ = std::move(pending_remote_description_);
+  } else {
+    // Offers and pranswers stay "pending" until answered.
+    replaced_local_description = std::move(pending_local_description_);
+    pending_local_description_ = std::move(desc);
+  }
+  if (!initial_offerer_) {
+    initial_offerer_.emplace(type == SdpType::kOffer);
+  }
+  // The session description to apply now must be accessed by
+  // `local_description()`.
+  RTC_DCHECK(local_description());
+
+  // Determine the caller/callee role the first time a description is applied
+  // (`is_caller_` starts unset).
+  if (!is_caller_) {
+    if (remote_description()) {
+      // Remote description was applied first, so this PC is the callee.
+      is_caller_ = false;
+    } else {
+      // Local description is applied first, so this PC is the caller.
+      is_caller_ = true;
+    }
+  }
+
+  RTCError error = PushdownTransportDescription(cricket::CS_LOCAL, type);
+  if (!error.ok()) {
+    return error;
+  }
+
+  if (IsUnifiedPlan()) {
+    error = UpdateTransceiversAndDataChannels(
+        cricket::CS_LOCAL, *local_description(), old_local_description,
+        remote_description(), bundle_groups_by_mid);
+    if (!error.ok()) {
+      RTC_LOG(LS_ERROR) << error.message() << " (" << SdpTypeToString(type)
+                        << ")";
+      return error;
+    }
+    if (ConfiguredForMedia()) {
+      std::vector<rtc::scoped_refptr<RtpTransceiverInterface>> remove_list;
+      std::vector<rtc::scoped_refptr<MediaStreamInterface>> removed_streams;
+      for (const auto& transceiver_ext : transceivers()->List()) {
+        auto transceiver = transceiver_ext->internal();
+        if (transceiver->stopped()) {
+          continue;
+        }
+
+        // 2.2.7.1.1.(6-9): Set sender and receiver's transport slots.
+        // Note that code paths that don't set MID won't be able to use
+        // information about DTLS transports.
+        if (transceiver->mid()) {
+          auto dtls_transport = LookupDtlsTransportByMid(
+              context_->network_thread(), transport_controller_s(),
+              *transceiver->mid());
+          transceiver->sender_internal()->set_transport(dtls_transport);
+          transceiver->receiver_internal()->set_transport(dtls_transport);
+        }
+
+        const ContentInfo* content =
+            FindMediaSectionForTransceiver(transceiver, local_description());
+        if (!content) {
+          continue;
+        }
+        const MediaContentDescription* media_desc =
+            content->media_description();
+        // 2.2.7.1.6: If description is of type "answer" or "pranswer", then run
+        // the following steps:
+        if (type == SdpType::kPrAnswer || type == SdpType::kAnswer) {
+          // 2.2.7.1.6.1: If direction is "sendonly" or "inactive", and
+          // transceiver's [[FiredDirection]] slot is either "sendrecv" or
+          // "recvonly", process the removal of a remote track for the media
+          // description, given transceiver, removeList, and muteTracks.
+          if (!RtpTransceiverDirectionHasRecv(media_desc->direction()) &&
+              (transceiver->fired_direction() &&
+               RtpTransceiverDirectionHasRecv(
+                   *transceiver->fired_direction()))) {
+            ProcessRemovalOfRemoteTrack(transceiver_ext, &remove_list,
+                                        &removed_streams);
+          }
+          // 2.2.7.1.6.2: Set transceiver's [[CurrentDirection]] and
+          // [[FiredDirection]] slots to direction.
+          transceiver->set_current_direction(media_desc->direction());
+          transceiver->set_fired_direction(media_desc->direction());
+        }
+      }
+      // Fire callbacks only after all transceivers have been processed.
+      auto observer = pc_->Observer();
+      for (const auto& transceiver : remove_list) {
+        observer->OnRemoveTrack(transceiver->receiver());
+      }
+      for (const auto& stream : removed_streams) {
+        observer->OnRemoveStream(stream);
+      }
+    }
+  } else {
+    // Media channels will be created only when offer is set. These may use new
+    // transports just created by PushdownTransportDescription.
+    if (type == SdpType::kOffer) {
+      // TODO(bugs.webrtc.org/4676) - Handle CreateChannel failure, as new local
+      // description is applied. Restore back to old description.
+      error = CreateChannels(*local_description()->description());
+      if (!error.ok()) {
+        RTC_LOG(LS_ERROR) << error.message() << " (" << SdpTypeToString(type)
+                          << ")";
+        return error;
+      }
+    }
+    // Remove unused channels if MediaContentDescription is rejected.
+    RemoveUnusedChannels(local_description()->description());
+  }
+
+  error = UpdateSessionState(type, cricket::CS_LOCAL,
+                             local_description()->description(),
+                             bundle_groups_by_mid);
+  if (!error.ok()) {
+    RTC_LOG(LS_ERROR) << error.message() << " (" << SdpTypeToString(type)
+                      << ")";
+    return error;
+  }
+
+  // Now that we have a local description, we can push down remote candidates.
+  UseCandidatesInRemoteDescription();
+
+  pending_ice_restarts_.clear();
+  if (session_error() != SessionError::kNone) {
+    LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, GetSessionErrorMsg());
+  }
+
+  // If setting the description decided our SSL role, allocate any necessary
+  // SCTP sids.
+  AllocateSctpSids();
+
+  // Validate SSRCs, we do not allow duplicates.
+  if (ConfiguredForMedia()) {
+    std::set<uint32_t> used_ssrcs;
+    for (const auto& content : local_description()->description()->contents()) {
+      for (const auto& stream : content.media_description()->streams()) {
+        for (uint32_t ssrc : stream.ssrcs) {
+          auto result = used_ssrcs.insert(ssrc);
+          if (!result.second) {
+            LOG_AND_RETURN_ERROR(
+                RTCErrorType::INVALID_PARAMETER,
+                "Duplicate ssrc " + rtc::ToString(ssrc) + " is not allowed");
+          }
+        }
+      }
+    }
+  }
+
+  if (IsUnifiedPlan()) {
+    if (ConfiguredForMedia()) {
+      // We must use List and not ListInternal here because
+      // transceivers()->StableState() is indexed by the non-internal refptr.
+      for (const auto& transceiver_ext : transceivers()->List()) {
+        auto transceiver = transceiver_ext->internal();
+        if (transceiver->stopped()) {
+          continue;
+        }
+        const ContentInfo* content =
+            FindMediaSectionForTransceiver(transceiver, local_description());
+        if (!content) {
+          continue;
+        }
+        cricket::ChannelInterface* channel = transceiver->channel();
+        if (content->rejected || !channel || channel->local_streams().empty()) {
+          // 0 is a special value meaning "this sender has no associated send
+          // stream". Need to call this so the sender won't attempt to configure
+          // a no longer existing stream and run into DCHECKs in the lower
+          // layers.
+          transceiver->sender_internal()->SetSsrc(0);
+        } else {
+          // Get the StreamParams from the channel which could generate SSRCs.
+          const std::vector<StreamParams>& streams = channel->local_streams();
+          transceiver->sender_internal()->set_stream_ids(
+              streams[0].stream_ids());
+          auto encodings =
+              transceiver->sender_internal()->init_send_encodings();
+          transceiver->sender_internal()->SetSsrc(streams[0].first_ssrc());
+          if (!encodings.empty()) {
+            transceivers()
+                ->StableState(transceiver_ext)
+                ->SetInitSendEncodings(encodings);
+          }
+        }
+      }
+    }
+  } else {
+    // Plan B semantics.
+
+    // Update state and SSRC of local MediaStreams and DataChannels based on the
+    // local session description.
+    const cricket::ContentInfo* audio_content =
+        GetFirstAudioContent(local_description()->description());
+    if (audio_content) {
+      if (audio_content->rejected) {
+        RemoveSenders(cricket::MEDIA_TYPE_AUDIO);
+      } else {
+        const cricket::AudioContentDescription* audio_desc =
+            audio_content->media_description()->as_audio();
+        UpdateLocalSenders(audio_desc->streams(), audio_desc->type());
+      }
+    }
+
+    const cricket::ContentInfo* video_content =
+        GetFirstVideoContent(local_description()->description());
+    if (video_content) {
+      if (video_content->rejected) {
+        RemoveSenders(cricket::MEDIA_TYPE_VIDEO);
+      } else {
+        const cricket::VideoContentDescription* video_desc =
+            video_content->media_description()->as_video();
+        UpdateLocalSenders(video_desc->streams(), video_desc->type());
+      }
+    }
+  }
+
+  // This function does nothing with data content.
+
+  // An applied answer that carries the new credentials completes the pending
+  // ICE restart.
+  if (type == SdpType::kAnswer &&
+      local_ice_credentials_to_replace_->SatisfiesIceRestart(
+          *current_local_description_)) {
+    local_ice_credentials_to_replace_->ClearIceCredentials();
+  }
+
+  if (IsUnifiedPlan()) {
+    UpdateRtpHeaderExtensionPreferencesFromSdpMunging(
+        local_description()->description(), transceivers());
+  }
+
+  return RTCError::OK();
+}
+
+// Legacy raw-pointer overload of SetRemoteDescription. Takes ownership of
+// `desc_ptr` immediately and chains the work on the operations chain; the
+// RemoteDescriptionOperation owns both the description and the completion
+// callback for the duration of the (possibly asynchronous) apply.
+void SdpOfferAnswerHandler::SetRemoteDescription(
+    SetSessionDescriptionObserver* observer,
+    SessionDescriptionInterface* desc_ptr) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  // Chain this operation. If asynchronous operations are pending on the chain,
+  // this operation will be queued to be invoked, otherwise the contents of the
+  // lambda will execute immediately.
+  operations_chain_->ChainOperation(
+      [this_weak_ptr = weak_ptr_factory_.GetWeakPtr(),
+       observer_refptr =
+           rtc::scoped_refptr<SetSessionDescriptionObserver>(observer),
+       desc = std::unique_ptr<SessionDescriptionInterface>(desc_ptr)](
+          std::function<void()> operations_chain_callback) mutable {
+        // Abort early if `this_weak_ptr` is no longer valid.
+        if (!this_weak_ptr) {
+          // For consistency with SetSessionDescriptionObserverAdapter whose
+          // posted messages doesn't get processed when the PC is destroyed, we
+          // do not inform `observer_refptr` that the operation failed.
+          operations_chain_callback();
+          return;
+        }
+        // SetSessionDescriptionObserverAdapter takes care of making sure the
+        // `observer_refptr` is invoked in a posted message.
+        this_weak_ptr->DoSetRemoteDescription(
+            std::make_unique<RemoteDescriptionOperation>(
+                this_weak_ptr.get(), std::move(desc),
+                rtc::make_ref_counted<SetSessionDescriptionObserverAdapter>(
+                    this_weak_ptr, observer_refptr),
+                std::move(operations_chain_callback)));
+      });
+}
+
+// Modern overload of SetRemoteDescription taking ownership via unique_ptr and
+// reporting completion through SetRemoteDescriptionObserverInterface.
+void SdpOfferAnswerHandler::SetRemoteDescription(
+    std::unique_ptr<SessionDescriptionInterface> desc,
+    rtc::scoped_refptr<SetRemoteDescriptionObserverInterface> observer) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  // Chain this operation. If asynchronous operations are pending on the chain,
+  // this operation will be queued to be invoked, otherwise the contents of the
+  // lambda will execute immediately.
+  operations_chain_->ChainOperation(
+      [this_weak_ptr = weak_ptr_factory_.GetWeakPtr(), observer,
+       desc = std::move(desc)](
+          std::function<void()> operations_chain_callback) mutable {
+        // A null observer means nobody can be notified of completion; log and
+        // bail out without touching the PC.
+        if (!observer) {
+          RTC_DLOG(LS_ERROR) << "SetRemoteDescription - observer is NULL.";
+          operations_chain_callback();
+          return;
+        }
+
+        // Abort early if `this_weak_ptr` is no longer valid.
+        if (!this_weak_ptr) {
+          observer->OnSetRemoteDescriptionComplete(RTCError(
+              RTCErrorType::INTERNAL_ERROR,
+              "SetRemoteDescription failed because the session was shut down"));
+          operations_chain_callback();
+          return;
+        }
+
+        this_weak_ptr->DoSetRemoteDescription(
+            std::make_unique<RemoteDescriptionOperation>(
+                this_weak_ptr.get(), std::move(desc), std::move(observer),
+                std::move(operations_chain_callback)));
+      });
+}
+
+// Installs `desc` as the new remote description, handing the description it
+// displaces back to the caller via `replaced_description` (so its lifetime
+// outlasts this call). Then pushes the new remote description down to the
+// transport controller.
+RTCError SdpOfferAnswerHandler::ReplaceRemoteDescription(
+    std::unique_ptr<SessionDescriptionInterface> desc,
+    SdpType sdp_type,
+    std::unique_ptr<SessionDescriptionInterface>* replaced_description) {
+  RTC_DCHECK(replaced_description);
+  if (sdp_type == SdpType::kAnswer) {
+    // An answer finalizes the exchange: promote to "current" and clear the
+    // pending slots on both the remote and local sides.
+    *replaced_description = pending_remote_description_
+                                ? std::move(pending_remote_description_)
+                                : std::move(current_remote_description_);
+    current_remote_description_ = std::move(desc);
+    pending_remote_description_ = nullptr;
+    current_local_description_ = std::move(pending_local_description_);
+  } else {
+    // Offers and pranswers stay "pending" until answered.
+    *replaced_description = std::move(pending_remote_description_);
+    pending_remote_description_ = std::move(desc);
+  }
+
+  // The session description to apply now must be accessed by
+  // `remote_description()`.
+  const cricket::SessionDescription* session_desc =
+      remote_description()->description();
+
+  // NOTE: This will perform a BlockingCall() to the network thread.
+  return transport_controller_s()->SetRemoteDescription(sdp_type, session_desc);
+}
+
+// Applies a remote description wrapped in a RemoteDescriptionOperation. Each
+// step below reports failure through the operation itself (hence the bare
+// early returns); on success the operation is signaled complete before the
+// final post-processing step.
+void SdpOfferAnswerHandler::ApplyRemoteDescription(
+    std::unique_ptr<RemoteDescriptionOperation> operation) {
+  TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::ApplyRemoteDescription");
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  RTC_DCHECK(operation->description());
+
+  // Invalidate the stats caches to make sure that they get
+  // updated next time getStats() gets called, as updating the session
+  // description affects the stats.
+  pc_->ClearStatsCache();
+
+  if (!operation->ReplaceRemoteDescriptionAndCheckError())
+    return;
+
+  if (!operation->UpdateChannels())
+    return;
+
+  // NOTE: Candidates allocation will be initiated only when
+  // SetLocalDescription is called.
+  if (!operation->UpdateSessionState())
+    return;
+
+  if (!operation->UseCandidatesInRemoteDescription())
+    return;
+
+  if (operation->old_remote_description()) {
+    for (const cricket::ContentInfo& content :
+         operation->old_remote_description()->description()->contents()) {
+      // Check if this new SessionDescription contains new ICE ufrag and
+      // password that indicates the remote peer requests an ICE restart.
+      // TODO(deadbeef): When we start storing both the current and pending
+      // remote description, this should reset pending_ice_restarts and compare
+      // against the current description.
+      if (CheckForRemoteIceRestart(operation->old_remote_description(),
+                                   remote_description(), content.name)) {
+        if (operation->type() == SdpType::kOffer) {
+          pending_ice_restarts_.insert(content.name);
+        }
+      } else {
+        // We retain all received candidates only if ICE is not restarted.
+        // When ICE is restarted, all previous candidates belong to an old
+        // generation and should not be kept.
+        // TODO(deadbeef): This goes against the W3C spec which says the remote
+        // description should only contain candidates from the last set remote
+        // description plus any candidates added since then. We should remove
+        // this once we're sure it won't break anything.
+        WebRtcSessionDescriptionFactory::CopyCandidatesFromSessionDescription(
+            operation->old_remote_description(), content.name,
+            mutable_remote_description());
+      }
+    }
+  }
+
+  if (operation->HaveSessionError())
+    return;
+
+  // Set the ICE connection state to connecting since the connection may
+  // become writable with peer reflexive candidates before any remote candidate
+  // is signaled.
+  // TODO(pthatcher): This is a short-term solution for crbug/446908. A real fix
+  // is to have a new signal that indicates a change in checking state from the
+  // transport and expose a new checking() member from transport that can be
+  // read to determine the current checking state. The existing SignalConnecting
+  // actually means "gathering candidates", so cannot be used here.
+  if (remote_description()->GetType() != SdpType::kOffer &&
+      remote_description()->number_of_mediasections() > 0u &&
+      pc_->ice_connection_state_internal() ==
+          PeerConnectionInterface::kIceConnectionNew) {
+    pc_->SetIceConnectionState(PeerConnectionInterface::kIceConnectionChecking);
+  }
+
+  // If setting the description decided our SSL role, allocate any necessary
+  // SCTP sids.
+  AllocateSctpSids();
+
+  if (operation->unified_plan()) {
+    ApplyRemoteDescriptionUpdateTransceiverState(operation->type());
+  }
+
+  const cricket::AudioContentDescription* audio_desc =
+      GetFirstAudioContentDescription(remote_description()->description());
+  const cricket::VideoContentDescription* video_desc =
+      GetFirstVideoContentDescription(remote_description()->description());
+
+  // Check if the descriptions include streams, just in case the peer supports
+  // MSID, but doesn't indicate so with "a=msid-semantic".
+  if (remote_description()->description()->msid_supported() ||
+      (audio_desc && !audio_desc->streams().empty()) ||
+      (video_desc && !video_desc->streams().empty())) {
+    remote_peer_supports_msid_ = true;
+  }
+
+  if (!operation->unified_plan()) {
+    PlanBUpdateSendersAndReceivers(
+        GetFirstAudioContent(remote_description()->description()), audio_desc,
+        GetFirstVideoContent(remote_description()->description()), video_desc);
+  }
+
+  if (operation->type() == SdpType::kAnswer) {
+    // A received answer that carries the new credentials completes the
+    // pending ICE restart.
+    if (local_ice_credentials_to_replace_->SatisfiesIceRestart(
+            *current_local_description_)) {
+      local_ice_credentials_to_replace_->ClearIceCredentials();
+    }
+
+    RemoveStoppedTransceivers();
+  }
+
+  // Consider the operation complete at this point.
+  operation->SignalCompletion();
+
+  SetRemoteDescriptionPostProcess(operation->type() == SdpType::kAnswer);
+}
+
+// Unified Plan only: updates every transceiver's direction/stream/transport
+// state from the just-applied remote description, implementing steps 2.2.8.x
+// of "Set the RTCSessionDescription" in the WebRTC spec. All observer
+// callbacks (OnTrack/OnAddStream/OnRemoveTrack/OnRemoveStream) are deferred
+// until after all transceivers have been processed.
+void SdpOfferAnswerHandler::ApplyRemoteDescriptionUpdateTransceiverState(
+    SdpType sdp_type) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  RTC_DCHECK(IsUnifiedPlan());
+  if (!ConfiguredForMedia()) {
+    return;
+  }
+  std::vector<rtc::scoped_refptr<RtpTransceiverInterface>>
+      now_receiving_transceivers;
+  std::vector<rtc::scoped_refptr<RtpTransceiverInterface>> remove_list;
+  std::vector<rtc::scoped_refptr<MediaStreamInterface>> added_streams;
+  std::vector<rtc::scoped_refptr<MediaStreamInterface>> removed_streams;
+  for (const auto& transceiver_ext : transceivers()->List()) {
+    const auto transceiver = transceiver_ext->internal();
+    const ContentInfo* content =
+        FindMediaSectionForTransceiver(transceiver, remote_description());
+    if (!content) {
+      continue;
+    }
+    const MediaContentDescription* media_desc = content->media_description();
+    // The local view of the direction is the reverse of what the remote
+    // peer signaled.
+    RtpTransceiverDirection local_direction =
+        RtpTransceiverDirectionReversed(media_desc->direction());
+    // Remember the previous remote streams if this is a remote offer. This
+    // makes it possible to rollback modifications to the streams.
+    if (sdp_type == SdpType::kOffer) {
+      transceivers()
+          ->StableState(transceiver_ext)
+          ->SetRemoteStreamIds(transceiver->receiver()->stream_ids());
+    }
+    // Roughly the same as steps 2.2.8.6 of section 4.4.1.6 "Set the
+    // RTCSessionDescription: Set the associated remote streams given
+    // transceiver.[[Receiver]], msids, addList, and removeList".
+    // https://w3c.github.io/webrtc-pc/#set-the-rtcsessiondescription
+    if (RtpTransceiverDirectionHasRecv(local_direction)) {
+      std::vector<std::string> stream_ids;
+      if (!media_desc->streams().empty()) {
+        // The remote description has signaled the stream IDs.
+        stream_ids = media_desc->streams()[0].stream_ids();
+      }
+
+      RTC_LOG(LS_INFO) << "Processing the MSIDs for MID=" << content->name
+                       << " (" << GetStreamIdsString(stream_ids) << ").";
+      SetAssociatedRemoteStreams(transceiver->receiver_internal(), stream_ids,
+                                 &added_streams, &removed_streams);
+      // From the WebRTC specification, steps 2.2.8.5/6 of section 4.4.1.6
+      // "Set the RTCSessionDescription: If direction is sendrecv or recvonly,
+      // and transceiver's current direction is neither sendrecv nor recvonly,
+      // process the addition of a remote track for the media description.
+      if (!transceiver->fired_direction() ||
+          !RtpTransceiverDirectionHasRecv(*transceiver->fired_direction())) {
+        RTC_LOG(LS_INFO) << "Processing the addition of a remote track for MID="
+                         << content->name << ".";
+        // Since the transceiver is passed to the user in an
+        // OnTrack event, we must use the proxied transceiver.
+        now_receiving_transceivers.push_back(transceiver_ext);
+      }
+    }
+    // 2.2.8.1.9: If direction is "sendonly" or "inactive", and transceiver's
+    // [[FiredDirection]] slot is either "sendrecv" or "recvonly", process the
+    // removal of a remote track for the media description, given transceiver,
+    // removeList, and muteTracks.
+    if (!RtpTransceiverDirectionHasRecv(local_direction) &&
+        (transceiver->fired_direction() &&
+         RtpTransceiverDirectionHasRecv(*transceiver->fired_direction()))) {
+      ProcessRemovalOfRemoteTrack(transceiver_ext, &remove_list,
+                                  &removed_streams);
+    }
+    // 2.2.8.1.10: Set transceiver's [[FiredDirection]] slot to direction.
+    if (sdp_type == SdpType::kOffer) {
+      // Remember the previous fired direction if this is a remote offer. This
+      // makes it possible to rollback modifications to [[FiredDirection]],
+      // which is necessary for "ontrack" to fire in or after rollback.
+      transceivers()
+          ->StableState(transceiver_ext)
+          ->SetFiredDirection(transceiver->fired_direction());
+    }
+    transceiver->set_fired_direction(local_direction);
+    // 2.2.8.1.11: If description is of type "answer" or "pranswer", then run
+    // the following steps:
+    if (sdp_type == SdpType::kPrAnswer || sdp_type == SdpType::kAnswer) {
+      // 2.2.8.1.11.1: Set transceiver's [[CurrentDirection]] slot to
+      // direction.
+      transceiver->set_current_direction(local_direction);
+      // 2.2.8.1.11.[3-6]: Set the transport internal slots.
+      if (transceiver->mid()) {
+        auto dtls_transport = LookupDtlsTransportByMid(
+            context_->network_thread(), transport_controller_s(),
+            *transceiver->mid());
+        transceiver->sender_internal()->set_transport(dtls_transport);
+        transceiver->receiver_internal()->set_transport(dtls_transport);
+      }
+    }
+    // 2.2.8.1.12: If the media description is rejected, and transceiver is
+    // not already stopped, stop the RTCRtpTransceiver transceiver.
+    if (content->rejected && !transceiver->stopped()) {
+      RTC_LOG(LS_INFO) << "Stopping transceiver for MID=" << content->name
+                       << " since the media section was rejected.";
+      transceiver->StopTransceiverProcedure();
+    }
+    if (!content->rejected && RtpTransceiverDirectionHasRecv(local_direction)) {
+      if (!media_desc->streams().empty() &&
+          media_desc->streams()[0].has_ssrcs()) {
+        uint32_t ssrc = media_desc->streams()[0].first_ssrc();
+        transceiver->receiver_internal()->SetupMediaChannel(ssrc);
+      } else {
+        transceiver->receiver_internal()->SetupUnsignaledMediaChannel();
+      }
+    }
+  }
+  // Once all processing has finished, fire off callbacks.
+  auto observer = pc_->Observer();
+  for (const auto& transceiver : now_receiving_transceivers) {
+    pc_->legacy_stats()->AddTrack(transceiver->receiver()->track().get());
+    observer->OnTrack(transceiver);
+    observer->OnAddTrack(transceiver->receiver(),
+                         transceiver->receiver()->streams());
+  }
+  for (const auto& stream : added_streams) {
+    observer->OnAddStream(stream);
+  }
+  for (const auto& transceiver : remove_list) {
+    observer->OnRemoveTrack(transceiver->receiver());
+  }
+  for (const auto& stream : removed_streams) {
+    observer->OnRemoveStream(stream);
+  }
+}
+
+// Plan B only: creates/removes remote senders and MediaStreams from the first
+// audio and first video content of the remote description, then notifies the
+// observer about any newly created streams.
+void SdpOfferAnswerHandler::PlanBUpdateSendersAndReceivers(
+    const cricket::ContentInfo* audio_content,
+    const cricket::AudioContentDescription* audio_desc,
+    const cricket::ContentInfo* video_content,
+    const cricket::VideoContentDescription* video_desc) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  RTC_DCHECK(!IsUnifiedPlan());
+
+  // We wait to signal new streams until we finish processing the description,
+  // since only at that point will new streams have all their tracks.
+  rtc::scoped_refptr<StreamCollection> new_streams(StreamCollection::Create());
+
+  // TODO(steveanton): When removing RTP senders/receivers in response to a
+  // rejected media section, there is some cleanup logic that expects the
+  // voice/ video channel to still be set. But in this method the voice/video
+  // channel would have been destroyed by the SetRemoteDescription caller
+  // above so the cleanup that relies on them fails to run. The RemoveSenders
+  // calls should be moved to right before the DestroyChannel calls to fix
+  // this.
+
+  // Find all audio rtp streams and create corresponding remote AudioTracks
+  // and MediaStreams.
+  if (audio_content) {
+    if (audio_content->rejected) {
+      RemoveSenders(cricket::MEDIA_TYPE_AUDIO);
+    } else {
+      // A peer that doesn't support MSID still gets a default track if it is
+      // sending media.
+      bool default_audio_track_needed =
+          !remote_peer_supports_msid_ &&
+          RtpTransceiverDirectionHasSend(audio_desc->direction());
+      UpdateRemoteSendersList(GetActiveStreams(audio_desc),
+                              default_audio_track_needed, audio_desc->type(),
+                              new_streams.get());
+    }
+  }
+
+  // Find all video rtp streams and create corresponding remote VideoTracks
+  // and MediaStreams.
+  if (video_content) {
+    if (video_content->rejected) {
+      RemoveSenders(cricket::MEDIA_TYPE_VIDEO);
+    } else {
+      // Same default-track rule as for audio above.
+      bool default_video_track_needed =
+          !remote_peer_supports_msid_ &&
+          RtpTransceiverDirectionHasSend(video_desc->direction());
+      UpdateRemoteSendersList(GetActiveStreams(video_desc),
+                              default_video_track_needed, video_desc->type(),
+                              new_streams.get());
+    }
+  }
+
+  // Iterate new_streams and notify the observer about new MediaStreams.
+  auto observer = pc_->Observer();
+  for (size_t i = 0; i < new_streams->count(); ++i) {
+    MediaStreamInterface* new_stream = new_streams->at(i);
+    pc_->legacy_stats()->AddStream(new_stream);
+    observer->OnAddStream(rtc::scoped_refptr<MediaStreamInterface>(new_stream));
+  }
+
+  UpdateEndedRemoteMediaStreams();
+}
+
+// Runs the signaling-thread portion of SetLocalDescription(): validates the
+// description, applies it, and notifies `observer` exactly once on every
+// path (except when `observer` itself is null). Post-success work -
+// candidate-pool discard, negotiation-needed checks, ICE gathering -
+// deliberately happens after the observer callback so that no events are
+// signaled before completion is reported.
+void SdpOfferAnswerHandler::DoSetLocalDescription(
+    std::unique_ptr<SessionDescriptionInterface> desc,
+    rtc::scoped_refptr<SetLocalDescriptionObserverInterface> observer) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::DoSetLocalDescription");
+
+  if (!observer) {
+    RTC_LOG(LS_ERROR) << "SetLocalDescription - observer is NULL.";
+    return;
+  }
+
+  if (!desc) {
+    observer->OnSetLocalDescriptionComplete(
+        RTCError(RTCErrorType::INTERNAL_ERROR, "SessionDescription is NULL."));
+    return;
+  }
+
+  // If a session error has occurred the PeerConnection is in a possibly
+  // inconsistent state so fail right away.
+  if (session_error() != SessionError::kNone) {
+    std::string error_message = GetSessionErrorMsg();
+    RTC_LOG(LS_ERROR) << "SetLocalDescription: " << error_message;
+    observer->OnSetLocalDescriptionComplete(
+        RTCError(RTCErrorType::INTERNAL_ERROR, std::move(error_message)));
+    return;
+  }
+
+  // For SLD we support only explicit rollback.
+  if (desc->GetType() == SdpType::kRollback) {
+    if (IsUnifiedPlan()) {
+      observer->OnSetLocalDescriptionComplete(Rollback(desc->GetType()));
+    } else {
+      observer->OnSetLocalDescriptionComplete(
+          RTCError(RTCErrorType::UNSUPPORTED_OPERATION,
+                   "Rollback not supported in Plan B"));
+    }
+    return;
+  }
+
+  std::map<std::string, const cricket::ContentGroup*> bundle_groups_by_mid =
+      GetBundleGroupsByMid(desc->description());
+  RTCError error = ValidateSessionDescription(desc.get(), cricket::CS_LOCAL,
+                                              bundle_groups_by_mid);
+  if (!error.ok()) {
+    std::string error_message = GetSetDescriptionErrorMessage(
+        cricket::CS_LOCAL, desc->GetType(), error);
+    RTC_LOG(LS_ERROR) << error_message;
+    observer->OnSetLocalDescriptionComplete(
+        RTCError(error.type(), std::move(error_message)));
+    return;
+  }
+
+  // Grab the description type before moving ownership to ApplyLocalDescription,
+  // which may destroy it before returning.
+  const SdpType type = desc->GetType();
+
+  error = ApplyLocalDescription(std::move(desc), bundle_groups_by_mid);
+  // `desc` may be destroyed at this point.
+
+  if (!error.ok()) {
+    // If ApplyLocalDescription fails, the PeerConnection could be in an
+    // inconsistent state, so act conservatively here and set the session error
+    // so that future calls to SetLocalDescription/SetRemoteDescription fail.
+    SetSessionError(SessionError::kContent, error.message());
+    std::string error_message =
+        GetSetDescriptionErrorMessage(cricket::CS_LOCAL, type, error);
+    RTC_LOG(LS_ERROR) << error_message;
+    observer->OnSetLocalDescriptionComplete(
+        RTCError(RTCErrorType::INTERNAL_ERROR, std::move(error_message)));
+    return;
+  }
+  RTC_DCHECK(local_description());
+
+  // An applied answer finalizes the negotiation round: drop transceivers that
+  // were stopped, and release the pre-gathered candidate pool (which is owned
+  // by the network thread, hence the blocking hop).
+  if (local_description()->GetType() == SdpType::kAnswer) {
+    RemoveStoppedTransceivers();
+
+    // TODO(deadbeef): We already had to hop to the network thread for
+    // MaybeStartGathering...
+    context_->network_thread()->BlockingCall(
+        [this] { port_allocator()->DiscardCandidatePool(); });
+  }
+
+  observer->OnSetLocalDescriptionComplete(RTCError::OK());
+  pc_->NoteUsageEvent(UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED);
+
+  // Check if negotiation is needed. We must do this after informing the
+  // observer that SetLocalDescription() has completed to ensure negotiation is
+  // not needed prior to the promise resolving.
+  if (IsUnifiedPlan()) {
+    bool was_negotiation_needed = is_negotiation_needed_;
+    UpdateNegotiationNeeded();
+    if (signaling_state() == PeerConnectionInterface::kStable &&
+        was_negotiation_needed && is_negotiation_needed_) {
+      // Legacy version.
+      pc_->Observer()->OnRenegotiationNeeded();
+      // Spec-compliant version; the event may get invalidated before firing.
+      GenerateNegotiationNeededEvent();
+    }
+  }
+
+  // MaybeStartGathering needs to be called after informing the observer so that
+  // we don't signal any candidates before signaling that SetLocalDescription
+  // completed.
+  transport_controller_s()->MaybeStartGathering();
+}
+
+// Runs the signaling-thread portion of CreateOffer(): validates state and
+// options, then hands off to the session description factory. Failures are
+// posted asynchronously via the message handler rather than invoked inline
+// on `observer`, so the caller never re-enters from within this call.
+void SdpOfferAnswerHandler::DoCreateOffer(
+    const PeerConnectionInterface::RTCOfferAnswerOptions& options,
+    rtc::scoped_refptr<CreateSessionDescriptionObserver> observer) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::DoCreateOffer");
+
+  if (!observer) {
+    RTC_LOG(LS_ERROR) << "CreateOffer - observer is NULL.";
+    return;
+  }
+
+  if (pc_->IsClosed()) {
+    std::string error = "CreateOffer called when PeerConnection is closed.";
+    RTC_LOG(LS_ERROR) << error;
+    pc_->message_handler()->PostCreateSessionDescriptionFailure(
+        observer.get(),
+        RTCError(RTCErrorType::INVALID_STATE, std::move(error)));
+    return;
+  }
+
+  // If a session error has occurred the PeerConnection is in a possibly
+  // inconsistent state so fail right away.
+  if (session_error() != SessionError::kNone) {
+    std::string error_message = GetSessionErrorMsg();
+    RTC_LOG(LS_ERROR) << "CreateOffer: " << error_message;
+    pc_->message_handler()->PostCreateSessionDescriptionFailure(
+        observer.get(),
+        RTCError(RTCErrorType::INTERNAL_ERROR, std::move(error_message)));
+    return;
+  }
+
+  if (!ValidateOfferAnswerOptions(options)) {
+    std::string error = "CreateOffer called with invalid options.";
+    RTC_LOG(LS_ERROR) << error;
+    pc_->message_handler()->PostCreateSessionDescriptionFailure(
+        observer.get(),
+        RTCError(RTCErrorType::INVALID_PARAMETER, std::move(error)));
+    return;
+  }
+
+  // Legacy handling for offer_to_receive_audio and offer_to_receive_video.
+  // Specified in WebRTC section 4.4.3.2 "Legacy configuration extensions".
+  if (IsUnifiedPlan()) {
+    RTCError error = HandleLegacyOfferOptions(options);
+    if (!error.ok()) {
+      pc_->message_handler()->PostCreateSessionDescriptionFailure(
+          observer.get(), std::move(error));
+      return;
+    }
+  }
+
+  // The factory invokes `observer` asynchronously with the created offer.
+  cricket::MediaSessionOptions session_options;
+  GetOptionsForOffer(options, &session_options);
+  webrtc_session_desc_factory_->CreateOffer(observer.get(), options,
+                                            session_options);
+}
+
+// Public CreateAnswer() entry point. The real work (DoCreateAnswer) is
+// chained on `operations_chain_` so it runs strictly after any pending
+// offer/answer/SetDescription operations; the weak pointer guards against
+// this handler being destroyed while the operation is still queued.
+void SdpOfferAnswerHandler::CreateAnswer(
+    CreateSessionDescriptionObserver* observer,
+    const PeerConnectionInterface::RTCOfferAnswerOptions& options) {
+  TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::CreateAnswer");
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  // Chain this operation. If asynchronous operations are pending on the chain,
+  // this operation will be queued to be invoked, otherwise the contents of the
+  // lambda will execute immediately.
+  operations_chain_->ChainOperation(
+      [this_weak_ptr = weak_ptr_factory_.GetWeakPtr(),
+       observer_refptr =
+           rtc::scoped_refptr<CreateSessionDescriptionObserver>(observer),
+       options](std::function<void()> operations_chain_callback) {
+        // Abort early if `this_weak_ptr` is no longer valid.
+        if (!this_weak_ptr) {
+          observer_refptr->OnFailure(RTCError(
+              RTCErrorType::INTERNAL_ERROR,
+              "CreateAnswer failed because the session was shut down"));
+          operations_chain_callback();
+          return;
+        }
+        // The operation completes asynchronously when the wrapper is invoked.
+        // The wrapper advances the chain once the observer has been notified.
+        auto observer_wrapper = rtc::make_ref_counted<
+            CreateSessionDescriptionObserverOperationWrapper>(
+            std::move(observer_refptr), std::move(operations_chain_callback));
+        this_weak_ptr->DoCreateAnswer(options, observer_wrapper);
+      });
+}
+
+// Runs the signaling-thread portion of CreateAnswer(): verifies that the
+// signaling state machine permits answering (have-remote-offer or
+// have-local-pranswer), warns about legacy offer_to_receive_* options under
+// Unified Plan, and hands off to the session description factory. Failures
+// are posted asynchronously through the message handler.
+void SdpOfferAnswerHandler::DoCreateAnswer(
+    const PeerConnectionInterface::RTCOfferAnswerOptions& options,
+    rtc::scoped_refptr<CreateSessionDescriptionObserver> observer) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::DoCreateAnswer");
+  if (!observer) {
+    RTC_LOG(LS_ERROR) << "CreateAnswer - observer is NULL.";
+    return;
+  }
+
+  // If a session error has occurred the PeerConnection is in a possibly
+  // inconsistent state so fail right away.
+  if (session_error() != SessionError::kNone) {
+    std::string error_message = GetSessionErrorMsg();
+    RTC_LOG(LS_ERROR) << "CreateAnswer: " << error_message;
+    pc_->message_handler()->PostCreateSessionDescriptionFailure(
+        observer.get(),
+        RTCError(RTCErrorType::INTERNAL_ERROR, std::move(error_message)));
+    return;
+  }
+
+  if (!(signaling_state_ == PeerConnectionInterface::kHaveRemoteOffer ||
+        signaling_state_ == PeerConnectionInterface::kHaveLocalPrAnswer)) {
+    std::string error =
+        "PeerConnection cannot create an answer in a state other than "
+        "have-remote-offer or have-local-pranswer.";
+    RTC_LOG(LS_ERROR) << error;
+    pc_->message_handler()->PostCreateSessionDescriptionFailure(
+        observer.get(),
+        RTCError(RTCErrorType::INVALID_STATE, std::move(error)));
+    return;
+  }
+
+  // The remote description should be set if we're in the right state.
+  RTC_DCHECK(remote_description());
+
+  // The legacy options are silently ignored (with a warning) rather than
+  // rejected, for backwards compatibility.
+  if (IsUnifiedPlan()) {
+    if (options.offer_to_receive_audio !=
+        PeerConnectionInterface::RTCOfferAnswerOptions::kUndefined) {
+      RTC_LOG(LS_WARNING) << "CreateAnswer: offer_to_receive_audio is not "
+                             "supported with Unified Plan semantics. Use the "
+                             "RtpTransceiver API instead.";
+    }
+    if (options.offer_to_receive_video !=
+        PeerConnectionInterface::RTCOfferAnswerOptions::kUndefined) {
+      RTC_LOG(LS_WARNING) << "CreateAnswer: offer_to_receive_video is not "
+                             "supported with Unified Plan semantics. Use the "
+                             "RtpTransceiver API instead.";
+    }
+  }
+
+  cricket::MediaSessionOptions session_options;
+  GetOptionsForAnswer(options, &session_options);
+  webrtc_session_desc_factory_->CreateAnswer(observer.get(), session_options);
+}
+
+// Runs the signaling-thread portion of SetRemoteDescription() for an
+// already-constructed RemoteDescriptionOperation. The early returns do not
+// notify any observer directly; `operation` is presumed to surface its own
+// success/failure to the caller (NOTE(review): confirm against
+// RemoteDescriptionOperation's implementation).
+void SdpOfferAnswerHandler::DoSetRemoteDescription(
+    std::unique_ptr<RemoteDescriptionOperation> operation) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::DoSetRemoteDescription");
+
+  if (!operation->ok())
+    return;
+
+  if (operation->HaveSessionError())
+    return;
+
+  // Explicit or implicit rollback is handled inside the operation itself.
+  if (operation->MaybeRollback())
+    return;
+
+  operation->ReportOfferAnswerUma();
+
+  // Handle remote descriptions missing a=mid lines for interop with legacy
+  // end points.
+  FillInMissingRemoteMids(operation->description());
+  if (!operation->IsDescriptionValid())
+    return;
+
+  // Takes ownership of the operation; completion is reported from there.
+  ApplyRemoteDescription(std::move(operation));
+}
+
+// Called after a DoSetRemoteDescription operation completes successfully.
+// Performs post-apply housekeeping: discards the pre-gathered candidate
+// pool when the applied description was an answer, records the usage event,
+// and - in Unified Plan - fires negotiationneeded if it became needed while
+// in the stable state. Runs after the observer has been informed, so no
+// negotiation-needed event precedes completion being reported.
+void SdpOfferAnswerHandler::SetRemoteDescriptionPostProcess(bool was_answer) {
+  // All state read here (remote_description(), is_negotiation_needed_,
+  // signaling_state()) is signaling-thread-only; assert the thread up front
+  // for consistency with the other handlers in this class.
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  RTC_DCHECK(remote_description());
+
+  if (was_answer) {
+    // TODO(deadbeef): We already had to hop to the network thread for
+    // MaybeStartGathering...
+    context_->network_thread()->BlockingCall(
+        [this] { port_allocator()->DiscardCandidatePool(); });
+  }
+
+  pc_->NoteUsageEvent(UsageEvent::SET_REMOTE_DESCRIPTION_SUCCEEDED);
+
+  // Check if negotiation is needed. We must do this after informing the
+  // observer that SetRemoteDescription() has completed to ensure negotiation
+  // is not needed prior to the promise resolving.
+  if (IsUnifiedPlan()) {
+    bool was_negotiation_needed = is_negotiation_needed_;
+    UpdateNegotiationNeeded();
+    if (signaling_state() == PeerConnectionInterface::kStable &&
+        was_negotiation_needed && is_negotiation_needed_) {
+      // Legacy version.
+      pc_->Observer()->OnRenegotiationNeeded();
+      // Spec-compliant version; the event may get invalidated before firing.
+      GenerateNegotiationNeededEvent();
+    }
+  }
+}
+
+// Associates `receiver` with the remote MediaStreams named in `stream_ids`,
+// creating any streams that do not exist yet. Streams created here are
+// appended to `added_streams`; remote streams left empty after the
+// re-association are appended to `removed_streams`.
+void SdpOfferAnswerHandler::SetAssociatedRemoteStreams(
+    rtc::scoped_refptr<RtpReceiverInternal> receiver,
+    const std::vector<std::string>& stream_ids,
+    std::vector<rtc::scoped_refptr<MediaStreamInterface>>* added_streams,
+    std::vector<rtc::scoped_refptr<MediaStreamInterface>>* removed_streams) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  std::vector<rtc::scoped_refptr<MediaStreamInterface>> media_streams;
+  for (const std::string& stream_id : stream_ids) {
+    rtc::scoped_refptr<MediaStreamInterface> stream(
+        remote_streams_->find(stream_id));
+    if (!stream) {
+      // First time this stream id is seen: create a proxy-wrapped stream and
+      // report it to the caller as newly added.
+      stream = MediaStreamProxy::Create(rtc::Thread::Current(),
+                                        MediaStream::Create(stream_id));
+      remote_streams_->AddStream(stream);
+      added_streams->push_back(stream);
+    }
+    media_streams.push_back(stream);
+  }
+  // Special case: "a=msid" missing, use random stream ID.
+  // The single default stream is created lazily and then reused for all
+  // receivers in the same situation.
+  if (media_streams.empty() &&
+      !(remote_description()->description()->msid_signaling() &
+        cricket::kMsidSignalingMediaSection)) {
+    if (!missing_msid_default_stream_) {
+      missing_msid_default_stream_ = MediaStreamProxy::Create(
+          rtc::Thread::Current(), MediaStream::Create(rtc::CreateRandomUuid()));
+      added_streams->push_back(missing_msid_default_stream_);
+    }
+    media_streams.push_back(missing_msid_default_stream_);
+  }
+  std::vector<rtc::scoped_refptr<MediaStreamInterface>> previous_streams =
+      receiver->streams();
+  // SetStreams() will add/remove the receiver's track to/from the streams.
+  // This differs from the spec - the spec uses an "addList" and "removeList"
+  // to update the stream-track relationships in a later step. We do this
+  // earlier, changing the order of things, but the end-result is the same.
+  // TODO(hbos): When we remove remote_streams(), use set_stream_ids()
+  // instead. https://crbug.com/webrtc/9480
+  receiver->SetStreams(media_streams);
+  RemoveRemoteStreamsIfEmpty(previous_streams, removed_streams);
+}
+
+// Synchronous AddIceCandidate() overload: maps the fine-grained internal
+// result code down to the bool this legacy API exposes.
+bool SdpOfferAnswerHandler::AddIceCandidate(
+    const IceCandidateInterface* ice_candidate) {
+  const AddIceCandidateResult result = AddIceCandidateInternal(ice_candidate);
+  NoteAddIceCandidateResult(result);
+  switch (result) {
+    // kAddIceCandidateFailNotReady means the candidate was recorded but not
+    // yet applied ("not ready"); that still counts as success here.
+    case kAddIceCandidateSuccess:
+    case kAddIceCandidateFailNotReady:
+      return true;
+    default:
+      return false;
+  }
+}
+
+// Core candidate-addition logic shared by both AddIceCandidate() overloads.
+// Validates preconditions, records the candidate on the (mutable) remote
+// description, and applies it to the transport when the corresponding m=
+// section is ready. Returns a fine-grained result code that the callers map
+// to bool / RTCError respectively.
+AddIceCandidateResult SdpOfferAnswerHandler::AddIceCandidateInternal(
+    const IceCandidateInterface* ice_candidate) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::AddIceCandidate");
+  if (pc_->IsClosed()) {
+    RTC_LOG(LS_ERROR) << "AddIceCandidate: PeerConnection is closed.";
+    return kAddIceCandidateFailClosed;
+  }
+
+  if (!remote_description()) {
+    RTC_LOG(LS_ERROR) << "AddIceCandidate: ICE candidates can't be added "
+                         "without any remote session description.";
+    return kAddIceCandidateFailNoRemoteDescription;
+  }
+
+  if (!ice_candidate) {
+    RTC_LOG(LS_ERROR) << "AddIceCandidate: Candidate is null.";
+    return kAddIceCandidateFailNullCandidate;
+  }
+
+  // `valid` and `ready` are separate: an invalid candidate is rejected,
+  // while a valid-but-not-ready one is stored below and applied later.
+  bool valid = false;
+  bool ready = ReadyToUseRemoteCandidate(ice_candidate, nullptr, &valid);
+  if (!valid) {
+    return kAddIceCandidateFailNotValid;
+  }
+
+  // Add this candidate to the remote session description.
+  if (!mutable_remote_description()->AddCandidate(ice_candidate)) {
+    RTC_LOG(LS_ERROR) << "AddIceCandidate: Candidate cannot be used.";
+    return kAddIceCandidateFailInAddition;
+  }
+
+  if (!ready) {
+    RTC_LOG(LS_INFO) << "AddIceCandidate: Not ready to use candidate.";
+    return kAddIceCandidateFailNotReady;
+  }
+
+  if (!UseCandidate(ice_candidate)) {
+    return kAddIceCandidateFailNotUsable;
+  }
+
+  pc_->NoteUsageEvent(UsageEvent::ADD_ICE_CANDIDATE_SUCCEEDED);
+
+  return kAddIceCandidateSuccess;
+}
+
+// Asynchronous AddIceCandidate() overload: chained on `operations_chain_`
+// so it runs strictly after any pending SetDescription operations. The
+// `candidate` is kept alive by the lambda capture; `callback` is invoked
+// exactly once, after the chain has been advanced, with an RTCError mapped
+// from the internal result code.
+void SdpOfferAnswerHandler::AddIceCandidate(
+    std::unique_ptr<IceCandidateInterface> candidate,
+    std::function<void(RTCError)> callback) {
+  TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::AddIceCandidate");
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  // Chain this operation. If asynchronous operations are pending on the
+  // chain, this operation will be queued to be invoked, otherwise the
+  // contents of the lambda will execute immediately.
+  operations_chain_->ChainOperation(
+      [this_weak_ptr = weak_ptr_factory_.GetWeakPtr(),
+       candidate = std::move(candidate), callback = std::move(callback)](
+          std::function<void()> operations_chain_callback) {
+        // If the handler has been destroyed, treat it like a closed
+        // connection.
+        auto result =
+            this_weak_ptr
+                ? this_weak_ptr->AddIceCandidateInternal(candidate.get())
+                : kAddIceCandidateFailClosed;
+        NoteAddIceCandidateResult(result);
+        // Advance the chain before invoking `callback`, so the callback sees
+        // the operation as completed.
+        operations_chain_callback();
+        switch (result) {
+          case AddIceCandidateResult::kAddIceCandidateSuccess:
+          case AddIceCandidateResult::kAddIceCandidateFailNotReady:
+            // Success!
+            callback(RTCError::OK());
+            break;
+          case AddIceCandidateResult::kAddIceCandidateFailClosed:
+            // Note that the spec says to just abort without resolving the
+            // promise in this case, but this layer must return an RTCError.
+            callback(RTCError(
+                RTCErrorType::INVALID_STATE,
+                "AddIceCandidate failed because the session was shut down"));
+            break;
+          case AddIceCandidateResult::kAddIceCandidateFailNoRemoteDescription:
+            // Spec: "If remoteDescription is null return a promise rejected
+            // with a newly created InvalidStateError."
+            callback(RTCError(RTCErrorType::INVALID_STATE,
+                              "The remote description was null"));
+            break;
+          case AddIceCandidateResult::kAddIceCandidateFailNullCandidate:
+            // TODO(https://crbug.com/935898): Handle end-of-candidates instead
+            // of treating null candidate as an error.
+            callback(RTCError(RTCErrorType::UNSUPPORTED_OPERATION,
+                              "Error processing ICE candidate"));
+            break;
+          case AddIceCandidateResult::kAddIceCandidateFailNotValid:
+          case AddIceCandidateResult::kAddIceCandidateFailInAddition:
+          case AddIceCandidateResult::kAddIceCandidateFailNotUsable:
+            // Spec: "If candidate could not be successfully added [...] Reject
+            // p with a newly created OperationError and abort these steps."
+            // UNSUPPORTED_OPERATION maps to OperationError.
+            callback(RTCError(RTCErrorType::UNSUPPORTED_OPERATION,
+                              "Error processing ICE candidate"));
+            break;
+          default:
+            RTC_DCHECK_NOTREACHED();
+        }
+      });
+}
+
+// Removes previously-added remote ICE candidates from both the remote
+// description and the transport controller. Returns false only for
+// precondition failures (closed, no remote description, empty input);
+// partial removal from the description still returns true, with the
+// discrepancy logged.
+bool SdpOfferAnswerHandler::RemoveIceCandidates(
+    const std::vector<cricket::Candidate>& candidates) {
+  TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::RemoveIceCandidates");
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  if (pc_->IsClosed()) {
+    RTC_LOG(LS_ERROR) << "RemoveIceCandidates: PeerConnection is closed.";
+    return false;
+  }
+
+  if (!remote_description()) {
+    RTC_LOG(LS_ERROR) << "RemoveIceCandidates: ICE candidates can't be removed "
+                         "without any remote session description.";
+    return false;
+  }
+
+  if (candidates.empty()) {
+    RTC_LOG(LS_ERROR) << "RemoveIceCandidates: candidates are empty.";
+    return false;
+  }
+
+  size_t number_removed =
+      mutable_remote_description()->RemoveCandidates(candidates);
+  if (number_removed != candidates.size()) {
+    RTC_LOG(LS_ERROR)
+        << "RemoveIceCandidates: Failed to remove candidates. Requested "
+        << candidates.size() << " but only " << number_removed
+        << " are removed.";
+  }
+
+  // Remove the candidates from the transport controller. A failure here is
+  // also logged but does not change the return value.
+  RTCError error = transport_controller_s()->RemoveRemoteCandidates(candidates);
+  if (!error.ok()) {
+    RTC_LOG(LS_ERROR)
+        << "RemoveIceCandidates: Error when removing remote candidates: "
+        << error.message();
+  }
+  return true;
+}
+
+// Records a locally gathered candidate on the local description, if one
+// has been set; otherwise the candidate is silently dropped.
+void SdpOfferAnswerHandler::AddLocalIceCandidate(
+    const JsepIceCandidate* candidate) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  if (!local_description()) {
+    return;
+  }
+  mutable_local_description()->AddCandidate(candidate);
+}
+
+// Removes locally gathered candidates from the local description, if one
+// has been set; otherwise this is a no-op.
+void SdpOfferAnswerHandler::RemoveLocalIceCandidates(
+    const std::vector<cricket::Candidate>& candidates) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  if (!local_description()) {
+    return;
+  }
+  mutable_local_description()->RemoveCandidates(candidates);
+}
+
+const SessionDescriptionInterface* SdpOfferAnswerHandler::local_description()
+    const {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  // A pending (not yet stable) local description shadows the current one.
+  if (pending_local_description_) {
+    return pending_local_description_.get();
+  }
+  return current_local_description_.get();
+}
+
+const SessionDescriptionInterface* SdpOfferAnswerHandler::remote_description()
+    const {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  // A pending (not yet stable) remote description shadows the current one.
+  if (pending_remote_description_) {
+    return pending_remote_description_.get();
+  }
+  return current_remote_description_.get();
+}
+
+const SessionDescriptionInterface*
+SdpOfferAnswerHandler::current_local_description() const {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  // Non-owning view; ownership stays with this handler.
+  const auto& desc = current_local_description_;
+  return desc.get();
+}
+
+const SessionDescriptionInterface*
+SdpOfferAnswerHandler::current_remote_description() const {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  // Non-owning view; ownership stays with this handler.
+  const auto& desc = current_remote_description_;
+  return desc.get();
+}
+
+const SessionDescriptionInterface*
+SdpOfferAnswerHandler::pending_local_description() const {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  // May be null when signaling is in the stable state.
+  const auto& desc = pending_local_description_;
+  return desc.get();
+}
+
+const SessionDescriptionInterface*
+SdpOfferAnswerHandler::pending_remote_description() const {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  // May be null when signaling is in the stable state.
+  const auto& desc = pending_remote_description_;
+  return desc.get();
+}
+
+PeerConnectionInterface::SignalingState SdpOfferAnswerHandler::signaling_state()
+    const {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  // Plain read; writes happen only via ChangeSignalingState().
+  const PeerConnectionInterface::SignalingState state = signaling_state_;
+  return state;
+}
+
+// Transitions the signaling state and notifies the PeerConnection observer.
+// A no-op when the state is unchanged, so observers only see real
+// transitions.
+void SdpOfferAnswerHandler::ChangeSignalingState(
+    PeerConnectionInterface::SignalingState signaling_state) {
+  TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::ChangeSignalingState");
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  const PeerConnectionInterface::SignalingState old_state = signaling_state_;
+  if (old_state == signaling_state) {
+    return;
+  }
+  signaling_state_ = signaling_state;
+  RTC_LOG(LS_INFO) << "Session: " << pc_->session_id() << " Old state: "
+                   << PeerConnectionInterface::AsString(old_state)
+                   << " New state: "
+                   << PeerConnectionInterface::AsString(signaling_state);
+  pc_->Observer()->OnSignalingChange(signaling_state_);
+}
+
+// Transitions the signaling state machine for a newly applied description
+// (`type` from `source`) and then pushes the media sections down to the
+// channels/transports. Returns the result of that pushdown.
+RTCError SdpOfferAnswerHandler::UpdateSessionState(
+    SdpType type,
+    cricket::ContentSource source,
+    const cricket::SessionDescription* description,
+    const std::map<std::string, const cricket::ContentGroup*>&
+        bundle_groups_by_mid) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+
+  // If there's already a pending error then no state transition should
+  // happen. But all call-sites should be verifying this before calling us!
+  RTC_DCHECK(session_error() == SessionError::kNone);
+
+  // If this is answer-ish we're ready to let media flow.
+  if (type == SdpType::kPrAnswer || type == SdpType::kAnswer) {
+    EnableSending();
+  }
+
+  // Update the signaling state according to the specified state machine (see
+  // https://w3c.github.io/webrtc-pc/#rtcsignalingstate-enum).
+  if (type == SdpType::kOffer) {
+    ChangeSignalingState(source == cricket::CS_LOCAL
+                             ? PeerConnectionInterface::kHaveLocalOffer
+                             : PeerConnectionInterface::kHaveRemoteOffer);
+  } else if (type == SdpType::kPrAnswer) {
+    ChangeSignalingState(source == cricket::CS_LOCAL
+                             ? PeerConnectionInterface::kHaveLocalPrAnswer
+                             : PeerConnectionInterface::kHaveRemotePrAnswer);
+  } else {
+    RTC_DCHECK(type == SdpType::kAnswer);
+    // A final answer returns us to stable; the transceiver stable-state
+    // snapshots kept for rollback are no longer needed.
+    ChangeSignalingState(PeerConnectionInterface::kStable);
+    if (ConfiguredForMedia()) {
+      transceivers()->DiscardStableStates();
+    }
+  }
+
+  // Update internal objects according to the session description's media
+  // descriptions.
+  return PushdownMediaDescription(type, source, bundle_groups_by_mid);
+}
+
+// Decides, at fire time, whether a previously generated negotiationneeded
+// event (identified by `event_id`) is still valid to deliver. See
+// GenerateNegotiationNeededEvent() for where event IDs are produced.
+bool SdpOfferAnswerHandler::ShouldFireNegotiationNeededEvent(
+    uint32_t event_id) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  // Plan B? Always fire to conform with useless legacy behavior.
+  if (!IsUnifiedPlan()) {
+    return true;
+  }
+  // The event ID has been invalidated. Either negotiation is no longer needed
+  // or a newer negotiation needed event has been generated.
+  if (event_id != negotiation_needed_event_id_) {
+    return false;
+  }
+  // The chain is no longer empty, update negotiation needed when it becomes
+  // empty. This should generate a newer negotiation needed event, making this
+  // one obsolete.
+  if (!operations_chain_->IsEmpty()) {
+    // Since we just suppressed an event that would have been fired, if
+    // negotiation is still needed by the time the chain becomes empty again,
+    // we must make sure to generate another event if negotiation is needed
+    // then. This happens when `is_negotiation_needed_` goes from false to
+    // true, so we set it to false until UpdateNegotiationNeeded() is called.
+    is_negotiation_needed_ = false;
+    update_negotiation_needed_on_empty_chain_ = true;
+    return false;
+  }
+  // We must not fire if the signaling state is no longer "stable". If
+  // negotiation is still needed when we return to "stable", a new negotiation
+  // needed event will be generated, so this one can safely be suppressed.
+  if (signaling_state_ != PeerConnectionInterface::kStable) {
+    return false;
+  }
+  // All checks have passed - please fire "negotiationneeded" now!
+  return true;
+}
+
+// Plan B only: returns the collection of local streams added via
+// AddStream(). Hard-CHECKs (crashes) under Unified Plan semantics.
+rtc::scoped_refptr<StreamCollectionInterface>
+SdpOfferAnswerHandler::local_streams() {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  RTC_CHECK(!IsUnifiedPlan()) << "local_streams is not available with Unified "
+                                 "Plan SdpSemantics. Please use GetSenders "
+                                 "instead.";
+  return local_streams_;
+}
+
+// Plan B only: returns the collection of remote streams derived from the
+// remote description. Hard-CHECKs (crashes) under Unified Plan semantics.
+rtc::scoped_refptr<StreamCollectionInterface>
+SdpOfferAnswerHandler::remote_streams() {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  RTC_CHECK(!IsUnifiedPlan()) << "remote_streams is not available with Unified "
+                                 "Plan SdpSemantics. Please use GetReceivers "
+                                 "instead.";
+  return remote_streams_;
+}
+
+// Plan B only: registers `local_stream` and all of its current tracks with
+// the RTP manager, and starts observing the stream so that tracks added or
+// removed later are reflected too. Returns false when closed or when the
+// stream cannot be added. Hard-CHECKs under Unified Plan semantics.
+bool SdpOfferAnswerHandler::AddStream(MediaStreamInterface* local_stream) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  RTC_CHECK(!IsUnifiedPlan()) << "AddStream is not available with Unified Plan "
+                                 "SdpSemantics. Please use AddTrack instead.";
+  if (pc_->IsClosed()) {
+    return false;
+  }
+  if (!CanAddLocalMediaStream(local_streams_.get(), local_stream)) {
+    return false;
+  }
+
+  local_streams_->AddStream(
+      rtc::scoped_refptr<MediaStreamInterface>(local_stream));
+  // The observer forwards later track add/remove events on this stream back
+  // to the On*Track{Added,Removed} handlers below, on the signaling thread.
+  auto observer = std::make_unique<MediaStreamObserver>(
+      local_stream,
+      [this](AudioTrackInterface* audio_track,
+             MediaStreamInterface* media_stream) {
+        RTC_DCHECK_RUN_ON(signaling_thread());
+        OnAudioTrackAdded(audio_track, media_stream);
+      },
+      [this](AudioTrackInterface* audio_track,
+             MediaStreamInterface* media_stream) {
+        RTC_DCHECK_RUN_ON(signaling_thread());
+        OnAudioTrackRemoved(audio_track, media_stream);
+      },
+      [this](VideoTrackInterface* video_track,
+             MediaStreamInterface* media_stream) {
+        RTC_DCHECK_RUN_ON(signaling_thread());
+        OnVideoTrackAdded(video_track, media_stream);
+      },
+      [this](VideoTrackInterface* video_track,
+             MediaStreamInterface* media_stream) {
+        RTC_DCHECK_RUN_ON(signaling_thread());
+        OnVideoTrackRemoved(video_track, media_stream);
+      });
+  stream_observers_.push_back(std::move(observer));
+
+  // Register the tracks the stream already contains.
+  for (const auto& track : local_stream->GetAudioTracks()) {
+    rtp_manager()->AddAudioTrack(track.get(), local_stream);
+  }
+  for (const auto& track : local_stream->GetVideoTracks()) {
+    rtp_manager()->AddVideoTrack(track.get(), local_stream);
+  }
+
+  pc_->legacy_stats()->AddStream(local_stream);
+  UpdateNegotiationNeeded();
+  return true;
+}
+
+// Plan B only: detaches `local_stream`'s tracks from the RTP manager and
+// stops observing the stream. Safe to call on a closed connection - track
+// teardown and the negotiation-needed update are skipped in that case, but
+// the stream and its observer are still unregistered.
+void SdpOfferAnswerHandler::RemoveStream(MediaStreamInterface* local_stream) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  RTC_CHECK(!IsUnifiedPlan()) << "RemoveStream is not available with Unified "
+                                 "Plan SdpSemantics. Please use RemoveTrack "
+                                 "instead.";
+  // NOTE(review): the trace label retains the legacy "PeerConnection::"
+  // prefix, unlike the other methods in this class.
+  TRACE_EVENT0("webrtc", "PeerConnection::RemoveStream");
+  if (!pc_->IsClosed()) {
+    for (const auto& track : local_stream->GetAudioTracks()) {
+      rtp_manager()->RemoveAudioTrack(track.get(), local_stream);
+    }
+    for (const auto& track : local_stream->GetVideoTracks()) {
+      rtp_manager()->RemoveVideoTrack(track.get(), local_stream);
+    }
+  }
+  local_streams_->RemoveStream(local_stream);
+  // Drop the observer that was installed for this stream in AddStream().
+  stream_observers_.erase(
+      std::remove_if(
+          stream_observers_.begin(), stream_observers_.end(),
+          [local_stream](const std::unique_ptr<MediaStreamObserver>& observer) {
+            return observer->stream()->id().compare(local_stream->id()) == 0;
+          }),
+      stream_observers_.end());
+
+  if (pc_->IsClosed()) {
+    return;
+  }
+  UpdateNegotiationNeeded();
+}
+
+// MediaStreamObserver callback (Plan B): an audio track appeared on an
+// added local stream. Ignored once the connection is closed.
+void SdpOfferAnswerHandler::OnAudioTrackAdded(AudioTrackInterface* track,
+                                              MediaStreamInterface* stream) {
+  if (!pc_->IsClosed()) {
+    rtp_manager()->AddAudioTrack(track, stream);
+    UpdateNegotiationNeeded();
+  }
+}
+
+// MediaStreamObserver callback (Plan B): an audio track was removed from an
+// added local stream. Ignored once the connection is closed.
+void SdpOfferAnswerHandler::OnAudioTrackRemoved(AudioTrackInterface* track,
+                                                MediaStreamInterface* stream) {
+  if (!pc_->IsClosed()) {
+    rtp_manager()->RemoveAudioTrack(track, stream);
+    UpdateNegotiationNeeded();
+  }
+}
+
+// MediaStreamObserver callback (Plan B): a video track appeared on an
+// added local stream. Ignored once the connection is closed.
+void SdpOfferAnswerHandler::OnVideoTrackAdded(VideoTrackInterface* track,
+                                              MediaStreamInterface* stream) {
+  if (!pc_->IsClosed()) {
+    rtp_manager()->AddVideoTrack(track, stream);
+    UpdateNegotiationNeeded();
+  }
+}
+
+// MediaStreamObserver callback (Plan B): a video track was removed from an
+// added local stream. Ignored once the connection is closed.
+void SdpOfferAnswerHandler::OnVideoTrackRemoved(VideoTrackInterface* track,
+                                                MediaStreamInterface* stream) {
+  if (!pc_->IsClosed()) {
+    rtp_manager()->RemoveVideoTrack(track, stream);
+    UpdateNegotiationNeeded();
+  }
+}
+
+// Rolls the signaling state back to "stable" (Unified Plan only), undoing
+// the effects of a pending local or remote offer: restores transceiver
+// state from the recorded stable-state snapshots, rolls back the
+// transports, clears the pending descriptions, then fires the resulting
+// track/stream callbacks after all internal state has settled.
+RTCError SdpOfferAnswerHandler::Rollback(SdpType desc_type) {
+  TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::Rollback");
+  auto state = signaling_state();
+  if (state != PeerConnectionInterface::kHaveLocalOffer &&
+      state != PeerConnectionInterface::kHaveRemoteOffer) {
+    LOG_AND_RETURN_ERROR(
+        RTCErrorType::INVALID_STATE,
+        (rtc::StringBuilder("Called in wrong signalingState: ")
+         << (PeerConnectionInterface::AsString(signaling_state())))
+            .Release());
+  }
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  RTC_DCHECK(IsUnifiedPlan());
+  std::vector<rtc::scoped_refptr<RtpTransceiverInterface>>
+      now_receiving_transceivers;
+  std::vector<rtc::scoped_refptr<MediaStreamInterface>> all_added_streams;
+  std::vector<rtc::scoped_refptr<MediaStreamInterface>> all_removed_streams;
+  std::vector<rtc::scoped_refptr<RtpReceiverInterface>> removed_receivers;
+
+  for (auto&& transceivers_stable_state_pair : transceivers()->StableStates()) {
+    auto transceiver = transceivers_stable_state_pair.first;
+    // NOTE: this `state` shadows the signaling-state local above; here it is
+    // the transceiver's recorded stable state.
+    auto state = transceivers_stable_state_pair.second;
+
+    if (state.did_set_fired_direction()) {
+      // If this rollback triggers going from not receiving to receiving again,
+      // we need to fire "ontrack".
+      bool previously_fired_direction_is_recv =
+          transceiver->fired_direction().has_value() &&
+          RtpTransceiverDirectionHasRecv(*transceiver->fired_direction());
+      bool currently_fired_direction_is_recv =
+          state.fired_direction().has_value() &&
+          RtpTransceiverDirectionHasRecv(state.fired_direction().value());
+      if (!previously_fired_direction_is_recv &&
+          currently_fired_direction_is_recv) {
+        now_receiving_transceivers.push_back(transceiver);
+      }
+      transceiver->internal()->set_fired_direction(state.fired_direction());
+    }
+
+    if (state.remote_stream_ids()) {
+      std::vector<rtc::scoped_refptr<MediaStreamInterface>> added_streams;
+      std::vector<rtc::scoped_refptr<MediaStreamInterface>> removed_streams;
+      SetAssociatedRemoteStreams(transceiver->internal()->receiver_internal(),
+                                 state.remote_stream_ids().value(),
+                                 &added_streams, &removed_streams);
+      all_added_streams.insert(all_added_streams.end(), added_streams.begin(),
+                               added_streams.end());
+      all_removed_streams.insert(all_removed_streams.end(),
+                                 removed_streams.begin(),
+                                 removed_streams.end());
+      if (!state.has_m_section() && !state.newly_created()) {
+        continue;
+      }
+    }
+
+    // Due to the above `continue` statement, the below code only runs if there
+    // is a change in mid association (has_m_section), if the transceiver was
+    // newly created (newly_created) or if remote streams were not set.
+
+    RTC_DCHECK(transceiver->internal()->mid().has_value());
+    transceiver->internal()->ClearChannel();
+
+    if (signaling_state() == PeerConnectionInterface::kHaveRemoteOffer &&
+        transceiver->receiver()) {
+      removed_receivers.push_back(transceiver->receiver());
+    }
+    if (state.newly_created()) {
+      if (transceiver->internal()->reused_for_addtrack()) {
+        transceiver->internal()->set_created_by_addtrack(true);
+      } else {
+        transceiver->internal()->StopTransceiverProcedure();
+        transceivers()->Remove(transceiver);
+      }
+    }
+    if (state.init_send_encodings()) {
+      transceiver->internal()->sender_internal()->set_init_send_encodings(
+          state.init_send_encodings().value());
+    }
+    transceiver->internal()->sender_internal()->set_transport(nullptr);
+    transceiver->internal()->receiver_internal()->set_transport(nullptr);
+    if (state.has_m_section()) {
+      transceiver->internal()->set_mid(state.mid());
+      transceiver->internal()->set_mline_index(state.mline_index());
+    }
+  }
+  RTCError e = transport_controller_s()->RollbackTransports();
+  if (!e.ok()) {
+    return e;
+  }
+  transceivers()->DiscardStableStates();
+  pending_local_description_.reset();
+  pending_remote_description_.reset();
+  ChangeSignalingState(PeerConnectionInterface::kStable);
+
+  // Once all processing has finished, fire off callbacks.
+  for (const auto& transceiver : now_receiving_transceivers) {
+    pc_->Observer()->OnTrack(transceiver);
+    pc_->Observer()->OnAddTrack(transceiver->receiver(),
+                                transceiver->receiver()->streams());
+  }
+  for (const auto& receiver : removed_receivers) {
+    pc_->Observer()->OnRemoveTrack(receiver);
+  }
+  for (const auto& stream : all_added_streams) {
+    pc_->Observer()->OnAddStream(stream);
+  }
+  for (const auto& stream : all_removed_streams) {
+    pc_->Observer()->OnRemoveStream(stream);
+  }
+
+  // The assumption is that in case of implicit rollback
+  // UpdateNegotiationNeeded gets called in SetRemoteDescription.
+  if (desc_type == SdpType::kRollback) {
+    UpdateNegotiationNeeded();
+    if (is_negotiation_needed_) {
+      // Legacy version.
+      pc_->Observer()->OnRenegotiationNeeded();
+      // Spec-compliant version; the event may get invalidated before firing.
+      GenerateNegotiationNeededEvent();
+    }
+  }
+  return RTCError::OK();
+}
+
+// Returns true when the owning PeerConnection uses Unified Plan SDP
+// semantics. Thin delegating wrapper; kept as a member so call sites in this
+// handler read uniformly.
+bool SdpOfferAnswerHandler::IsUnifiedPlan() const {
+  return pc_->IsUnifiedPlan();
+}
+
+// Invoked when the operations chain drains. If an UpdateNegotiationNeeded()
+// was deferred while the chain was busy, perform it now. Firing when the
+// chain is empty is only supported in Unified Plan to avoid Plan B
+// regressions; in Plan B, onnegotiationneeded is already broken anyway, so
+// firing it even more might just be confusing.
+void SdpOfferAnswerHandler::OnOperationsChainEmpty() {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  if (pc_->IsClosed())
+    return;
+  if (!update_negotiation_needed_on_empty_chain_)
+    return;
+  update_negotiation_needed_on_empty_chain_ = false;
+  if (!IsUnifiedPlan())
+    return;
+  UpdateNegotiationNeeded();
+}
+
+// Accessor for the cached caller/offerer flag. Returns absl::nullopt while
+// the role has not yet been determined by negotiation (see the discussion in
+// GuessSslRole() below for when the value becomes known).
+absl::optional<bool> SdpOfferAnswerHandler::is_caller() const {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  return is_caller_;
+}
+
+// Returns true if `local_ice_credentials_to_replace_` currently holds
+// credentials, i.e. the local ICE credentials are pending replacement (an
+// ICE restart is needed; see step 2 of CheckIfNegotiationIsNeeded()).
+bool SdpOfferAnswerHandler::HasNewIceCredentials() {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  return local_ice_credentials_to_replace_->HasIceCredentials();
+}
+
+// Returns true if an ICE restart is pending for the m= section named
+// `content_name`, i.e. that section was recorded in `pending_ice_restarts_`.
+bool SdpOfferAnswerHandler::IceRestartPending(
+    const std::string& content_name) const {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  return pending_ice_restarts_.count(content_name) > 0;
+}
+
+// Returns whether the transport for `content_name` needs an ICE restart.
+// Thin delegating wrapper around the owning PeerConnection.
+bool SdpOfferAnswerHandler::NeedsIceRestart(
+    const std::string& content_name) const {
+  return pc_->NeedsIceRestart(content_name);
+}
+
+// Queries the transport controller for the DTLS role of the transport
+// identified by `mid`. Returns absl::nullopt if the role is not (yet) known.
+absl::optional<rtc::SSLRole> SdpOfferAnswerHandler::GetDtlsRole(
+    const std::string& mid) const {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  return transport_controller_s()->GetDtlsRole(mid);
+}
+
+// Updates the [[NegotiationNeeded]] slot and fires onnegotiationneeded when
+// appropriate. In Plan B the events are fired unconditionally; in Unified
+// Plan this follows the spec's "update the negotiation-needed flag" steps,
+// quoted in the inline comments below.
+void SdpOfferAnswerHandler::UpdateNegotiationNeeded() {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  if (!IsUnifiedPlan()) {
+    // Plan B: no flag bookkeeping; just notify (legacy and spec-style).
+    pc_->Observer()->OnRenegotiationNeeded();
+    GenerateNegotiationNeededEvent();
+    return;
+  }
+
+  // In the spec, a task is queued here to run the following steps - this is
+  // meant to ensure we do not fire onnegotiationneeded prematurely if
+  // multiple changes are being made at once. In order to support Chromium's
+  // implementation where the JavaScript representation of the PeerConnection
+  // lives on a separate thread though, the queuing of a task is instead
+  // performed by the PeerConnectionObserver posting from the signaling thread
+  // to the JavaScript main thread that negotiation is needed. And because the
+  // Operations Chain lives on the WebRTC signaling thread,
+  // ShouldFireNegotiationNeededEvent() must be called before firing the event
+  // to ensure the Operations Chain is still empty and the event has not been
+  // invalidated.
+
+  // If connection's [[IsClosed]] slot is true, abort these steps.
+  if (pc_->IsClosed())
+    return;
+
+  // If connection's signaling state is not "stable", abort these steps.
+  if (signaling_state() != PeerConnectionInterface::kStable)
+    return;
+
+  // NOTE
+  // The negotiation-needed flag will be updated once the state transitions to
+  // "stable", as part of the steps for setting an RTCSessionDescription.
+
+  // If the result of checking if negotiation is needed is false, clear the
+  // negotiation-needed flag by setting connection's [[NegotiationNeeded]]
+  // slot to false, and abort these steps.
+  bool is_negotiation_needed = CheckIfNegotiationIsNeeded();
+  if (!is_negotiation_needed) {
+    is_negotiation_needed_ = false;
+    // Invalidate any negotiation needed event that may previosuly have been
+    // generated.
+    ++negotiation_needed_event_id_;
+    return;
+  }
+
+  // If connection's [[NegotiationNeeded]] slot is already true, abort these
+  // steps.
+  if (is_negotiation_needed_)
+    return;
+
+  // Set connection's [[NegotiationNeeded]] slot to true.
+  is_negotiation_needed_ = true;
+
+  // Queue a task that runs the following steps:
+  // If connection's [[IsClosed]] slot is true, abort these steps.
+  // If connection's [[NegotiationNeeded]] slot is false, abort these steps.
+  // Fire an event named negotiationneeded at connection.
+  pc_->Observer()->OnRenegotiationNeeded();
+  // Fire the spec-compliant version; when ShouldFireNegotiationNeededEvent()
+  // is used in the task queued by the observer, this event will only fire
+  // when the chain is empty.
+  GenerateNegotiationNeededEvent();
+}
+
+// Allocates SCTP stream ids (sids) for data channels. Requires both local
+// and remote descriptions to be applied, since the allocation depends on the
+// SCTP transport's SSL role; when the transport cannot report a role yet
+// (GetSctpSslRole_n() returns nullopt), GuessSslRole() is used instead. The
+// allocation itself runs on the network thread.
+void SdpOfferAnswerHandler::AllocateSctpSids() {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  if (!local_description() || !remote_description()) {
+    RTC_DLOG(LS_VERBOSE)
+        << "Local and Remote descriptions must be applied to get the "
+           "SSL Role of the SCTP transport.";
+    return;
+  }
+
+  absl::optional<rtc::SSLRole> guessed_role = GuessSslRole();
+  // BlockingCall keeps the signaling thread parked until the lambda returns,
+  // so capturing `guessed_role` by reference is safe here.
+  network_thread()->BlockingCall(
+      [&, data_channel_controller = data_channel_controller()] {
+        RTC_DCHECK_RUN_ON(network_thread());
+        absl::optional<rtc::SSLRole> role = pc_->GetSctpSslRole_n();
+        if (!role)
+          role = guessed_role;
+        if (role)
+          data_channel_controller->AllocateSctpSids(*role);
+      });
+}
+
+// Guesses the SSL role used for SCTP sid allocation before the DTLS
+// handshake has definitively settled it. Returns absl::nullopt when there is
+// no SCTP m= section. See the long discussion below for when this guess can
+// be wrong.
+absl::optional<rtc::SSLRole> SdpOfferAnswerHandler::GuessSslRole() const {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  if (!pc_->sctp_mid())
+    return absl::nullopt;
+
+  // TODO(bugs.webrtc.org/13668): This guesswork is guessing wrong (returning
+  // SSL_CLIENT = ACTIVE) if remote offer has role ACTIVE, but we'll be able
+  // to detect that by looking at the SDP.
+  //
+  // The phases of establishing an SCTP session are:
+  //
+  // Offerer:
+  //
+  // * Before negotiation: Neither is_caller nor sctp_mid exists.
+  // * After setting an offer as local description: is_caller is known (true),
+  //   sctp_mid is known, but we don't know the SSL role for sure (or if we'll
+  //   eventually get an SCTP session).
+  // * After setting an answer as the remote description: We know is_caller,
+  //   sctp_mid and that we'll get the SCTP channel established (m-section
+  //   wasn't rejected).
+  // * Special case: The SCTP m-section was rejected: Close datachannels.
+  // * We MAY know the SSL role if we offered actpass and got back active or
+  //   passive; if the other end is a webrtc implementation, it will be active.
+  // * After the TLS handshake: We have a definitive answer on the SSL role.
+  //
+  // Answerer:
+  //
+  // * After setting an offer as remote description: We know is_caller (false).
+  // * If there was an SCTP session, we know the SCTP mid. We also know the
+  //   SSL role, since if the remote offer was actpass or passive, we'll answer
+  //   active, and if the remote offer was active, we're passive.
+  // * Special case: No SCTP m= line. We don't know for sure if the remote
+  //   doesn't support it or just didn't offer it. Not sure what we do in this
+  //   case (logic would suggest fire a `negotiationneeded` event and generate a
+  //   subsequent offer, but this needs to be tested).
+  // * After the TLS handshake: We know that TLS obeyed the protocol. There
+  //   should be an error surfaced somewhere if it didn't.
+  // * "Guessing" should always be correct if we get an SCTP session and are not
+  //   the offerer.
+
+  // NOTE(review): is_caller() returns absl::optional<bool>; as a ternary
+  // condition the optional's has_value() is what is tested (contextual
+  // conversion via explicit operator bool), not the contained bool value.
+  // Since sctp_mid() exists at this point the value is presumably set, but
+  // confirm this is intended — see the TODO above (bugs.webrtc.org/13668).
+  return is_caller() ? rtc::SSL_SERVER : rtc::SSL_CLIENT;
+}
+
+// Implements the spec's "check if negotiation is needed" algorithm; the
+// numbered comments below quote the spec steps. Returns true as soon as any
+// step detects a mismatch between the transceivers/data channels and the
+// currently applied descriptions; returns false if everything matches.
+bool SdpOfferAnswerHandler::CheckIfNegotiationIsNeeded() {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  // 1. If any implementation-specific negotiation is required, as described
+  // at the start of this section, return true.
+
+  // 2. If connection.[[LocalIceCredentialsToReplace]] is not empty, return
+  // true.
+  if (local_ice_credentials_to_replace_->HasIceCredentials()) {
+    return true;
+  }
+
+  // 3. Let description be connection.[[CurrentLocalDescription]].
+  const SessionDescriptionInterface* description = current_local_description();
+  if (!description)
+    return true;
+
+  // 4. If connection has created any RTCDataChannels, and no m= section in
+  // description has been negotiated yet for data, return true.
+  if (data_channel_controller()->HasUsedDataChannels()) {
+    const cricket::ContentInfo* data_content =
+        cricket::GetFirstDataContent(description->description()->contents());
+    if (!data_content) {
+      return true;
+    }
+    // The remote end might have rejected the data content.
+    const cricket::ContentInfo* remote_data_content =
+        current_remote_description()
+            ? current_remote_description()->description()->GetContentByName(
+                  data_content->name)
+            : nullptr;
+    if (remote_data_content && remote_data_content->rejected) {
+      return true;
+    }
+  }
+  // Without media, the remaining (transceiver-based) checks do not apply.
+  if (!ConfiguredForMedia()) {
+    return false;
+  }
+
+  // 5. For each transceiver in connection's set of transceivers, perform the
+  // following checks:
+  for (const auto& transceiver : transceivers()->ListInternal()) {
+    const ContentInfo* current_local_msection =
+        FindTransceiverMSection(transceiver, description);
+
+    const ContentInfo* current_remote_msection =
+        FindTransceiverMSection(transceiver, current_remote_description());
+
+    // 5.4 If transceiver is stopped and is associated with an m= section,
+    // but the associated m= section is not yet rejected in
+    // connection.[[CurrentLocalDescription]] or
+    // connection.[[CurrentRemoteDescription]], return true.
+    if (transceiver->stopped()) {
+      RTC_DCHECK(transceiver->stopping());
+      if (current_local_msection && !current_local_msection->rejected &&
+          ((current_remote_msection && !current_remote_msection->rejected) ||
+           !current_remote_msection)) {
+        return true;
+      }
+      continue;
+    }
+
+    // 5.1 If transceiver.[[Stopping]] is true and transceiver.[[Stopped]] is
+    // false, return true.
+    if (transceiver->stopping() && !transceiver->stopped())
+      return true;
+
+    // 5.2 If transceiver isn't stopped and isn't yet associated with an m=
+    // section in description, return true.
+    if (!current_local_msection)
+      return true;
+
+    const MediaContentDescription* current_local_media_description =
+        current_local_msection->media_description();
+    // 5.3 If transceiver isn't stopped and is associated with an m= section
+    // in description then perform the following checks:
+
+    // 5.3.1 If transceiver.[[Direction]] is "sendrecv" or "sendonly", and the
+    // associated m= section in description either doesn't contain a single
+    // "a=msid" line, or the number of MSIDs from the "a=msid" lines in this
+    // m= section, or the MSID values themselves, differ from what is in
+    // transceiver.sender.[[AssociatedMediaStreamIds]], return true.
+    if (RtpTransceiverDirectionHasSend(transceiver->direction())) {
+      if (current_local_media_description->streams().size() == 0)
+        return true;
+
+      std::vector<std::string> msection_msids;
+      for (const auto& stream : current_local_media_description->streams()) {
+        for (const std::string& msid : stream.stream_ids())
+          msection_msids.push_back(msid);
+      }
+
+      std::vector<std::string> transceiver_msids =
+          transceiver->sender()->stream_ids();
+      if (msection_msids.size() != transceiver_msids.size())
+        return true;
+
+      // Compare as multisets: order of msids is not significant.
+      absl::c_sort(transceiver_msids);
+      absl::c_sort(msection_msids);
+      if (transceiver_msids != msection_msids)
+        return true;
+    }
+
+    // 5.3.2 If description is of type "offer", and the direction of the
+    // associated m= section in neither connection.[[CurrentLocalDescription]]
+    // nor connection.[[CurrentRemoteDescription]] matches
+    // transceiver.[[Direction]], return true.
+    if (description->GetType() == SdpType::kOffer) {
+      if (!current_remote_description())
+        return true;
+
+      if (!current_remote_msection)
+        return true;
+
+      RtpTransceiverDirection current_local_direction =
+          current_local_media_description->direction();
+      RtpTransceiverDirection current_remote_direction =
+          current_remote_msection->media_description()->direction();
+      if (transceiver->direction() != current_local_direction &&
+          transceiver->direction() !=
+              RtpTransceiverDirectionReversed(current_remote_direction)) {
+        return true;
+      }
+    }
+
+    // 5.3.3 If description is of type "answer", and the direction of the
+    // associated m= section in the description does not match
+    // transceiver.[[Direction]] intersected with the offered direction (as
+    // described in [JSEP] (section 5.3.1.)), return true.
+    if (description->GetType() == SdpType::kAnswer) {
+      if (!remote_description())
+        return true;
+
+      const ContentInfo* offered_remote_msection =
+          FindTransceiverMSection(transceiver, remote_description());
+
+      RtpTransceiverDirection offered_direction =
+          offered_remote_msection
+              ? offered_remote_msection->media_description()->direction()
+              : RtpTransceiverDirection::kInactive;
+
+      if (current_local_media_description->direction() !=
+          (RtpTransceiverDirectionIntersection(
+              transceiver->direction(),
+              RtpTransceiverDirectionReversed(offered_direction)))) {
+        return true;
+      }
+    }
+  }
+  // If all the preceding checks were performed and true was not returned,
+  // nothing remains to be negotiated; return false.
+  return false;
+}
+
+// Bumps the negotiation-needed event id — invalidating any previously
+// generated event — and notifies the observer with the new id.
+void SdpOfferAnswerHandler::GenerateNegotiationNeededEvent() {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  ++negotiation_needed_event_id_;
+  pc_->Observer()->OnNegotiationNeededEvent(negotiation_needed_event_id_);
+}
+
+// Validates `sdesc` before it is applied as a local (CS_LOCAL) or remote
+// (CS_REMOTE) description. Checks, in order: non-null description, that the
+// SDP type is legal in the current signaling state, mid validity, crypto
+// settings, ice-ufrag/ice-pwd presence, bundled payload-type and header-
+// extension collisions, ssrc-group sizes (behind a killswitch), BUNDLE/
+// rtcp-mux settings, payload types, m= section count/order against the
+// current negotiation, and — Unified Plan only — at most one track per media
+// section plus simulcast header-extension requirements. Returns
+// RTCError::OK() when every check passes.
+RTCError SdpOfferAnswerHandler::ValidateSessionDescription(
+    const SessionDescriptionInterface* sdesc,
+    cricket::ContentSource source,
+    const std::map<std::string, const cricket::ContentGroup*>&
+        bundle_groups_by_mid) {
+  // An assumption is that a check for session error is done at a higher level.
+  RTC_DCHECK_EQ(SessionError::kNone, session_error());
+
+  if (!sdesc || !sdesc->description()) {
+    LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, kInvalidSdp);
+  }
+
+  SdpType type = sdesc->GetType();
+  if ((source == cricket::CS_LOCAL && !ExpectSetLocalDescription(type)) ||
+      (source == cricket::CS_REMOTE && !ExpectSetRemoteDescription(type))) {
+    LOG_AND_RETURN_ERROR(
+        RTCErrorType::INVALID_STATE,
+        (rtc::StringBuilder("Called in wrong state: ")
+         << PeerConnectionInterface::AsString(signaling_state()))
+            .Release());
+  }
+
+  RTCError error = ValidateMids(*sdesc->description());
+  if (!error.ok()) {
+    return error;
+  }
+
+  // Verify crypto settings. (Note: the previously declared unused local
+  // `std::string crypto_error` has been removed; the RTCError below is the
+  // only `crypto_error` in play.)
+  if (webrtc_session_desc_factory_->SdesPolicy() == cricket::SEC_REQUIRED ||
+      pc_->dtls_enabled()) {
+    RTCError crypto_error = VerifyCrypto(
+        sdesc->description(), pc_->dtls_enabled(), bundle_groups_by_mid);
+    if (!crypto_error.ok()) {
+      return crypto_error;
+    }
+  }
+
+  // Verify ice-ufrag and ice-pwd.
+  if (!VerifyIceUfragPwdPresent(sdesc->description(), bundle_groups_by_mid)) {
+    LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
+                         kSdpWithoutIceUfragPwd);
+  }
+
+  // Validate that there are no collisions of bundled payload types.
+  error = ValidateBundledPayloadTypes(*sdesc->description());
+  // TODO(bugs.webrtc.org/14420): actually reject.
+  RTC_HISTOGRAM_BOOLEAN("WebRTC.PeerConnection.ValidBundledPayloadTypes",
+                        error.ok());
+
+  // Validate that there are no collisions of bundled header extensions ids.
+  error = ValidateBundledRtpHeaderExtensions(*sdesc->description());
+  if (!error.ok()) {
+    return error;
+  }
+
+  // TODO(crbug.com/1459124): remove killswitch after rollout.
+  error = ValidateSsrcGroups(*sdesc->description());
+  if (!error.ok() &&
+      !pc_->trials().IsDisabled("WebRTC-PreventSsrcGroupsWithUnexpectedSize")) {
+    return error;
+  }
+
+  if (!pc_->ValidateBundleSettings(sdesc->description(),
+                                   bundle_groups_by_mid)) {
+    LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
+                         kBundleWithoutRtcpMux);
+  }
+
+  error = ValidatePayloadTypes(*sdesc->description());
+  if (!error.ok()) {
+    return error;
+  }
+
+  // TODO(skvlad): When the local rtcp-mux policy is Require, reject any
+  // m-lines that do not rtcp-mux enabled.
+
+  // Verify m-lines in Answer when compared against Offer.
+  if (type == SdpType::kPrAnswer || type == SdpType::kAnswer) {
+    // With an answer we want to compare the new answer session description
+    // with the offer's session description from the current negotiation.
+    const cricket::SessionDescription* offer_desc =
+        (source == cricket::CS_LOCAL) ? remote_description()->description()
+                                      : local_description()->description();
+    if (!MediaSectionsHaveSameCount(*offer_desc, *sdesc->description()) ||
+        !MediaSectionsInSameOrder(*offer_desc, nullptr, *sdesc->description(),
+                                  type)) {
+      LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
+                           kMlineMismatchInAnswer);
+    }
+  } else {
+    // The re-offers should respect the order of m= sections in current
+    // description. See RFC3264 Section 8 paragraph 4 for more details.
+    // With a re-offer, either the current local or current remote
+    // descriptions could be the most up to date, so we would like to check
+    // against both of them if they exist. It could be the case that one of
+    // them has a 0 port for a media section, but the other does not. This is
+    // important to check against in the case that we are recycling an m=
+    // section.
+    const cricket::SessionDescription* current_desc = nullptr;
+    const cricket::SessionDescription* secondary_current_desc = nullptr;
+    if (local_description()) {
+      current_desc = local_description()->description();
+      if (remote_description()) {
+        secondary_current_desc = remote_description()->description();
+      }
+    } else if (remote_description()) {
+      current_desc = remote_description()->description();
+    }
+    if (current_desc &&
+        !MediaSectionsInSameOrder(*current_desc, secondary_current_desc,
+                                  *sdesc->description(), type)) {
+      LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
+                           kMlineMismatchInSubsequentOffer);
+    }
+  }
+
+  if (IsUnifiedPlan()) {
+    // Ensure that each audio and video media section has at most one
+    // "StreamParams". This will return an error if receiving a session
+    // description from a "Plan B" endpoint which adds multiple tracks of the
+    // same type. With Unified Plan, there can only be at most one track per
+    // media section.
+    for (const ContentInfo& content : sdesc->description()->contents()) {
+      const MediaContentDescription& desc = *content.media_description();
+      if ((desc.type() == cricket::MEDIA_TYPE_AUDIO ||
+           desc.type() == cricket::MEDIA_TYPE_VIDEO) &&
+          desc.streams().size() > 1u) {
+        LOG_AND_RETURN_ERROR(
+            RTCErrorType::INVALID_PARAMETER,
+            "Media section has more than one track specified with a=ssrc lines "
+            "which is not supported with Unified Plan.");
+      }
+    }
+    // Validate spec-simulcast which only works if the remote end negotiated the
+    // mid and rid header extension.
+    error = ValidateRtpHeaderExtensionsForSpecSimulcast(*sdesc->description());
+    if (!error.ok()) {
+      return error;
+    }
+  }
+
+  return RTCError::OK();
+}
+
+// Walks every m= section of `new_session` (Unified Plan only) and brings the
+// corresponding local state in line with it: audio/video sections are
+// associated with transceivers (AssociateTransceiver) and their channels
+// created/cleared (UpdateTransceiverChannel); the first data section updates
+// the data channel transport; unsupported sections are ignored. Returns the
+// first error encountered, or RTCError::OK().
+RTCError SdpOfferAnswerHandler::UpdateTransceiversAndDataChannels(
+    cricket::ContentSource source,
+    const SessionDescriptionInterface& new_session,
+    const SessionDescriptionInterface* old_local_description,
+    const SessionDescriptionInterface* old_remote_description,
+    const std::map<std::string, const cricket::ContentGroup*>&
+        bundle_groups_by_mid) {
+  TRACE_EVENT0("webrtc",
+               "SdpOfferAnswerHandler::UpdateTransceiversAndDataChannels");
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  RTC_DCHECK(IsUnifiedPlan());
+
+  if (new_session.GetType() == SdpType::kOffer) {
+    // If the BUNDLE policy is max-bundle, then we know for sure that all
+    // transports will be bundled from the start. Return an error if
+    // max-bundle is specified but the session description does not have a
+    // BUNDLE group.
+    if (pc_->configuration()->bundle_policy ==
+            PeerConnectionInterface::kBundlePolicyMaxBundle &&
+        bundle_groups_by_mid.empty()) {
+      LOG_AND_RETURN_ERROR(
+          RTCErrorType::INVALID_PARAMETER,
+          "max-bundle configured but session description has no BUNDLE group");
+    }
+  }
+
+  const ContentInfos& new_contents = new_session.description()->contents();
+  for (size_t i = 0; i < new_contents.size(); ++i) {
+    const cricket::ContentInfo& new_content = new_contents[i];
+    cricket::MediaType media_type = new_content.media_description()->type();
+    // Record the mid so the generator never produces a colliding one.
+    mid_generator_.AddKnownId(new_content.name);
+    auto it = bundle_groups_by_mid.find(new_content.name);
+    const cricket::ContentGroup* bundle_group =
+        it != bundle_groups_by_mid.end() ? it->second : nullptr;
+    if (media_type == cricket::MEDIA_TYPE_AUDIO ||
+        media_type == cricket::MEDIA_TYPE_VIDEO) {
+      // Old contents are matched purely by index `i`.
+      const cricket::ContentInfo* old_local_content = nullptr;
+      if (old_local_description &&
+          i < old_local_description->description()->contents().size()) {
+        old_local_content =
+            &old_local_description->description()->contents()[i];
+      }
+      const cricket::ContentInfo* old_remote_content = nullptr;
+      if (old_remote_description &&
+          i < old_remote_description->description()->contents().size()) {
+        old_remote_content =
+            &old_remote_description->description()->contents()[i];
+      }
+      auto transceiver_or_error =
+          AssociateTransceiver(source, new_session.GetType(), i, new_content,
+                               old_local_content, old_remote_content);
+      if (!transceiver_or_error.ok()) {
+        // In the case where a transceiver is rejected locally prior to being
+        // associated, we don't expect to find a transceiver, but might find it
+        // in the case where state is still "stopping", not "stopped".
+        if (new_content.rejected) {
+          continue;
+        }
+        return transceiver_or_error.MoveError();
+      }
+      auto transceiver = transceiver_or_error.MoveValue();
+      RTCError error =
+          UpdateTransceiverChannel(transceiver, new_content, bundle_group);
+      // Handle locally rejected content. This code path is only needed for apps
+      // that SDP munge. Remote rejected content is handled in
+      // ApplyRemoteDescriptionUpdateTransceiverState().
+      if (source == cricket::ContentSource::CS_LOCAL && new_content.rejected) {
+        // Local offer.
+        if (new_session.GetType() == SdpType::kOffer) {
+          // If the RtpTransceiver API was used, it would already have made the
+          // transceiver stopping. But if the rejection was caused by SDP
+          // munging then we need to ensure the transceiver is stopping here.
+          if (!transceiver->internal()->stopping()) {
+            transceiver->internal()->StopStandard();
+          }
+          RTC_DCHECK(transceiver->internal()->stopping());
+        } else {
+          // Local answer.
+          RTC_DCHECK(new_session.GetType() == SdpType::kAnswer ||
+                     new_session.GetType() == SdpType::kPrAnswer);
+          // When RtpTransceiver API is used, rejection happens in the offer and
+          // the transceiver will already be stopped at local answer time
+          // (calling stop between SRD(offer) and SLD(answer) would not reject
+          // the content in the answer - instead this would trigger a follow-up
+          // O/A exchange). So if the content was rejected but the transceiver
+          // is not already stopped, SDP munging has happened and we need to
+          // ensure the transceiver is stopped.
+          if (!transceiver->internal()->stopped()) {
+            transceiver->internal()->StopTransceiverProcedure();
+          }
+          RTC_DCHECK(transceiver->internal()->stopped());
+        }
+      }
+      if (!error.ok()) {
+        return error;
+      }
+    } else if (media_type == cricket::MEDIA_TYPE_DATA) {
+      const auto data_mid = pc_->sctp_mid();
+      if (data_mid && new_content.name != data_mid.value()) {
+        // Ignore all but the first data section.
+        RTC_LOG(LS_INFO) << "Ignoring data media section with MID="
+                         << new_content.name;
+        continue;
+      }
+      RTCError error =
+          UpdateDataChannelTransport(source, new_content, bundle_group);
+      if (!error.ok()) {
+        return error;
+      }
+    } else if (media_type == cricket::MEDIA_TYPE_UNSUPPORTED) {
+      RTC_LOG(LS_INFO) << "Ignoring unsupported media type";
+    } else {
+      LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR,
+                           "Unknown section type.");
+    }
+  }
+
+  return RTCError::OK();
+}
+
+// Finds (by mid, then — for local descriptions — by m-line index) or, for
+// remote offers, creates the RtpTransceiver corresponding to `content`, then
+// associates it with the m= section by setting its mid and mline index.
+// Simulcast layer state is reconciled against the description. Returns the
+// associated transceiver proxy, or an error (e.g. media-type mismatch, or no
+// transceiver found for a local description).
+RTCErrorOr<rtc::scoped_refptr<RtpTransceiverProxyWithInternal<RtpTransceiver>>>
+SdpOfferAnswerHandler::AssociateTransceiver(
+    cricket::ContentSource source,
+    SdpType type,
+    size_t mline_index,
+    const ContentInfo& content,
+    const ContentInfo* old_local_content,
+    const ContentInfo* old_remote_content) {
+  TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::AssociateTransceiver");
+  RTC_DCHECK(IsUnifiedPlan());
+#if RTC_DCHECK_IS_ON
+  // If this is an offer then the m= section might be recycled. If the m=
+  // section is being recycled (defined as: rejected in the current local or
+  // remote description and not rejected in new description), the transceiver
+  // should have been removed by RemoveStoppedtransceivers()->
+  if (IsMediaSectionBeingRecycled(type, content, old_local_content,
+                                  old_remote_content)) {
+    const std::string& old_mid =
+        (old_local_content && old_local_content->rejected)
+            ? old_local_content->name
+            : old_remote_content->name;
+    auto old_transceiver = transceivers()->FindByMid(old_mid);
+    // The transceiver should be disassociated in RemoveStoppedTransceivers()
+    RTC_DCHECK(!old_transceiver);
+  }
+#endif
+
+  const MediaContentDescription* media_desc = content.media_description();
+  auto transceiver = transceivers()->FindByMid(content.name);
+  if (source == cricket::CS_LOCAL) {
+    // Find the RtpTransceiver that corresponds to this m= section, using the
+    // mapping between transceivers and m= section indices established when
+    // creating the offer.
+    if (!transceiver) {
+      transceiver = transceivers()->FindByMLineIndex(mline_index);
+    }
+    if (!transceiver) {
+      // This may happen normally when media sections are rejected.
+      LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
+                           "Transceiver not found based on m-line index");
+    }
+  } else {
+    RTC_DCHECK_EQ(source, cricket::CS_REMOTE);
+    // If the m= section is sendrecv or recvonly, and there are RtpTransceivers
+    // of the same type...
+    // When simulcast is requested, a transceiver cannot be associated because
+    // AddTrack cannot be called to initialize it.
+    if (!transceiver &&
+        RtpTransceiverDirectionHasRecv(media_desc->direction()) &&
+        !media_desc->HasSimulcast()) {
+      transceiver = FindAvailableTransceiverToReceive(media_desc->type());
+    }
+    // If no RtpTransceiver was found in the previous step, create one with a
+    // recvonly direction.
+    if (!transceiver) {
+      RTC_LOG(LS_INFO) << "Adding "
+                       << cricket::MediaTypeToString(media_desc->type())
+                       << " transceiver for MID=" << content.name
+                       << " at i=" << mline_index
+                       << " in response to the remote description.";
+      std::string sender_id = rtc::CreateRandomUuid();
+      std::vector<RtpEncodingParameters> send_encodings =
+          GetSendEncodingsFromRemoteDescription(*media_desc);
+      auto sender = rtp_manager()->CreateSender(media_desc->type(), sender_id,
+                                                nullptr, {}, send_encodings);
+      // Prefer the remote stream id for the receiver when one is present.
+      std::string receiver_id;
+      if (!media_desc->streams().empty()) {
+        receiver_id = media_desc->streams()[0].id;
+      } else {
+        receiver_id = rtc::CreateRandomUuid();
+      }
+      auto receiver =
+          rtp_manager()->CreateReceiver(media_desc->type(), receiver_id);
+      transceiver = rtp_manager()->CreateAndAddTransceiver(sender, receiver);
+      transceiver->internal()->set_direction(
+          RtpTransceiverDirection::kRecvOnly);
+      // Track creation in the stable state so a rollback can remove it.
+      if (type == SdpType::kOffer) {
+        transceivers()->StableState(transceiver)->set_newly_created();
+      }
+    }
+
+    RTC_DCHECK(transceiver);
+
+    // Check if the offer indicated simulcast but the answer rejected it.
+    // This can happen when simulcast is not supported on the remote party.
+    if (SimulcastIsRejected(old_local_content, *media_desc,
+                            pc_->GetCryptoOptions()
+                                .srtp.enable_encrypted_rtp_header_extensions)) {
+      RTCError error =
+          DisableSimulcastInSender(transceiver->internal()->sender_internal());
+      if (!error.ok()) {
+        RTC_LOG(LS_ERROR) << "Failed to remove rejected simulcast.";
+        return std::move(error);
+      }
+    }
+  }
+
+  if (transceiver->media_type() != media_desc->type()) {
+    LOG_AND_RETURN_ERROR(
+        RTCErrorType::INVALID_PARAMETER,
+        "Transceiver type does not match media description type.");
+  }
+
+  if (media_desc->HasSimulcast()) {
+    std::vector<SimulcastLayer> layers =
+        source == cricket::CS_LOCAL
+            ? media_desc->simulcast_description().send_layers().GetAllLayers()
+            : media_desc->simulcast_description()
+                  .receive_layers()
+                  .GetAllLayers();
+    RTCError error = UpdateSimulcastLayerStatusInSender(
+        layers, transceiver->internal()->sender_internal());
+    if (!error.ok()) {
+      RTC_LOG(LS_ERROR) << "Failed updating status for simulcast layers.";
+      return std::move(error);
+    }
+  }
+  if (type == SdpType::kOffer) {
+    // Remember the pre-offer association so a rollback can restore it.
+    bool state_changes = transceiver->internal()->mid() != content.name ||
+                         transceiver->internal()->mline_index() != mline_index;
+    if (state_changes) {
+      transceivers()
+          ->StableState(transceiver)
+          ->SetMSectionIfUnset(transceiver->internal()->mid(),
+                               transceiver->internal()->mline_index());
+    }
+  }
+  // Associate the found or created RtpTransceiver with the m= section by
+  // setting the value of the RtpTransceiver's mid property to the MID of the m=
+  // section, and establish a mapping between the transceiver and the index of
+  // the m= section.
+  transceiver->internal()->set_mid(content.name);
+  transceiver->internal()->set_mline_index(mline_index);
+  return std::move(transceiver);
+}
+
+// Creates or destroys the transceiver's channel so that it matches the m=
+// section: rejected content clears any existing channel; accepted content
+// creates a channel if none exists yet. `bundle_group` is accepted for
+// signature parity but not used in this function's body.
+RTCError SdpOfferAnswerHandler::UpdateTransceiverChannel(
+    rtc::scoped_refptr<RtpTransceiverProxyWithInternal<RtpTransceiver>>
+        transceiver,
+    const cricket::ContentInfo& content,
+    const cricket::ContentGroup* bundle_group) {
+  TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::UpdateTransceiverChannel");
+  RTC_DCHECK(IsUnifiedPlan());
+  RTC_DCHECK(transceiver);
+  cricket::ChannelInterface* channel = transceiver->internal()->channel();
+  if (content.rejected) {
+    if (channel) {
+      transceiver->internal()->ClearChannel();
+    }
+  } else {
+    if (!channel) {
+      // The transport-lookup callback runs on the network thread (note the
+      // RTC_DCHECK_RUN_ON inside the lambda).
+      auto error = transceiver->internal()->CreateChannel(
+          content.name, pc_->call_ptr(), pc_->configuration()->media_config,
+          pc_->SrtpRequired(), pc_->GetCryptoOptions(), audio_options(),
+          video_options(), video_bitrate_allocator_factory_.get(),
+          [&](absl::string_view mid) {
+            RTC_DCHECK_RUN_ON(network_thread());
+            return transport_controller_n()->GetRtpTransport(mid);
+          });
+      if (!error.ok()) {
+        return error;
+      }
+    }
+  }
+  return RTCError::OK();
+}
+
+// Updates the data channel transport to match the data m= section: rejected
+// content tears the transport down (propagating a DATA_CHANNEL_FAILURE error
+// to the data channels); accepted content creates the transport if needed.
+// `source` and `bundle_group` are accepted for signature parity but unused
+// here. Returns INTERNAL_ERROR if transport creation fails.
+RTCError SdpOfferAnswerHandler::UpdateDataChannelTransport(
+    cricket::ContentSource source,
+    const cricket::ContentInfo& content,
+    const cricket::ContentGroup* bundle_group) {
+  if (content.rejected) {
+    // Build the message once and reuse it for both the log line and the
+    // error delivered to the data channels (previously it was constructed
+    // twice, once for RTC_LOG and once via StringBuilder).
+    rtc::StringBuilder sb;
+    sb << "Rejected data channel transport with mid=" << content.mid();
+    const std::string message = sb.Release();
+    RTC_LOG(LS_INFO) << message;
+
+    RTCError error(RTCErrorType::OPERATION_ERROR_WITH_DATA, message);
+    error.set_error_detail(RTCErrorDetailType::DATA_CHANNEL_FAILURE);
+    pc_->DestroyDataChannelTransport(error);
+  } else if (!pc_->CreateDataChannelTransport(content.name)) {
+    LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR,
+                         "Failed to create data channel.");
+  }
+  return RTCError::OK();
+}
+
+// Returns true if an SDP of `type` may legally be applied as a local
+// description in the current signaling state: an offer from stable or on top
+// of a previous local offer; an answer (provisional or final) only after a
+// remote offer or a local provisional answer.
+bool SdpOfferAnswerHandler::ExpectSetLocalDescription(SdpType type) {
+  const PeerConnectionInterface::SignalingState state = signaling_state();
+  if (type == SdpType::kOffer) {
+    return state == PeerConnectionInterface::kStable ||
+           state == PeerConnectionInterface::kHaveLocalOffer;
+  }
+  RTC_DCHECK(type == SdpType::kPrAnswer || type == SdpType::kAnswer);
+  return state == PeerConnectionInterface::kHaveRemoteOffer ||
+         state == PeerConnectionInterface::kHaveLocalPrAnswer;
+}
+
+// Returns true if an SDP of `type` may legally be applied as a remote
+// description in the current signaling state: an offer from stable or on top
+// of a previous remote offer; an answer (provisional or final) only after a
+// local offer or a remote provisional answer.
+bool SdpOfferAnswerHandler::ExpectSetRemoteDescription(SdpType type) {
+  const PeerConnectionInterface::SignalingState state = signaling_state();
+  if (type == SdpType::kOffer) {
+    return state == PeerConnectionInterface::kStable ||
+           state == PeerConnectionInterface::kHaveRemoteOffer;
+  }
+  RTC_DCHECK(type == SdpType::kPrAnswer || type == SdpType::kAnswer);
+  return state == PeerConnectionInterface::kHaveLocalOffer ||
+         state == PeerConnectionInterface::kHaveRemotePrAnswer;
+}
+
+// Fills in a mid for every remote m= section that is missing its a=mid line,
+// mutating `new_remote_description` in place (both the content name and the
+// matching transport_infos entry). In Unified Plan the mid is taken, by
+// index, from the local description, then the previous remote description,
+// and finally freshly generated; in Plan B the fixed per-media-type default
+// mid is used to match pre-existing behavior.
+void SdpOfferAnswerHandler::FillInMissingRemoteMids(
+    cricket::SessionDescription* new_remote_description) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  RTC_DCHECK(new_remote_description);
+  const cricket::ContentInfos no_infos;
+  const cricket::ContentInfos& local_contents =
+      (local_description() ? local_description()->description()->contents()
+                           : no_infos);
+  const cricket::ContentInfos& remote_contents =
+      (remote_description() ? remote_description()->description()->contents()
+                            : no_infos);
+  for (size_t i = 0; i < new_remote_description->contents().size(); ++i) {
+    cricket::ContentInfo& content = new_remote_description->contents()[i];
+    if (!content.name.empty()) {
+      continue;
+    }
+    std::string new_mid;
+    absl::string_view source_explanation;
+    if (IsUnifiedPlan()) {
+      if (i < local_contents.size()) {
+        new_mid = local_contents[i].name;
+        source_explanation = "from the matching local media section";
+      } else if (i < remote_contents.size()) {
+        new_mid = remote_contents[i].name;
+        source_explanation = "from the matching previous remote media section";
+      } else {
+        new_mid = mid_generator_.GenerateString();
+        source_explanation = "generated just now";
+      }
+    } else {
+      new_mid = std::string(
+          GetDefaultMidForPlanB(content.media_description()->type()));
+      source_explanation = "to match pre-existing behavior";
+    }
+    RTC_DCHECK(!new_mid.empty());
+    content.name = new_mid;
+    // Keep the transport info's content name in sync with the new mid.
+    new_remote_description->transport_infos()[i].content_name = new_mid;
+    RTC_LOG(LS_INFO) << "SetRemoteDescription: Remote media section at i=" << i
+                     << " is missing an a=mid line. Filling in the value '"
+                     << new_mid << "' " << source_explanation << ".";
+  }
+}
+
+// Returns the first non-stopped transceiver of `media_type` that was created
+// by addTrack() and is not yet associated with an m= section, or nullptr if
+// none exists. Used when applying a remote description to match an incoming
+// sendrecv/recvonly m= section to a local transceiver.
+rtc::scoped_refptr<RtpTransceiverProxyWithInternal<RtpTransceiver>>
+SdpOfferAnswerHandler::FindAvailableTransceiverToReceive(
+    cricket::MediaType media_type) const {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  RTC_DCHECK(IsUnifiedPlan());
+  // From JSEP section 5.10 (Applying a Remote Description):
+  // If the m= section is sendrecv or recvonly, and there are RtpTransceivers of
+  // the same type that were added to the PeerConnection by addTrack and are not
+  // associated with any m= section and are not stopped, find the first such
+  // RtpTransceiver.
+  // Iterate by const reference: copying each scoped_refptr would incur a
+  // ref-count increment/decrement per element for no benefit.
+  for (const auto& transceiver : transceivers()->List()) {
+    if (transceiver->media_type() == media_type &&
+        transceiver->internal()->created_by_addtrack() && !transceiver->mid() &&
+        !transceiver->stopped()) {
+      return transceiver;
+    }
+  }
+  return nullptr;
+}
+
+// Locates the m= section of `sdesc` that corresponds to `transceiver`, or
+// nullptr if there is none.
+const cricket::ContentInfo*
+SdpOfferAnswerHandler::FindMediaSectionForTransceiver(
+    const RtpTransceiver* transceiver,
+    const SessionDescriptionInterface* sdesc) const {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  RTC_DCHECK(transceiver);
+  RTC_DCHECK(sdesc);
+  // Plan B allows at most one audio and one video section, so the first
+  // content of the transceiver's media type is the match.
+  if (!IsUnifiedPlan()) {
+    return cricket::GetFirstMediaContent(sdesc->description()->contents(),
+                                         transceiver->media_type());
+  }
+  // Unified Plan: the transceiver's mid, when set, names its media section.
+  const auto& mid = transceiver->mid();
+  if (!mid) {
+    // Not yet associated with any media section.
+    return nullptr;
+  }
+  return sdesc->description()->GetContentByName(*mid);
+}
+
+// Builds the cricket::MediaSessionOptions used to generate a local offer.
+// Per-m=-section options come from the Unified Plan or Plan B helper; this
+// function then layers on session-wide settings: ICE restart/renomination,
+// RTCP CNAME, crypto options, pooled ICE credentials (fetched synchronously
+// from the network thread) and extmap-allow-mixed.
+void SdpOfferAnswerHandler::GetOptionsForOffer(
+    const PeerConnectionInterface::RTCOfferAnswerOptions& offer_answer_options,
+    cricket::MediaSessionOptions* session_options) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  ExtractSharedMediaSessionOptions(offer_answer_options, session_options);
+
+  if (IsUnifiedPlan()) {
+    GetOptionsForUnifiedPlanOffer(offer_answer_options, session_options);
+  } else {
+    GetOptionsForPlanBOffer(offer_answer_options, session_options);
+  }
+
+  // Apply ICE restart flag and renomination flag.
+  // An ICE restart is forced either by the caller's option or by fresh ICE
+  // credentials (e.g. after SetConfiguration changed the ICE servers).
+  bool ice_restart = offer_answer_options.ice_restart || HasNewIceCredentials();
+  for (auto& options : session_options->media_description_options) {
+    options.transport_options.ice_restart = ice_restart;
+    options.transport_options.enable_ice_renomination =
+        pc_->configuration()->enable_ice_renomination;
+  }
+
+  session_options->rtcp_cname = rtcp_cname_;
+  session_options->crypto_options = pc_->GetCryptoOptions();
+  session_options->pooled_ice_credentials =
+      context_->network_thread()->BlockingCall(
+          [this] { return port_allocator()->GetPooledIceCredentials(); });
+  session_options->offer_extmap_allow_mixed =
+      pc_->configuration()->offer_extmap_allow_mixed;
+
+  // Allow fallback for using obsolete SCTP syntax.
+  // Note that the default in `session_options` is true, while
+  // the default in `options` is false.
+  session_options->use_obsolete_sctp_sdp =
+      offer_answer_options.use_obsolete_sctp_sdp;
+}
+
+// Plan B offer generation: at most one audio, one video and one data m=
+// section. Send direction is derived from whether the audio/video
+// transceivers have senders attached; recv direction defaults to true and
+// can be overridden by the legacy offer_to_receive_X options. m= sections
+// present in the current local description keep their original order; new
+// sections are appended at the end.
+void SdpOfferAnswerHandler::GetOptionsForPlanBOffer(
+    const PeerConnectionInterface::RTCOfferAnswerOptions& offer_answer_options,
+    cricket::MediaSessionOptions* session_options) {
+  // A data section is offered whenever data channels have ever been used.
+  bool offer_new_data_description =
+      data_channel_controller()->HasUsedDataChannels();
+  bool send_audio = false;
+  bool send_video = false;
+  bool recv_audio = false;
+  bool recv_video = false;
+  if (ConfiguredForMedia()) {
+    // Figure out transceiver directional preferences.
+    send_audio =
+        !rtp_manager()->GetAudioTransceiver()->internal()->senders().empty();
+    send_video =
+        !rtp_manager()->GetVideoTransceiver()->internal()->senders().empty();
+
+    // By default, generate sendrecv/recvonly m= sections.
+    recv_audio = true;
+    recv_video = true;
+  }
+  // By default, only offer a new m= section if we have media to send with it.
+  bool offer_new_audio_description = send_audio;
+  bool offer_new_video_description = send_video;
+  if (ConfiguredForMedia()) {
+    // The "offer_to_receive_X" options allow those defaults to be overridden.
+    if (offer_answer_options.offer_to_receive_audio !=
+        PeerConnectionInterface::RTCOfferAnswerOptions::kUndefined) {
+      recv_audio = (offer_answer_options.offer_to_receive_audio > 0);
+      offer_new_audio_description =
+          offer_new_audio_description ||
+          (offer_answer_options.offer_to_receive_audio > 0);
+    }
+    if (offer_answer_options.offer_to_receive_video !=
+        RTCOfferAnswerOptions::kUndefined) {
+      recv_video = (offer_answer_options.offer_to_receive_video > 0);
+      offer_new_video_description =
+          offer_new_video_description ||
+          (offer_answer_options.offer_to_receive_video > 0);
+    }
+  }
+  // Indices of the audio/video/data sections within
+  // session_options->media_description_options, once known.
+  absl::optional<size_t> audio_index;
+  absl::optional<size_t> video_index;
+  absl::optional<size_t> data_index;
+  // If a current description exists, generate m= sections in the same order,
+  // using the first audio/video/data section that appears and rejecting
+  // extraneous ones.
+  if (local_description()) {
+    GenerateMediaDescriptionOptions(
+        local_description(),
+        RtpTransceiverDirectionFromSendRecv(send_audio, recv_audio),
+        RtpTransceiverDirectionFromSendRecv(send_video, recv_video),
+        &audio_index, &video_index, &data_index, session_options);
+  }
+
+  if (ConfiguredForMedia()) {
+    // Add audio/video/data m= sections to the end if needed.
+    if (!audio_index && offer_new_audio_description) {
+      cricket::MediaDescriptionOptions options(
+          cricket::MEDIA_TYPE_AUDIO, cricket::CN_AUDIO,
+          RtpTransceiverDirectionFromSendRecv(send_audio, recv_audio), false);
+      options.header_extensions =
+          media_engine()->voice().GetRtpHeaderExtensions();
+      session_options->media_description_options.push_back(options);
+      audio_index = session_options->media_description_options.size() - 1;
+    }
+    if (!video_index && offer_new_video_description) {
+      cricket::MediaDescriptionOptions options(
+          cricket::MEDIA_TYPE_VIDEO, cricket::CN_VIDEO,
+          RtpTransceiverDirectionFromSendRecv(send_video, recv_video), false);
+      options.header_extensions =
+          media_engine()->video().GetRtpHeaderExtensions();
+      session_options->media_description_options.push_back(options);
+      video_index = session_options->media_description_options.size() - 1;
+    }
+    cricket::MediaDescriptionOptions* audio_media_description_options =
+        !audio_index
+            ? nullptr
+            : &session_options->media_description_options[*audio_index];
+    cricket::MediaDescriptionOptions* video_media_description_options =
+        !video_index
+            ? nullptr
+            : &session_options->media_description_options[*video_index];
+
+    // Attach the senders' stream/track information to the sections.
+    AddPlanBRtpSenderOptions(rtp_manager()->GetSendersInternal(),
+                             audio_media_description_options,
+                             video_media_description_options,
+                             offer_answer_options.num_simulcast_layers);
+  }
+  if (!data_index && offer_new_data_description) {
+    session_options->media_description_options.push_back(
+        GetMediaDescriptionOptionsForActiveData(cricket::CN_DATA));
+  }
+}
+
+// Unified Plan offer generation (JSEP sections 5.2.1 and 5.2.2): one m=
+// section per transceiver. Existing sections keep their order; sections whose
+// transceiver is gone or stopping become rejected placeholders eligible for
+// recycling. New (unassociated, non-stopping) transceivers fill recycled
+// slots first and are otherwise appended, and a data section is
+// added/recycled when data channels have been requested.
+void SdpOfferAnswerHandler::GetOptionsForUnifiedPlanOffer(
+    const RTCOfferAnswerOptions& offer_answer_options,
+    cricket::MediaSessionOptions* session_options) {
+  // Rules for generating an offer are dictated by JSEP sections 5.2.1 (Initial
+  // Offers) and 5.2.2 (Subsequent Offers).
+  RTC_DCHECK_EQ(session_options->media_description_options.size(), 0);
+  const ContentInfos no_infos;
+  const ContentInfos& local_contents =
+      (local_description() ? local_description()->description()->contents()
+                           : no_infos);
+  const ContentInfos& remote_contents =
+      (remote_description() ? remote_description()->description()->contents()
+                            : no_infos);
+  // The mline indices that can be recycled. New transceivers should reuse these
+  // slots first.
+  std::queue<size_t> recycleable_mline_indices;
+  // First, go through each media section that exists in either the local or
+  // remote description and generate a media section in this offer for the
+  // associated transceiver. If a media section can be recycled, generate a
+  // default, rejected media section here that can be later overwritten.
+  for (size_t i = 0;
+       i < std::max(local_contents.size(), remote_contents.size()); ++i) {
+    // Either `local_content` or `remote_content` is non-null.
+    const ContentInfo* local_content =
+        (i < local_contents.size() ? &local_contents[i] : nullptr);
+    const ContentInfo* current_local_content =
+        GetContentByIndex(current_local_description(), i);
+    const ContentInfo* remote_content =
+        (i < remote_contents.size() ? &remote_contents[i] : nullptr);
+    const ContentInfo* current_remote_content =
+        GetContentByIndex(current_remote_description(), i);
+    bool had_been_rejected =
+        (current_local_content && current_local_content->rejected) ||
+        (current_remote_content && current_remote_content->rejected);
+    const std::string& mid =
+        (local_content ? local_content->name : remote_content->name);
+    cricket::MediaType media_type =
+        (local_content ? local_content->media_description()->type()
+                       : remote_content->media_description()->type());
+    if (media_type == cricket::MEDIA_TYPE_AUDIO ||
+        media_type == cricket::MEDIA_TYPE_VIDEO) {
+      // A media section is considered eligible for recycling if it is marked as
+      // rejected in either the current local or current remote description.
+      auto transceiver = transceivers()->FindByMid(mid);
+      if (!transceiver) {
+        // No associated transceiver. The media section has been stopped.
+        recycleable_mline_indices.push(i);
+        session_options->media_description_options.push_back(
+            cricket::MediaDescriptionOptions(media_type, mid,
+                                             RtpTransceiverDirection::kInactive,
+                                             /*stopped=*/true));
+      } else {
+        // NOTE: a stopping transceiver should be treated as a stopped one in
+        // createOffer as specified in
+        // https://w3c.github.io/webrtc-pc/#dom-rtcpeerconnection-createoffer.
+        if (had_been_rejected && transceiver->stopping()) {
+          session_options->media_description_options.push_back(
+              cricket::MediaDescriptionOptions(
+                  transceiver->media_type(), mid,
+                  RtpTransceiverDirection::kInactive,
+                  /*stopped=*/true));
+          recycleable_mline_indices.push(i);
+        } else {
+          session_options->media_description_options.push_back(
+              GetMediaDescriptionOptionsForTransceiver(
+                  transceiver->internal(), mid,
+                  /*is_create_offer=*/true));
+          // CreateOffer shouldn't really cause any state changes in
+          // PeerConnection, but we need a way to match new transceivers to new
+          // media sections in SetLocalDescription and JSEP specifies this is
+          // done by recording the index of the media section generated for the
+          // transceiver in the offer.
+          transceiver->internal()->set_mline_index(i);
+        }
+      }
+    } else if (media_type == cricket::MEDIA_TYPE_UNSUPPORTED) {
+      // NOTE(review): `local_content` could be null here if an unsupported
+      // section exists only in the remote contents; the DCHECK would then
+      // dereference null in debug builds — verify this is unreachable.
+      RTC_DCHECK(local_content->rejected);
+      session_options->media_description_options.push_back(
+          cricket::MediaDescriptionOptions(media_type, mid,
+                                           RtpTransceiverDirection::kInactive,
+                                           /*stopped=*/true));
+    } else {
+      RTC_CHECK_EQ(cricket::MEDIA_TYPE_DATA, media_type);
+      if (had_been_rejected) {
+        session_options->media_description_options.push_back(
+            GetMediaDescriptionOptionsForRejectedData(mid));
+      } else {
+        // Only the section matching the negotiated SCTP mid stays active;
+        // any other data section is rejected.
+        const auto data_mid = pc_->sctp_mid();
+        RTC_CHECK(data_mid);
+        if (mid == data_mid.value()) {
+          session_options->media_description_options.push_back(
+              GetMediaDescriptionOptionsForActiveData(mid));
+        } else {
+          session_options->media_description_options.push_back(
+              GetMediaDescriptionOptionsForRejectedData(mid));
+        }
+      }
+    }
+  }
+
+  // Next, look for transceivers that are newly added (that is, are not stopped
+  // and not associated). Reuse media sections marked as recyclable first,
+  // otherwise append to the end of the offer. New media sections should be
+  // added in the order they were added to the PeerConnection.
+  if (ConfiguredForMedia()) {
+    for (const auto& transceiver : transceivers()->ListInternal()) {
+      if (transceiver->mid() || transceiver->stopping()) {
+        continue;
+      }
+      size_t mline_index;
+      if (!recycleable_mline_indices.empty()) {
+        mline_index = recycleable_mline_indices.front();
+        recycleable_mline_indices.pop();
+        session_options->media_description_options[mline_index] =
+            GetMediaDescriptionOptionsForTransceiver(
+                transceiver, mid_generator_.GenerateString(),
+                /*is_create_offer=*/true);
+      } else {
+        mline_index = session_options->media_description_options.size();
+        session_options->media_description_options.push_back(
+            GetMediaDescriptionOptionsForTransceiver(
+                transceiver, mid_generator_.GenerateString(),
+                /*is_create_offer=*/true));
+      }
+      // See comment above for why CreateOffer changes the transceiver's state.
+      transceiver->set_mline_index(mline_index);
+    }
+  }
+  // Lastly, add a m-section if we have requested local data channels and an
+  // m section does not already exist.
+  if (!pc_->sctp_mid() && data_channel_controller()->HasDataChannels()) {
+    // Attempt to recycle a stopped m-line.
+    // TODO(crbug.com/1442604): sctp_mid() should return the mid if one was
+    // ever created but rejected.
+    bool recycled = false;
+    for (size_t i = 0; i < session_options->media_description_options.size();
+         i++) {
+      // NOTE(review): `media_description` is a copy, not a reference. A
+      // reference would alias the element overwritten just below, so the
+      // copy looks deliberate — confirm before "optimizing" it away.
+      auto media_description = session_options->media_description_options[i];
+      if (media_description.type == cricket::MEDIA_TYPE_DATA &&
+          media_description.stopped) {
+        session_options->media_description_options[i] =
+            GetMediaDescriptionOptionsForActiveData(media_description.mid);
+        recycled = true;
+        break;
+      }
+    }
+    if (!recycled) {
+      session_options->media_description_options.push_back(
+          GetMediaDescriptionOptionsForActiveData(
+              mid_generator_.GenerateString()));
+    }
+  }
+}
+
+// Builds the cricket::MediaSessionOptions used to generate an answer to the
+// pending remote offer. Mirrors GetOptionsForOffer, except that no ICE
+// restart flag is computed here — only the renomination flag is propagated
+// to each m= section.
+void SdpOfferAnswerHandler::GetOptionsForAnswer(
+    const RTCOfferAnswerOptions& offer_answer_options,
+    cricket::MediaSessionOptions* session_options) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  ExtractSharedMediaSessionOptions(offer_answer_options, session_options);
+
+  if (IsUnifiedPlan()) {
+    GetOptionsForUnifiedPlanAnswer(offer_answer_options, session_options);
+  } else {
+    GetOptionsForPlanBAnswer(offer_answer_options, session_options);
+  }
+
+  // Apply ICE renomination flag.
+  for (auto& options : session_options->media_description_options) {
+    options.transport_options.enable_ice_renomination =
+        pc_->configuration()->enable_ice_renomination;
+  }
+
+  session_options->rtcp_cname = rtcp_cname_;
+  session_options->crypto_options = pc_->GetCryptoOptions();
+  // Pooled ICE credentials live on the network thread's port allocator.
+  session_options->pooled_ice_credentials =
+      context_->network_thread()->BlockingCall(
+          [this] { return port_allocator()->GetPooledIceCredentials(); });
+}
+
+// Plan B answer generation: produces m= sections matching those of the
+// remote offer. Send direction comes from whether the audio/video
+// transceivers have senders; recv direction defaults to true, overridable by
+// the legacy offer_to_receive_X options, and is later intersected with the
+// offered direction by the media session layer.
+void SdpOfferAnswerHandler::GetOptionsForPlanBAnswer(
+    const PeerConnectionInterface::RTCOfferAnswerOptions& offer_answer_options,
+    cricket::MediaSessionOptions* session_options) {
+  bool send_audio = false;
+  bool recv_audio = false;
+  bool send_video = false;
+  bool recv_video = false;
+
+  if (ConfiguredForMedia()) {
+    // Figure out transceiver directional preferences.
+    send_audio =
+        !rtp_manager()->GetAudioTransceiver()->internal()->senders().empty();
+    send_video =
+        !rtp_manager()->GetVideoTransceiver()->internal()->senders().empty();
+
+    // By default, generate sendrecv/recvonly m= sections. The direction is also
+    // restricted by the direction in the offer.
+    recv_audio = true;
+    recv_video = true;
+
+    // The "offer_to_receive_X" options allow those defaults to be overridden.
+    if (offer_answer_options.offer_to_receive_audio !=
+        RTCOfferAnswerOptions::kUndefined) {
+      recv_audio = (offer_answer_options.offer_to_receive_audio > 0);
+    }
+    if (offer_answer_options.offer_to_receive_video !=
+        RTCOfferAnswerOptions::kUndefined) {
+      recv_video = (offer_answer_options.offer_to_receive_video > 0);
+    }
+  }
+
+  // Indices of the audio/video/data sections within
+  // session_options->media_description_options, once known.
+  absl::optional<size_t> audio_index;
+  absl::optional<size_t> video_index;
+  absl::optional<size_t> data_index;
+
+  // Generate m= sections that match those in the offer.
+  // Note that mediasession.cc will handle intersection our preferred
+  // direction with the offered direction.
+  GenerateMediaDescriptionOptions(
+      remote_description(),
+      RtpTransceiverDirectionFromSendRecv(send_audio, recv_audio),
+      RtpTransceiverDirectionFromSendRecv(send_video, recv_video), &audio_index,
+      &video_index, &data_index, session_options);
+
+  cricket::MediaDescriptionOptions* audio_media_description_options =
+      !audio_index ? nullptr
+                   : &session_options->media_description_options[*audio_index];
+  cricket::MediaDescriptionOptions* video_media_description_options =
+      !video_index ? nullptr
+                   : &session_options->media_description_options[*video_index];
+
+  if (ConfiguredForMedia()) {
+    // Attach the senders' stream/track information to the sections.
+    AddPlanBRtpSenderOptions(rtp_manager()->GetSendersInternal(),
+                             audio_media_description_options,
+                             video_media_description_options,
+                             offer_answer_options.num_simulcast_layers);
+  }
+}
+
+// Unified Plan answer generation (JSEP sections 5.3.1 and 5.3.2): answers
+// each m= section of the remote offer, in order. Audio/video sections are
+// answered through their associated transceiver (or rejected if none is
+// associated); unsupported media sections are rejected; only the data
+// section matching the negotiated SCTP mid is answered as active, all other
+// data sections are rejected.
+void SdpOfferAnswerHandler::GetOptionsForUnifiedPlanAnswer(
+    const PeerConnectionInterface::RTCOfferAnswerOptions& offer_answer_options,
+    cricket::MediaSessionOptions* session_options) {
+  // Rules for generating an answer are dictated by JSEP sections 5.3.1 (Initial
+  // Answers) and 5.3.2 (Subsequent Answers).
+  RTC_DCHECK(remote_description());
+  RTC_DCHECK(remote_description()->GetType() == SdpType::kOffer);
+  for (const ContentInfo& content :
+       remote_description()->description()->contents()) {
+    cricket::MediaType media_type = content.media_description()->type();
+    if (media_type == cricket::MEDIA_TYPE_AUDIO ||
+        media_type == cricket::MEDIA_TYPE_VIDEO) {
+      auto transceiver = transceivers()->FindByMid(content.name);
+      if (transceiver) {
+        session_options->media_description_options.push_back(
+            GetMediaDescriptionOptionsForTransceiver(
+                transceiver->internal(), content.name,
+                /*is_create_offer=*/false));
+      } else {
+        // This should only happen with rejected transceivers.
+        RTC_DCHECK(content.rejected);
+        session_options->media_description_options.push_back(
+            cricket::MediaDescriptionOptions(media_type, content.name,
+                                             RtpTransceiverDirection::kInactive,
+                                             /*stopped=*/true));
+      }
+    } else if (media_type == cricket::MEDIA_TYPE_UNSUPPORTED) {
+      RTC_DCHECK(content.rejected);
+      session_options->media_description_options.push_back(
+          cricket::MediaDescriptionOptions(media_type, content.name,
+                                           RtpTransceiverDirection::kInactive,
+                                           /*stopped=*/true));
+    } else {
+      RTC_CHECK_EQ(cricket::MEDIA_TYPE_DATA, media_type);
+      // Reject all data sections if data channels are disabled.
+      // Reject a data section if it has already been rejected.
+      // Reject all data sections except for the first one.
+      if (content.rejected || content.name != *(pc_->sctp_mid())) {
+        session_options->media_description_options.push_back(
+            GetMediaDescriptionOptionsForRejectedData(content.name));
+      } else {
+        session_options->media_description_options.push_back(
+            GetMediaDescriptionOptionsForActiveData(content.name));
+      }
+    }
+  }
+}
+
+const char* SdpOfferAnswerHandler::SessionErrorToString(
+ SessionError error) const {
+ switch (error) {
+ case SessionError::kNone:
+ return "ERROR_NONE";
+ case SessionError::kContent:
+ return "ERROR_CONTENT";
+ case SessionError::kTransport:
+ return "ERROR_TRANSPORT";
+ }
+ RTC_DCHECK_NOTREACHED();
+ return "";
+}
+
+std::string SdpOfferAnswerHandler::GetSessionErrorMsg() {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ rtc::StringBuilder desc;
+ desc << kSessionError << SessionErrorToString(session_error()) << ". ";
+ desc << kSessionErrorDesc << session_error_desc() << ".";
+ return desc.Release();
+}
+
+void SdpOfferAnswerHandler::SetSessionError(SessionError error,
+ const std::string& error_desc) {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ if (error != session_error_) {
+ session_error_ = error;
+ session_error_desc_ = error_desc;
+ }
+}
+
+// Emulates the legacy Plan B offer_to_receive_audio/video options under
+// Unified Plan by mutating transceiver directions: 0 strips the recv
+// direction from receiving transceivers, 1 ensures at least one receiving
+// transceiver exists, and values above 1 are unsupported. Negative values
+// (kUndefined) leave the transceivers untouched.
+RTCError SdpOfferAnswerHandler::HandleLegacyOfferOptions(
+    const PeerConnectionInterface::RTCOfferAnswerOptions& options) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  RTC_DCHECK(IsUnifiedPlan());
+
+  if (options.offer_to_receive_audio > 1) {
+    LOG_AND_RETURN_ERROR(RTCErrorType::UNSUPPORTED_PARAMETER,
+                         "offer_to_receive_audio > 1 is not supported.");
+  } else if (options.offer_to_receive_audio == 1) {
+    AddUpToOneReceivingTransceiverOfType(cricket::MEDIA_TYPE_AUDIO);
+  } else if (options.offer_to_receive_audio == 0) {
+    RemoveRecvDirectionFromReceivingTransceiversOfType(
+        cricket::MEDIA_TYPE_AUDIO);
+  }
+
+  if (options.offer_to_receive_video > 1) {
+    LOG_AND_RETURN_ERROR(RTCErrorType::UNSUPPORTED_PARAMETER,
+                         "offer_to_receive_video > 1 is not supported.");
+  } else if (options.offer_to_receive_video == 1) {
+    AddUpToOneReceivingTransceiverOfType(cricket::MEDIA_TYPE_VIDEO);
+  } else if (options.offer_to_receive_video == 0) {
+    RemoveRecvDirectionFromReceivingTransceiversOfType(
+        cricket::MEDIA_TYPE_VIDEO);
+  }
+
+  return RTCError::OK();
+}
+
+// Strips the "recv" bit from every non-stopped transceiver of `media_type`
+// that currently receives, logging each direction change.
+void SdpOfferAnswerHandler::RemoveRecvDirectionFromReceivingTransceiversOfType(
+    cricket::MediaType media_type) {
+  for (const auto& transceiver : GetReceivingTransceiversOfType(media_type)) {
+    RtpTransceiverDirection stripped =
+        RtpTransceiverDirectionWithRecvSet(transceiver->direction(), false);
+    if (stripped == transceiver->direction()) {
+      continue;
+    }
+    RTC_LOG(LS_INFO) << "Changing " << cricket::MediaTypeToString(media_type)
+                     << " transceiver (MID="
+                     << transceiver->mid().value_or("<not set>") << ") from "
+                     << RtpTransceiverDirectionToString(
+                            transceiver->direction())
+                     << " to " << RtpTransceiverDirectionToString(stripped)
+                     << " since CreateOffer specified offer_to_receive=0";
+    transceiver->internal()->set_direction(stripped);
+  }
+}
+
+// Ensures at least one transceiver of `media_type` can receive, adding a
+// recvonly transceiver when none currently does.
+void SdpOfferAnswerHandler::AddUpToOneReceivingTransceiverOfType(
+    cricket::MediaType media_type) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  // Nothing to do when some transceiver of this kind already receives.
+  if (!GetReceivingTransceiversOfType(media_type).empty()) {
+    return;
+  }
+  RTC_LOG(LS_INFO)
+      << "Adding one recvonly " << cricket::MediaTypeToString(media_type)
+      << " transceiver since CreateOffer specified offer_to_receive=1";
+  RtpTransceiverInit init;
+  init.direction = RtpTransceiverDirection::kRecvOnly;
+  pc_->AddTransceiver(media_type, nullptr, init,
+                      /*update_negotiation_needed=*/false);
+}
+
+// Collects every non-stopped transceiver of `media_type` whose direction
+// includes "recv".
+std::vector<rtc::scoped_refptr<RtpTransceiverProxyWithInternal<RtpTransceiver>>>
+SdpOfferAnswerHandler::GetReceivingTransceiversOfType(
+    cricket::MediaType media_type) {
+  std::vector<
+      rtc::scoped_refptr<RtpTransceiverProxyWithInternal<RtpTransceiver>>>
+      result;
+  for (const auto& candidate : transceivers()->List()) {
+    if (candidate->stopped() || candidate->media_type() != media_type ||
+        !RtpTransceiverDirectionHasRecv(candidate->direction())) {
+      continue;
+    }
+    result.push_back(candidate);
+  }
+  return result;
+}
+
+// Handles the removal of a remote track for an associated transceiver:
+// detaches the receiver from its streams (which removes the remote track
+// from them), queues the transceiver on `remove_list` for the caller to
+// fire events, and records any stream that became empty in
+// `removed_streams`.
+void SdpOfferAnswerHandler::ProcessRemovalOfRemoteTrack(
+    rtc::scoped_refptr<RtpTransceiverProxyWithInternal<RtpTransceiver>>
+        transceiver,
+    std::vector<rtc::scoped_refptr<RtpTransceiverInterface>>* remove_list,
+    std::vector<rtc::scoped_refptr<MediaStreamInterface>>* removed_streams) {
+  RTC_DCHECK(transceiver->mid());
+  RTC_LOG(LS_INFO) << "Processing the removal of a track for MID="
+                   << *transceiver->mid();
+  // Snapshot the streams before detaching, so empties can be detected below.
+  std::vector<rtc::scoped_refptr<MediaStreamInterface>> previous_streams =
+      transceiver->internal()->receiver_internal()->streams();
+  // This will remove the remote track from the streams.
+  transceiver->internal()->receiver_internal()->set_stream_ids({});
+  remove_list->push_back(transceiver);
+  RemoveRemoteStreamsIfEmpty(previous_streams, removed_streams);
+}
+
+// Drops every stream in `remote_streams` that no longer carries any audio or
+// video track, recording each removal in `removed_streams` so the caller can
+// fire the corresponding events.
+void SdpOfferAnswerHandler::RemoveRemoteStreamsIfEmpty(
+    const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& remote_streams,
+    std::vector<rtc::scoped_refptr<MediaStreamInterface>>* removed_streams) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  // TODO(https://crbug.com/webrtc/9480): When we use stream IDs instead of
+  // streams, see if the stream was removed by checking if this was the last
+  // receiver with that stream ID.
+  for (const auto& stream : remote_streams) {
+    bool has_tracks = !stream->GetAudioTracks().empty() ||
+                      !stream->GetVideoTracks().empty();
+    if (has_tracks) {
+      continue;
+    }
+    remote_streams_->RemoveStream(stream.get());
+    removed_streams->push_back(stream);
+  }
+}
+
+// Removes all senders of `media_type` by applying an empty stream list to
+// both the local and the remote sender bookkeeping.
+void SdpOfferAnswerHandler::RemoveSenders(cricket::MediaType media_type) {
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  const std::vector<cricket::StreamParams> no_streams;
+  UpdateLocalSenders(no_streams, media_type);
+  UpdateRemoteSendersList(no_streams, /*default_sender_needed=*/false,
+                          media_type, nullptr);
+}
+
+// Reconciles the cached local sender info for `media_type` with the
+// StreamParams of a newly applied local description: removes entries whose
+// track id, stream id or ssrc no longer match, and adds entries for new
+// streams, notifying the RTP transmission manager of each change.
+void SdpOfferAnswerHandler::UpdateLocalSenders(
+    const std::vector<cricket::StreamParams>& streams,
+    cricket::MediaType media_type) {
+  TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::UpdateLocalSenders");
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  std::vector<RtpSenderInfo>* current_senders =
+      rtp_manager()->GetLocalSenderInfos(media_type);
+
+  // Find removed tracks. I.e., tracks where the track id, stream id or ssrc
+  // don't match the new StreamParam.
+  // Erase-while-iterating: the iterator is advanced manually so erased
+  // entries are not skipped.
+  for (auto sender_it = current_senders->begin();
+       sender_it != current_senders->end();
+       /* incremented manually */) {
+    const RtpSenderInfo& info = *sender_it;
+    const cricket::StreamParams* params =
+        cricket::GetStreamBySsrc(streams, info.first_ssrc);
+    if (!params || params->id != info.sender_id ||
+        params->first_stream_id() != info.stream_id) {
+      rtp_manager()->OnLocalSenderRemoved(info, media_type);
+      sender_it = current_senders->erase(sender_it);
+    } else {
+      ++sender_it;
+    }
+  }
+
+  // Find new and active senders.
+  for (const cricket::StreamParams& params : streams) {
+    // The sync_label is the MediaStream label and the `stream.id` is the
+    // sender id.
+    const std::string& stream_id = params.first_stream_id();
+    const std::string& sender_id = params.id;
+    uint32_t ssrc = params.first_ssrc();
+    const RtpSenderInfo* sender_info =
+        rtp_manager()->FindSenderInfo(*current_senders, stream_id, sender_id);
+    if (!sender_info) {
+      current_senders->push_back(RtpSenderInfo(stream_id, sender_id, ssrc));
+      rtp_manager()->OnLocalSenderAdded(current_senders->back(), media_type);
+    }
+  }
+}
+
+// Plan B only: synchronizes the cached remote sender info for `media_type`
+// with the StreamParams of a newly applied remote description. Removes
+// senders that disappeared, creates remote MediaStreams as needed (newly
+// created ones are also appended to `new_streams`), and maintains a default
+// sender/stream when the remote endpoint signals streams without ssrcs
+// (`default_sender_needed`).
+void SdpOfferAnswerHandler::UpdateRemoteSendersList(
+    const cricket::StreamParamsVec& streams,
+    bool default_sender_needed,
+    cricket::MediaType media_type,
+    StreamCollection* new_streams) {
+  TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::UpdateRemoteSendersList");
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  RTC_DCHECK(!IsUnifiedPlan());
+
+  std::vector<RtpSenderInfo>* current_senders =
+      rtp_manager()->GetRemoteSenderInfos(media_type);
+
+  // Find removed senders. I.e., senders where the sender id or ssrc don't match
+  // the new StreamParam.
+  // Erase-while-iterating: the iterator is advanced manually so erased
+  // entries are not skipped.
+  for (auto sender_it = current_senders->begin();
+       sender_it != current_senders->end();
+       /* incremented manually */) {
+    const RtpSenderInfo& info = *sender_it;
+    const cricket::StreamParams* params =
+        cricket::GetStreamBySsrc(streams, info.first_ssrc);
+    std::string params_stream_id;
+    if (params) {
+      params_stream_id =
+          (!params->first_stream_id().empty() ? params->first_stream_id()
+                                              : kDefaultStreamId);
+    }
+    bool sender_exists = params && params->id == info.sender_id &&
+                         params_stream_id == info.stream_id;
+    // If this is a default track, and we still need it, don't remove it.
+    if ((info.stream_id == kDefaultStreamId && default_sender_needed) ||
+        sender_exists) {
+      ++sender_it;
+    } else {
+      rtp_manager()->OnRemoteSenderRemoved(
+          info, remote_streams_->find(info.stream_id), media_type);
+      sender_it = current_senders->erase(sender_it);
+    }
+  }
+
+  // Find new and active senders.
+  for (const cricket::StreamParams& params : streams) {
+    if (!params.has_ssrcs()) {
+      // The remote endpoint has streams, but didn't signal ssrcs. For an active
+      // sender, this means it is coming from a Unified Plan endpoint,so we just
+      // create a default.
+      default_sender_needed = true;
+      break;
+    }
+
+    // `params.id` is the sender id and the stream id uses the first of
+    // `params.stream_ids`. The remote description could come from a Unified
+    // Plan endpoint, with multiple or no stream_ids() signaled. Since this is
+    // not supported in Plan B, we just take the first here and create the
+    // default stream ID if none is specified.
+    const std::string& stream_id =
+        (!params.first_stream_id().empty() ? params.first_stream_id()
+                                           : kDefaultStreamId);
+    const std::string& sender_id = params.id;
+    uint32_t ssrc = params.first_ssrc();
+
+    rtc::scoped_refptr<MediaStreamInterface> stream(
+        remote_streams_->find(stream_id));
+    if (!stream) {
+      // This is a new MediaStream. Create a new remote MediaStream.
+      stream = MediaStreamProxy::Create(rtc::Thread::Current(),
+                                        MediaStream::Create(stream_id));
+      remote_streams_->AddStream(stream);
+      new_streams->AddStream(stream);
+    }
+
+    const RtpSenderInfo* sender_info =
+        rtp_manager()->FindSenderInfo(*current_senders, stream_id, sender_id);
+    if (!sender_info) {
+      current_senders->push_back(RtpSenderInfo(stream_id, sender_id, ssrc));
+      rtp_manager()->OnRemoteSenderAdded(current_senders->back(), stream.get(),
+                                         media_type);
+    }
+  }
+
+  // Add default sender if necessary.
+  if (default_sender_needed) {
+    rtc::scoped_refptr<MediaStreamInterface> default_stream(
+        remote_streams_->find(kDefaultStreamId));
+    if (!default_stream) {
+      // Create the new default MediaStream.
+      default_stream = MediaStreamProxy::Create(
+          rtc::Thread::Current(), MediaStream::Create(kDefaultStreamId));
+      remote_streams_->AddStream(default_stream);
+      new_streams->AddStream(default_stream);
+    }
+    std::string default_sender_id = (media_type == cricket::MEDIA_TYPE_AUDIO)
+                                        ? kDefaultAudioSenderId
+                                        : kDefaultVideoSenderId;
+    const RtpSenderInfo* default_sender_info = rtp_manager()->FindSenderInfo(
+        *current_senders, kDefaultStreamId, default_sender_id);
+    if (!default_sender_info) {
+      current_senders->push_back(
+          RtpSenderInfo(kDefaultStreamId, default_sender_id, /*ssrc=*/0));
+      rtp_manager()->OnRemoteSenderAdded(current_senders->back(),
+                                         default_stream.get(), media_type);
+    }
+  }
+}
+
+// Enables the channel of every transceiver that has one, allowing media to
+// flow. A media-less PeerConnection has no channels and returns immediately.
+void SdpOfferAnswerHandler::EnableSending() {
+  TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::EnableSending");
+  RTC_DCHECK_RUN_ON(signaling_thread());
+  if (!ConfiguredForMedia()) {
+    return;
+  }
+  for (const auto& transceiver : transceivers()->ListInternal()) {
+    if (cricket::ChannelInterface* channel = transceiver->channel()) {
+      channel->Enable(true);
+    }
+  }
+}
+
+RTCError SdpOfferAnswerHandler::PushdownMediaDescription(
+ SdpType type,
+ cricket::ContentSource source,
+ const std::map<std::string, const cricket::ContentGroup*>&
+ bundle_groups_by_mid) {
+ TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::PushdownMediaDescription");
+ const SessionDescriptionInterface* sdesc =
+ (source == cricket::CS_LOCAL ? local_description()
+ : remote_description());
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ RTC_DCHECK(sdesc);
+
+ if (ConfiguredForMedia()) {
+ // Note: This will perform a BlockingCall over to the worker thread, which
+ // we'll also do in a loop below.
+ if (!UpdatePayloadTypeDemuxingState(source, bundle_groups_by_mid)) {
+ // Note that this is never expected to fail, since RtpDemuxer doesn't
+ // return an error when changing payload type demux criteria, which is all
+ // this does.
+ LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR,
+ "Failed to update payload type demuxing state.");
+ }
+
+ // Push down the new SDP media section for each audio/video transceiver.
+ auto rtp_transceivers = transceivers()->ListInternal();
+ std::vector<
+ std::pair<cricket::ChannelInterface*, const MediaContentDescription*>>
+ channels;
+ for (const auto& transceiver : rtp_transceivers) {
+ const ContentInfo* content_info =
+ FindMediaSectionForTransceiver(transceiver, sdesc);
+ cricket::ChannelInterface* channel = transceiver->channel();
+ if (!channel || !content_info || content_info->rejected) {
+ continue;
+ }
+ const MediaContentDescription* content_desc =
+ content_info->media_description();
+ if (!content_desc) {
+ continue;
+ }
+
+ transceiver->OnNegotiationUpdate(type, content_desc);
+ channels.push_back(std::make_pair(channel, content_desc));
+ }
+
+    // This for-loop of invokes helps avoid audio impairment during
+    // re-negotiations. One of the causes is that downstream decoder creation
+    // is synchronous at the moment, and that a decoder is created for each
+    // codec listed in the SDP.
+ //
+ // TODO(bugs.webrtc.org/12840): consider merging the invokes again after
+ // these projects have shipped:
+ // - bugs.webrtc.org/12462
+ // - crbug.com/1157227
+ // - crbug.com/1187289
+ for (const auto& entry : channels) {
+ std::string error;
+ bool success = context_->worker_thread()->BlockingCall([&]() {
+ return (source == cricket::CS_LOCAL)
+ ? entry.first->SetLocalContent(entry.second, type, error)
+ : entry.first->SetRemoteContent(entry.second, type, error);
+ });
+ if (!success) {
+ LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, error);
+ }
+ }
+ }
+ // Need complete offer/answer with an SCTP m= section before starting SCTP,
+ // according to https://tools.ietf.org/html/draft-ietf-mmusic-sctp-sdp-19
+ if (pc_->sctp_mid() && local_description() && remote_description()) {
+ auto local_sctp_description = cricket::GetFirstSctpDataContentDescription(
+ local_description()->description());
+ auto remote_sctp_description = cricket::GetFirstSctpDataContentDescription(
+ remote_description()->description());
+ if (local_sctp_description && remote_sctp_description) {
+ int max_message_size;
+ // A remote max message size of zero means "any size supported".
+ // We configure the connection with our own max message size.
+ if (remote_sctp_description->max_message_size() == 0) {
+ max_message_size = local_sctp_description->max_message_size();
+ } else {
+ max_message_size =
+ std::min(local_sctp_description->max_message_size(),
+ remote_sctp_description->max_message_size());
+ }
+ pc_->StartSctpTransport(local_sctp_description->port(),
+ remote_sctp_description->port(),
+ max_message_size);
+ }
+ }
+
+ return RTCError::OK();
+}
+
+RTCError SdpOfferAnswerHandler::PushdownTransportDescription(
+ cricket::ContentSource source,
+ SdpType type) {
+ TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::PushdownTransportDescription");
+ RTC_DCHECK_RUN_ON(signaling_thread());
+
+ if (source == cricket::CS_LOCAL) {
+ const SessionDescriptionInterface* sdesc = local_description();
+ RTC_DCHECK(sdesc);
+ return transport_controller_s()->SetLocalDescription(type,
+ sdesc->description());
+ } else {
+ const SessionDescriptionInterface* sdesc = remote_description();
+ RTC_DCHECK(sdesc);
+ return transport_controller_s()->SetRemoteDescription(type,
+ sdesc->description());
+ }
+}
+
+void SdpOfferAnswerHandler::RemoveStoppedTransceivers() {
+ TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::RemoveStoppedTransceivers");
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ // 3.2.10.1: For each transceiver in the connection's set of transceivers
+ // run the following steps:
+ if (!IsUnifiedPlan())
+ return;
+ if (!ConfiguredForMedia()) {
+ return;
+ }
+ // Traverse a copy of the transceiver list.
+ auto transceiver_list = transceivers()->List();
+ for (auto transceiver : transceiver_list) {
+ // 3.2.10.1.1: If transceiver is stopped, associated with an m= section
+ // and the associated m= section is rejected in
+ // connection.[[CurrentLocalDescription]] or
+ // connection.[[CurrentRemoteDescription]], remove the
+ // transceiver from the connection's set of transceivers.
+ if (!transceiver->stopped()) {
+ continue;
+ }
+ const ContentInfo* local_content = FindMediaSectionForTransceiver(
+ transceiver->internal(), local_description());
+ const ContentInfo* remote_content = FindMediaSectionForTransceiver(
+ transceiver->internal(), remote_description());
+ if ((local_content && local_content->rejected) ||
+ (remote_content && remote_content->rejected)) {
+ RTC_LOG(LS_INFO) << "Dissociating transceiver"
+ " since the media section is being recycled.";
+ transceiver->internal()->set_mid(absl::nullopt);
+ transceiver->internal()->set_mline_index(absl::nullopt);
+ } else if (!local_content && !remote_content) {
+ // TODO(bugs.webrtc.org/11973): Consider if this should be removed already
+ // See https://github.com/w3c/webrtc-pc/issues/2576
+ RTC_LOG(LS_INFO)
+ << "Dropping stopped transceiver that was never associated";
+ }
+ transceivers()->Remove(transceiver);
+ }
+}
+
+void SdpOfferAnswerHandler::RemoveUnusedChannels(
+ const SessionDescription* desc) {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ if (ConfiguredForMedia()) {
+ // Destroy video channel first since it may have a pointer to the
+ // voice channel.
+ const cricket::ContentInfo* video_info =
+ cricket::GetFirstVideoContent(desc);
+ if (!video_info || video_info->rejected) {
+ rtp_manager()->GetVideoTransceiver()->internal()->ClearChannel();
+ }
+
+ const cricket::ContentInfo* audio_info =
+ cricket::GetFirstAudioContent(desc);
+ if (!audio_info || audio_info->rejected) {
+ rtp_manager()->GetAudioTransceiver()->internal()->ClearChannel();
+ }
+ }
+ const cricket::ContentInfo* data_info = cricket::GetFirstDataContent(desc);
+ if (!data_info) {
+ RTCError error(RTCErrorType::OPERATION_ERROR_WITH_DATA,
+ "No data channel section in the description.");
+ error.set_error_detail(RTCErrorDetailType::DATA_CHANNEL_FAILURE);
+ pc_->DestroyDataChannelTransport(error);
+ } else if (data_info->rejected) {
+ rtc::StringBuilder sb;
+ sb << "Rejected data channel with mid=" << data_info->name << ".";
+
+ RTCError error(RTCErrorType::OPERATION_ERROR_WITH_DATA, sb.Release());
+ error.set_error_detail(RTCErrorDetailType::DATA_CHANNEL_FAILURE);
+ pc_->DestroyDataChannelTransport(error);
+ }
+}
+
+void SdpOfferAnswerHandler::UpdateEndedRemoteMediaStreams() {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ std::vector<rtc::scoped_refptr<MediaStreamInterface>> streams_to_remove;
+ for (size_t i = 0; i < remote_streams_->count(); ++i) {
+ MediaStreamInterface* stream = remote_streams_->at(i);
+ if (stream->GetAudioTracks().empty() && stream->GetVideoTracks().empty()) {
+ streams_to_remove.push_back(
+ rtc::scoped_refptr<MediaStreamInterface>(stream));
+ }
+ }
+
+ for (auto& stream : streams_to_remove) {
+ remote_streams_->RemoveStream(stream.get());
+ pc_->Observer()->OnRemoveStream(std::move(stream));
+ }
+}
+
+bool SdpOfferAnswerHandler::UseCandidatesInRemoteDescription() {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ auto* remote_desc = remote_description();
+ if (!remote_desc) {
+ return true;
+ }
+ bool ret = true;
+
+ for (size_t m = 0; m < remote_desc->number_of_mediasections(); ++m) {
+ const IceCandidateCollection* candidates = remote_desc->candidates(m);
+ for (size_t n = 0; n < candidates->count(); ++n) {
+ const IceCandidateInterface* candidate = candidates->at(n);
+ bool valid = false;
+ if (!ReadyToUseRemoteCandidate(candidate, remote_desc, &valid)) {
+ if (valid) {
+ RTC_LOG(LS_INFO)
+ << "UseCandidatesInRemoteDescription: Not ready to use "
+ "candidate.";
+ }
+ continue;
+ }
+ ret = UseCandidate(candidate);
+ if (!ret) {
+ break;
+ }
+ }
+ }
+ return ret;
+}
+
+bool SdpOfferAnswerHandler::UseCandidate(
+ const IceCandidateInterface* candidate) {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+
+ rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+
+ RTCErrorOr<const cricket::ContentInfo*> result =
+ FindContentInfo(remote_description(), candidate);
+ if (!result.ok())
+ return false;
+
+ const cricket::Candidate& c = candidate->candidate();
+ RTCError error = cricket::VerifyCandidate(c);
+ if (!error.ok()) {
+ RTC_LOG(LS_WARNING) << "Invalid candidate: " << c.ToString();
+ return true;
+ }
+
+ pc_->AddRemoteCandidate(result.value()->name, c);
+
+ return true;
+}
+
+// We need to check the local/remote description for the Transport instead of
+// the session, because a new Transport added during renegotiation may have
+// them unset while the session has them set from the previous negotiation.
+// Not doing so may trigger the auto generation of transport description and
+// mess up DTLS identity information, ICE credential, etc.
+bool SdpOfferAnswerHandler::ReadyToUseRemoteCandidate(
+ const IceCandidateInterface* candidate,
+ const SessionDescriptionInterface* remote_desc,
+ bool* valid) {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ *valid = true;
+
+ const SessionDescriptionInterface* current_remote_desc =
+ remote_desc ? remote_desc : remote_description();
+
+ if (!current_remote_desc) {
+ return false;
+ }
+
+ RTCErrorOr<const cricket::ContentInfo*> result =
+ FindContentInfo(current_remote_desc, candidate);
+ if (!result.ok()) {
+ RTC_LOG(LS_ERROR) << "ReadyToUseRemoteCandidate: Invalid candidate. "
+ << result.error().message();
+
+ *valid = false;
+ return false;
+ }
+
+ return true;
+}
+
+RTCErrorOr<const cricket::ContentInfo*> SdpOfferAnswerHandler::FindContentInfo(
+ const SessionDescriptionInterface* description,
+ const IceCandidateInterface* candidate) {
+ if (!candidate->sdp_mid().empty()) {
+ auto& contents = description->description()->contents();
+ auto it = absl::c_find_if(
+ contents, [candidate](const cricket::ContentInfo& content_info) {
+ return content_info.mid() == candidate->sdp_mid();
+ });
+ if (it == contents.end()) {
+ LOG_AND_RETURN_ERROR(
+ RTCErrorType::INVALID_PARAMETER,
+ "Mid " + candidate->sdp_mid() +
+ " specified but no media section with that mid found.");
+ } else {
+ return &*it;
+ }
+ } else if (candidate->sdp_mline_index() >= 0) {
+ size_t mediacontent_index =
+ static_cast<size_t>(candidate->sdp_mline_index());
+ size_t content_size = description->description()->contents().size();
+ if (mediacontent_index < content_size) {
+ return &description->description()->contents()[mediacontent_index];
+ } else {
+ LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_RANGE,
+ "Media line index (" +
+ rtc::ToString(candidate->sdp_mline_index()) +
+ ") out of range (number of mlines: " +
+ rtc::ToString(content_size) + ").");
+ }
+ }
+
+ LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
+ "Neither sdp_mline_index nor sdp_mid specified.");
+}
+
+RTCError SdpOfferAnswerHandler::CreateChannels(const SessionDescription& desc) {
+ TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::CreateChannels");
+ // Creating the media channels. Transports should already have been created
+ // at this point.
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ const cricket::ContentInfo* voice = cricket::GetFirstAudioContent(&desc);
+ if (voice && !voice->rejected &&
+ !rtp_manager()->GetAudioTransceiver()->internal()->channel()) {
+ auto error =
+ rtp_manager()->GetAudioTransceiver()->internal()->CreateChannel(
+ voice->name, pc_->call_ptr(), pc_->configuration()->media_config,
+ pc_->SrtpRequired(), pc_->GetCryptoOptions(), audio_options(),
+ video_options(), video_bitrate_allocator_factory_.get(),
+ [&](absl::string_view mid) {
+ RTC_DCHECK_RUN_ON(network_thread());
+ return transport_controller_n()->GetRtpTransport(mid);
+ });
+ if (!error.ok()) {
+ return error;
+ }
+ }
+
+ const cricket::ContentInfo* video = cricket::GetFirstVideoContent(&desc);
+ if (video && !video->rejected &&
+ !rtp_manager()->GetVideoTransceiver()->internal()->channel()) {
+ auto error =
+ rtp_manager()->GetVideoTransceiver()->internal()->CreateChannel(
+ video->name, pc_->call_ptr(), pc_->configuration()->media_config,
+ pc_->SrtpRequired(), pc_->GetCryptoOptions(),
+
+ audio_options(), video_options(),
+ video_bitrate_allocator_factory_.get(), [&](absl::string_view mid) {
+ RTC_DCHECK_RUN_ON(network_thread());
+ return transport_controller_n()->GetRtpTransport(mid);
+ });
+ if (!error.ok()) {
+ return error;
+ }
+ }
+
+ const cricket::ContentInfo* data = cricket::GetFirstDataContent(&desc);
+ if (data && !data->rejected && !pc_->CreateDataChannelTransport(data->name)) {
+ LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR,
+ "Failed to create data channel.");
+ }
+
+ return RTCError::OK();
+}
+
+void SdpOfferAnswerHandler::DestroyMediaChannels() {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ if (!transceivers()) {
+ return;
+ }
+
+ RTC_LOG_THREAD_BLOCK_COUNT();
+
+ // Destroy video channels first since they may have a pointer to a voice
+ // channel.
+ auto list = transceivers()->List();
+ RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(0);
+
+ for (const auto& transceiver : list) {
+ if (transceiver->media_type() == cricket::MEDIA_TYPE_VIDEO) {
+ transceiver->internal()->ClearChannel();
+ }
+ }
+ for (const auto& transceiver : list) {
+ if (transceiver->media_type() == cricket::MEDIA_TYPE_AUDIO) {
+ transceiver->internal()->ClearChannel();
+ }
+ }
+}
+
+void SdpOfferAnswerHandler::GenerateMediaDescriptionOptions(
+ const SessionDescriptionInterface* session_desc,
+ RtpTransceiverDirection audio_direction,
+ RtpTransceiverDirection video_direction,
+ absl::optional<size_t>* audio_index,
+ absl::optional<size_t>* video_index,
+ absl::optional<size_t>* data_index,
+ cricket::MediaSessionOptions* session_options) {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ for (const cricket::ContentInfo& content :
+ session_desc->description()->contents()) {
+ if (IsAudioContent(&content)) {
+ // If we already have an audio m= section, reject this extra one.
+ if (*audio_index) {
+ session_options->media_description_options.push_back(
+ cricket::MediaDescriptionOptions(
+ cricket::MEDIA_TYPE_AUDIO, content.name,
+ RtpTransceiverDirection::kInactive, /*stopped=*/true));
+ } else {
+ bool stopped = (audio_direction == RtpTransceiverDirection::kInactive);
+ session_options->media_description_options.push_back(
+ cricket::MediaDescriptionOptions(cricket::MEDIA_TYPE_AUDIO,
+ content.name, audio_direction,
+ stopped));
+ *audio_index = session_options->media_description_options.size() - 1;
+ }
+ session_options->media_description_options.back().header_extensions =
+ media_engine()->voice().GetRtpHeaderExtensions();
+ } else if (IsVideoContent(&content)) {
+      // If we already have a video m= section, reject this extra one.
+ if (*video_index) {
+ session_options->media_description_options.push_back(
+ cricket::MediaDescriptionOptions(
+ cricket::MEDIA_TYPE_VIDEO, content.name,
+ RtpTransceiverDirection::kInactive, /*stopped=*/true));
+ } else {
+ bool stopped = (video_direction == RtpTransceiverDirection::kInactive);
+ session_options->media_description_options.push_back(
+ cricket::MediaDescriptionOptions(cricket::MEDIA_TYPE_VIDEO,
+ content.name, video_direction,
+ stopped));
+ *video_index = session_options->media_description_options.size() - 1;
+ }
+ session_options->media_description_options.back().header_extensions =
+ media_engine()->video().GetRtpHeaderExtensions();
+ } else if (IsUnsupportedContent(&content)) {
+ session_options->media_description_options.push_back(
+ cricket::MediaDescriptionOptions(cricket::MEDIA_TYPE_UNSUPPORTED,
+ content.name,
+ RtpTransceiverDirection::kInactive,
+ /*stopped=*/true));
+ } else {
+ RTC_DCHECK(IsDataContent(&content));
+      // If we already have a data m= section, reject this extra one.
+ if (*data_index) {
+ session_options->media_description_options.push_back(
+ GetMediaDescriptionOptionsForRejectedData(content.name));
+ } else {
+ session_options->media_description_options.push_back(
+ GetMediaDescriptionOptionsForActiveData(content.name));
+ *data_index = session_options->media_description_options.size() - 1;
+ }
+ }
+ }
+}
+
+cricket::MediaDescriptionOptions
+SdpOfferAnswerHandler::GetMediaDescriptionOptionsForActiveData(
+ const std::string& mid) const {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ // Direction for data sections is meaningless, but legacy endpoints might
+ // expect sendrecv.
+ cricket::MediaDescriptionOptions options(cricket::MEDIA_TYPE_DATA, mid,
+ RtpTransceiverDirection::kSendRecv,
+ /*stopped=*/false);
+ return options;
+}
+
+cricket::MediaDescriptionOptions
+SdpOfferAnswerHandler::GetMediaDescriptionOptionsForRejectedData(
+ const std::string& mid) const {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ cricket::MediaDescriptionOptions options(cricket::MEDIA_TYPE_DATA, mid,
+ RtpTransceiverDirection::kInactive,
+ /*stopped=*/true);
+ return options;
+}
+
+bool SdpOfferAnswerHandler::UpdatePayloadTypeDemuxingState(
+ cricket::ContentSource source,
+ const std::map<std::string, const cricket::ContentGroup*>&
+ bundle_groups_by_mid) {
+ TRACE_EVENT0("webrtc",
+ "SdpOfferAnswerHandler::UpdatePayloadTypeDemuxingState");
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ // We may need to delete any created default streams and disable creation of
+ // new ones on the basis of payload type. This is needed to avoid SSRC
+ // collisions in Call's RtpDemuxer, in the case that a transceiver has
+ // created a default stream, and then some other channel gets the SSRC
+ // signaled in the corresponding Unified Plan "m=" section. Specifically, we
+ // need to disable payload type based demuxing when two bundled "m=" sections
+ // are using the same payload type(s). For more context
+ // see https://bugs.chromium.org/p/webrtc/issues/detail?id=11477
+ const SessionDescriptionInterface* sdesc =
+ (source == cricket::CS_LOCAL ? local_description()
+ : remote_description());
+ struct PayloadTypes {
+ std::set<int> audio_payload_types;
+ std::set<int> video_payload_types;
+ bool pt_demuxing_possible_audio = true;
+ bool pt_demuxing_possible_video = true;
+ };
+ std::map<const cricket::ContentGroup*, PayloadTypes> payload_types_by_bundle;
+ // If the MID is missing from *any* receiving m= section, this is set to true.
+ bool mid_header_extension_missing_audio = false;
+ bool mid_header_extension_missing_video = false;
+ for (auto& content_info : sdesc->description()->contents()) {
+ auto it = bundle_groups_by_mid.find(content_info.name);
+ const cricket::ContentGroup* bundle_group =
+ it != bundle_groups_by_mid.end() ? it->second : nullptr;
+ // If this m= section isn't bundled, it's safe to demux by payload type
+ // since other m= sections using the same payload type will also be using
+ // different transports.
+ if (!bundle_group) {
+ continue;
+ }
+ PayloadTypes* payload_types = &payload_types_by_bundle[bundle_group];
+ if (content_info.rejected ||
+ (source == cricket::ContentSource::CS_LOCAL &&
+ !RtpTransceiverDirectionHasRecv(
+ content_info.media_description()->direction())) ||
+ (source == cricket::ContentSource::CS_REMOTE &&
+ !RtpTransceiverDirectionHasSend(
+ content_info.media_description()->direction()))) {
+ // Ignore transceivers that are not receiving.
+ continue;
+ }
+ switch (content_info.media_description()->type()) {
+ case cricket::MediaType::MEDIA_TYPE_AUDIO: {
+ if (!mid_header_extension_missing_audio) {
+ mid_header_extension_missing_audio =
+ !ContentHasHeaderExtension(content_info, RtpExtension::kMidUri);
+ }
+ const cricket::AudioContentDescription* audio_desc =
+ content_info.media_description()->as_audio();
+ for (const cricket::AudioCodec& audio : audio_desc->codecs()) {
+ if (payload_types->audio_payload_types.count(audio.id)) {
+ // Two m= sections are using the same payload type, thus demuxing
+ // by payload type is not possible.
+ payload_types->pt_demuxing_possible_audio = false;
+ }
+ payload_types->audio_payload_types.insert(audio.id);
+ }
+ break;
+ }
+ case cricket::MediaType::MEDIA_TYPE_VIDEO: {
+ if (!mid_header_extension_missing_video) {
+ mid_header_extension_missing_video =
+ !ContentHasHeaderExtension(content_info, RtpExtension::kMidUri);
+ }
+ const cricket::VideoContentDescription* video_desc =
+ content_info.media_description()->as_video();
+ for (const cricket::VideoCodec& video : video_desc->codecs()) {
+ if (payload_types->video_payload_types.count(video.id)) {
+ // Two m= sections are using the same payload type, thus demuxing
+ // by payload type is not possible.
+ payload_types->pt_demuxing_possible_video = false;
+ }
+ payload_types->video_payload_types.insert(video.id);
+ }
+ break;
+ }
+ default:
+ // Ignore data channels.
+ continue;
+ }
+ }
+
+ // In Unified Plan, payload type demuxing is useful for legacy endpoints that
+  // don't support the MID header extension, but it can also cause incorrect
+ // forwarding of packets when going from one m= section to multiple m=
+ // sections in the same BUNDLE. This only happens if media arrives prior to
+ // negotiation, but this can cause missing video and unsignalled ssrc bugs
+ // severe enough to warrant disabling PT demuxing in such cases. Therefore, if
+ // a MID header extension is present on all m= sections for a given kind
+ // (audio/video) then we use that as an OK to disable payload type demuxing in
+ // BUNDLEs of that kind. However if PT demuxing was ever turned on (e.g. MID
+ // was ever removed on ANY m= section of that kind) then we continue to allow
+ // PT demuxing in order to prevent disabling it in follow-up O/A exchanges and
+ // allowing early media by PT.
+ bool bundled_pt_demux_allowed_audio = !IsUnifiedPlan() ||
+ mid_header_extension_missing_audio ||
+ pt_demuxing_has_been_used_audio_;
+ bool bundled_pt_demux_allowed_video = !IsUnifiedPlan() ||
+ mid_header_extension_missing_video ||
+ pt_demuxing_has_been_used_video_;
+
+ // Gather all updates ahead of time so that all channels can be updated in a
+ // single BlockingCall; necessary due to thread guards.
+ std::vector<std::pair<bool, cricket::ChannelInterface*>> channels_to_update;
+ for (const auto& transceiver : transceivers()->ListInternal()) {
+ cricket::ChannelInterface* channel = transceiver->channel();
+ const ContentInfo* content =
+ FindMediaSectionForTransceiver(transceiver, sdesc);
+ if (!channel || !content) {
+ continue;
+ }
+
+ const cricket::MediaType media_type = channel->media_type();
+ if (media_type != cricket::MediaType::MEDIA_TYPE_AUDIO &&
+ media_type != cricket::MediaType::MEDIA_TYPE_VIDEO) {
+ continue;
+ }
+
+ RtpTransceiverDirection local_direction =
+ content->media_description()->direction();
+ if (source == cricket::CS_REMOTE) {
+ local_direction = RtpTransceiverDirectionReversed(local_direction);
+ }
+
+ auto bundle_it = bundle_groups_by_mid.find(channel->mid());
+ const cricket::ContentGroup* bundle_group =
+ bundle_it != bundle_groups_by_mid.end() ? bundle_it->second : nullptr;
+ bool pt_demux_enabled = RtpTransceiverDirectionHasRecv(local_direction);
+ if (media_type == cricket::MediaType::MEDIA_TYPE_AUDIO) {
+ pt_demux_enabled &=
+ !bundle_group ||
+ (bundled_pt_demux_allowed_audio &&
+ payload_types_by_bundle[bundle_group].pt_demuxing_possible_audio);
+ if (pt_demux_enabled) {
+ pt_demuxing_has_been_used_audio_ = true;
+ }
+ } else {
+ RTC_DCHECK_EQ(media_type, cricket::MediaType::MEDIA_TYPE_VIDEO);
+ pt_demux_enabled &=
+ !bundle_group ||
+ (bundled_pt_demux_allowed_video &&
+ payload_types_by_bundle[bundle_group].pt_demuxing_possible_video);
+ if (pt_demux_enabled) {
+ pt_demuxing_has_been_used_video_ = true;
+ }
+ }
+
+ channels_to_update.emplace_back(pt_demux_enabled, transceiver->channel());
+ }
+
+ if (channels_to_update.empty()) {
+ return true;
+ }
+
+ // TODO(bugs.webrtc.org/11993): This BlockingCall() will also block on the
+ // network thread for every demuxer sink that needs to be updated. The demuxer
+ // state needs to be fully (and only) managed on the network thread and once
+ // that's the case, there's no need to stop by on the worker. Ideally we could
+ // also do this without blocking.
+ return context_->worker_thread()->BlockingCall([&channels_to_update]() {
+ for (const auto& it : channels_to_update) {
+ if (!it.second->SetPayloadTypeDemuxingEnabled(it.first)) {
+ // Note that the state has already been irrevocably changed at this
+ // point. Is it useful to stop the loop?
+ return false;
+ }
+ }
+ return true;
+ });
+}
+
+bool SdpOfferAnswerHandler::ConfiguredForMedia() const {
+ return context_->media_engine();
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/sdp_offer_answer.h b/third_party/libwebrtc/pc/sdp_offer_answer.h
new file mode 100644
index 0000000000..8aa7040b16
--- /dev/null
+++ b/third_party/libwebrtc/pc/sdp_offer_answer.h
@@ -0,0 +1,690 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_SDP_OFFER_ANSWER_H_
+#define PC_SDP_OFFER_ANSWER_H_
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <functional>
+#include <map>
+#include <memory>
+#include <set>
+#include <string>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/audio_options.h"
+#include "api/candidate.h"
+#include "api/jsep.h"
+#include "api/jsep_ice_candidate.h"
+#include "api/media_stream_interface.h"
+#include "api/media_types.h"
+#include "api/peer_connection_interface.h"
+#include "api/rtc_error.h"
+#include "api/rtp_transceiver_direction.h"
+#include "api/rtp_transceiver_interface.h"
+#include "api/scoped_refptr.h"
+#include "api/sequence_checker.h"
+#include "api/set_local_description_observer_interface.h"
+#include "api/set_remote_description_observer_interface.h"
+#include "api/uma_metrics.h"
+#include "api/video/video_bitrate_allocator_factory.h"
+#include "media/base/media_channel.h"
+#include "media/base/stream_params.h"
+#include "p2p/base/port_allocator.h"
+#include "pc/connection_context.h"
+#include "pc/data_channel_controller.h"
+#include "pc/jsep_transport_controller.h"
+#include "pc/media_session.h"
+#include "pc/media_stream_observer.h"
+#include "pc/peer_connection_internal.h"
+#include "pc/rtp_receiver.h"
+#include "pc/rtp_transceiver.h"
+#include "pc/rtp_transmission_manager.h"
+#include "pc/sdp_state_provider.h"
+#include "pc/session_description.h"
+#include "pc/stream_collection.h"
+#include "pc/transceiver_list.h"
+#include "pc/webrtc_session_description_factory.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/operations_chain.h"
+#include "rtc_base/ssl_stream_adapter.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/thread_annotations.h"
+#include "rtc_base/unique_id_generator.h"
+#include "rtc_base/weak_ptr.h"
+
+namespace webrtc {
+
+// SdpOfferAnswerHandler is a component
+// of the PeerConnection object as defined
+// by the PeerConnectionInterface API surface.
+// The class is responsible for the following:
+// - Parsing and interpreting SDP.
+// - Generating offers and answers based on the current state.
+// This class lives on the signaling thread.
+class SdpOfferAnswerHandler : public SdpStateProvider {
+ public:
+ ~SdpOfferAnswerHandler();
+
+ // Creates an SdpOfferAnswerHandler. Modifies dependencies.
+ static std::unique_ptr<SdpOfferAnswerHandler> Create(
+ PeerConnectionSdpMethods* pc,
+ const PeerConnectionInterface::RTCConfiguration& configuration,
+ PeerConnectionDependencies& dependencies,
+ ConnectionContext* context);
+
+ void ResetSessionDescFactory() {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ webrtc_session_desc_factory_.reset();
+ }
+ const WebRtcSessionDescriptionFactory* webrtc_session_desc_factory() const {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ return webrtc_session_desc_factory_.get();
+ }
+
+ // Change signaling state to Closed, and perform appropriate actions.
+ void Close();
+
+ // Called as part of destroying the owning PeerConnection.
+ void PrepareForShutdown();
+
+ // Implementation of SdpStateProvider
+ PeerConnectionInterface::SignalingState signaling_state() const override;
+
+ const SessionDescriptionInterface* local_description() const override;
+ const SessionDescriptionInterface* remote_description() const override;
+ const SessionDescriptionInterface* current_local_description() const override;
+ const SessionDescriptionInterface* current_remote_description()
+ const override;
+ const SessionDescriptionInterface* pending_local_description() const override;
+ const SessionDescriptionInterface* pending_remote_description()
+ const override;
+
+ bool NeedsIceRestart(const std::string& content_name) const override;
+ bool IceRestartPending(const std::string& content_name) const override;
+ absl::optional<rtc::SSLRole> GetDtlsRole(
+ const std::string& mid) const override;
+
+ void RestartIce();
+
+ // JSEP01
+ void CreateOffer(
+ CreateSessionDescriptionObserver* observer,
+ const PeerConnectionInterface::RTCOfferAnswerOptions& options);
+ void CreateAnswer(
+ CreateSessionDescriptionObserver* observer,
+ const PeerConnectionInterface::RTCOfferAnswerOptions& options);
+
+ void SetLocalDescription(
+ std::unique_ptr<SessionDescriptionInterface> desc,
+ rtc::scoped_refptr<SetLocalDescriptionObserverInterface> observer);
+ void SetLocalDescription(
+ rtc::scoped_refptr<SetLocalDescriptionObserverInterface> observer);
+ void SetLocalDescription(SetSessionDescriptionObserver* observer,
+ SessionDescriptionInterface* desc);
+ void SetLocalDescription(SetSessionDescriptionObserver* observer);
+
+ void SetRemoteDescription(
+ std::unique_ptr<SessionDescriptionInterface> desc,
+ rtc::scoped_refptr<SetRemoteDescriptionObserverInterface> observer);
+ void SetRemoteDescription(SetSessionDescriptionObserver* observer,
+ SessionDescriptionInterface* desc);
+
+ PeerConnectionInterface::RTCConfiguration GetConfiguration();
+ RTCError SetConfiguration(
+ const PeerConnectionInterface::RTCConfiguration& configuration);
+ bool AddIceCandidate(const IceCandidateInterface* candidate);
+ void AddIceCandidate(std::unique_ptr<IceCandidateInterface> candidate,
+ std::function<void(RTCError)> callback);
+ bool RemoveIceCandidates(const std::vector<cricket::Candidate>& candidates);
+ // Adds a locally generated candidate to the local description.
+ void AddLocalIceCandidate(const JsepIceCandidate* candidate);
+ void RemoveLocalIceCandidates(
+ const std::vector<cricket::Candidate>& candidates);
+ bool ShouldFireNegotiationNeededEvent(uint32_t event_id);
+
+ bool AddStream(MediaStreamInterface* local_stream);
+ void RemoveStream(MediaStreamInterface* local_stream);
+
+ absl::optional<bool> is_caller() const;
+ bool HasNewIceCredentials();
+ void UpdateNegotiationNeeded();
+ void AllocateSctpSids();
+ // Based on the negotiation state, guess what the SSLRole might be without
+ // directly getting the information from the transport.
+ // This is used for allocating stream ids for data channels.
+ // See also `InternalDataChannelInit::fallback_ssl_role`.
+ absl::optional<rtc::SSLRole> GuessSslRole() const;
+
+ // Destroys all media BaseChannels.
+ void DestroyMediaChannels();
+
+ rtc::scoped_refptr<StreamCollectionInterface> local_streams();
+ rtc::scoped_refptr<StreamCollectionInterface> remote_streams();
+
+ bool initial_offerer() {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ if (initial_offerer_) {
+ return *initial_offerer_;
+ }
+ return false;
+ }
+
+ private:
+ class RemoteDescriptionOperation;
+ class ImplicitCreateSessionDescriptionObserver;
+
+ friend class ImplicitCreateSessionDescriptionObserver;
+ class SetSessionDescriptionObserverAdapter;
+
+ friend class SetSessionDescriptionObserverAdapter;
+
+ enum class SessionError {
+ kNone, // No error.
+ kContent, // Error in BaseChannel SetLocalContent/SetRemoteContent.
+ kTransport, // Error from the underlying transport.
+ };
+
+ // Represents the [[LocalIceCredentialsToReplace]] internal slot in the spec.
+ // It makes the next CreateOffer() produce new ICE credentials even if
+ // RTCOfferAnswerOptions::ice_restart is false.
+ // https://w3c.github.io/webrtc-pc/#dfn-localufragstoreplace
+ // TODO(hbos): When JsepTransportController/JsepTransport supports rollback,
+ // move this type of logic to JsepTransportController/JsepTransport.
+ class LocalIceCredentialsToReplace;
+
+ // Only called by the Create() function.
+ explicit SdpOfferAnswerHandler(PeerConnectionSdpMethods* pc,
+ ConnectionContext* context);
+ // Called from the `Create()` function. Can only be called
+ // once. Modifies dependencies.
+ void Initialize(
+ const PeerConnectionInterface::RTCConfiguration& configuration,
+ PeerConnectionDependencies& dependencies,
+ ConnectionContext* context);
+
+ rtc::Thread* signaling_thread() const;
+ rtc::Thread* network_thread() const;
+ // Non-const versions of local_description()/remote_description(), for use
+ // internally.
+  SessionDescriptionInterface* mutable_local_description()
+      RTC_RUN_ON(signaling_thread()) {
+    // A pending local description, when present, shadows the current
+    // (stable) one.
+    if (pending_local_description_) {
+      return pending_local_description_.get();
+    }
+    return current_local_description_.get();
+  }
+  SessionDescriptionInterface* mutable_remote_description()
+      RTC_RUN_ON(signaling_thread()) {
+    // A pending remote description, when present, shadows the current
+    // (stable) one.
+    if (pending_remote_description_) {
+      return pending_remote_description_.get();
+    }
+    return current_remote_description_.get();
+  }
+
+ // Synchronous implementations of SetLocalDescription/SetRemoteDescription
+ // that return an RTCError instead of invoking a callback.
+ RTCError ApplyLocalDescription(
+ std::unique_ptr<SessionDescriptionInterface> desc,
+ const std::map<std::string, const cricket::ContentGroup*>&
+ bundle_groups_by_mid);
+ void ApplyRemoteDescription(
+ std::unique_ptr<RemoteDescriptionOperation> operation);
+
+ RTCError ReplaceRemoteDescription(
+ std::unique_ptr<SessionDescriptionInterface> desc,
+ SdpType sdp_type,
+ std::unique_ptr<SessionDescriptionInterface>* replaced_description)
+ RTC_RUN_ON(signaling_thread());
+
+ // Part of ApplyRemoteDescription steps specific to Unified Plan.
+ void ApplyRemoteDescriptionUpdateTransceiverState(SdpType sdp_type);
+
+ // Part of ApplyRemoteDescription steps specific to plan b.
+ void PlanBUpdateSendersAndReceivers(
+ const cricket::ContentInfo* audio_content,
+ const cricket::AudioContentDescription* audio_desc,
+ const cricket::ContentInfo* video_content,
+ const cricket::VideoContentDescription* video_desc);
+
+ // Implementation of the offer/answer exchange operations. These are chained
+ // onto the `operations_chain_` when the public CreateOffer(), CreateAnswer(),
+ // SetLocalDescription() and SetRemoteDescription() methods are invoked.
+ void DoCreateOffer(
+ const PeerConnectionInterface::RTCOfferAnswerOptions& options,
+ rtc::scoped_refptr<CreateSessionDescriptionObserver> observer);
+ void DoCreateAnswer(
+ const PeerConnectionInterface::RTCOfferAnswerOptions& options,
+ rtc::scoped_refptr<CreateSessionDescriptionObserver> observer);
+ void DoSetLocalDescription(
+ std::unique_ptr<SessionDescriptionInterface> desc,
+ rtc::scoped_refptr<SetLocalDescriptionObserverInterface> observer);
+ void DoSetRemoteDescription(
+ std::unique_ptr<RemoteDescriptionOperation> operation);
+
+ // Called after a DoSetRemoteDescription operation completes.
+ void SetRemoteDescriptionPostProcess(bool was_answer)
+ RTC_RUN_ON(signaling_thread());
+
+ // Update the state, signaling if necessary.
+ void ChangeSignalingState(
+ PeerConnectionInterface::SignalingState signaling_state);
+
+ RTCError UpdateSessionState(
+ SdpType type,
+ cricket::ContentSource source,
+ const cricket::SessionDescription* description,
+ const std::map<std::string, const cricket::ContentGroup*>&
+ bundle_groups_by_mid);
+
+ bool IsUnifiedPlan() const RTC_RUN_ON(signaling_thread());
+
+ // Signals from MediaStreamObserver.
+ void OnAudioTrackAdded(AudioTrackInterface* track,
+ MediaStreamInterface* stream)
+ RTC_RUN_ON(signaling_thread());
+ void OnAudioTrackRemoved(AudioTrackInterface* track,
+ MediaStreamInterface* stream)
+ RTC_RUN_ON(signaling_thread());
+ void OnVideoTrackAdded(VideoTrackInterface* track,
+ MediaStreamInterface* stream)
+ RTC_RUN_ON(signaling_thread());
+ void OnVideoTrackRemoved(VideoTrackInterface* track,
+ MediaStreamInterface* stream)
+ RTC_RUN_ON(signaling_thread());
+
+  // `desc_type` is the type of the description that caused the rollback.
+ RTCError Rollback(SdpType desc_type);
+ void OnOperationsChainEmpty();
+
+ // Runs the algorithm **set the associated remote streams** specified in
+ // https://w3c.github.io/webrtc-pc/#set-associated-remote-streams.
+ void SetAssociatedRemoteStreams(
+ rtc::scoped_refptr<RtpReceiverInternal> receiver,
+ const std::vector<std::string>& stream_ids,
+ std::vector<rtc::scoped_refptr<MediaStreamInterface>>* added_streams,
+ std::vector<rtc::scoped_refptr<MediaStreamInterface>>* removed_streams);
+
+ bool CheckIfNegotiationIsNeeded();
+ void GenerateNegotiationNeededEvent();
+ // Helper method which verifies SDP.
+ RTCError ValidateSessionDescription(
+ const SessionDescriptionInterface* sdesc,
+ cricket::ContentSource source,
+ const std::map<std::string, const cricket::ContentGroup*>&
+ bundle_groups_by_mid) RTC_RUN_ON(signaling_thread());
+
+ // Updates the local RtpTransceivers according to the JSEP rules. Called as
+ // part of setting the local/remote description.
+ RTCError UpdateTransceiversAndDataChannels(
+ cricket::ContentSource source,
+ const SessionDescriptionInterface& new_session,
+ const SessionDescriptionInterface* old_local_description,
+ const SessionDescriptionInterface* old_remote_description,
+ const std::map<std::string, const cricket::ContentGroup*>&
+ bundle_groups_by_mid);
+
+ // Associate the given transceiver according to the JSEP rules.
+ RTCErrorOr<
+ rtc::scoped_refptr<RtpTransceiverProxyWithInternal<RtpTransceiver>>>
+ AssociateTransceiver(cricket::ContentSource source,
+ SdpType type,
+ size_t mline_index,
+ const cricket::ContentInfo& content,
+ const cricket::ContentInfo* old_local_content,
+ const cricket::ContentInfo* old_remote_content)
+ RTC_RUN_ON(signaling_thread());
+
+ // Returns the media section in the given session description that is
+ // associated with the RtpTransceiver. Returns null if none found or this
+ // RtpTransceiver is not associated. Logic varies depending on the
+ // SdpSemantics specified in the configuration.
+ const cricket::ContentInfo* FindMediaSectionForTransceiver(
+ const RtpTransceiver* transceiver,
+ const SessionDescriptionInterface* sdesc) const;
+
+ // Either creates or destroys the transceiver's BaseChannel according to the
+ // given media section.
+ RTCError UpdateTransceiverChannel(
+ rtc::scoped_refptr<RtpTransceiverProxyWithInternal<RtpTransceiver>>
+ transceiver,
+ const cricket::ContentInfo& content,
+ const cricket::ContentGroup* bundle_group) RTC_RUN_ON(signaling_thread());
+
+ // Either creates or destroys the local data channel according to the given
+ // media section.
+ RTCError UpdateDataChannelTransport(cricket::ContentSource source,
+ const cricket::ContentInfo& content,
+ const cricket::ContentGroup* bundle_group)
+ RTC_RUN_ON(signaling_thread());
+ // Check if a call to SetLocalDescription is acceptable with a session
+ // description of the given type.
+ bool ExpectSetLocalDescription(SdpType type);
+ // Check if a call to SetRemoteDescription is acceptable with a session
+ // description of the given type.
+ bool ExpectSetRemoteDescription(SdpType type);
+
+ // The offer/answer machinery assumes the media section MID is present and
+ // unique. To support legacy end points that do not supply a=mid lines, this
+ // method will modify the session description to add MIDs generated according
+ // to the SDP semantics.
+ void FillInMissingRemoteMids(cricket::SessionDescription* remote_description);
+
+ // Returns an RtpTransceiver, if available, that can be used to receive the
+ // given media type according to JSEP rules.
+ rtc::scoped_refptr<RtpTransceiverProxyWithInternal<RtpTransceiver>>
+ FindAvailableTransceiverToReceive(cricket::MediaType media_type) const;
+
+ // Returns a MediaSessionOptions struct with options decided by `options`,
+ // the local MediaStreams and DataChannels.
+ void GetOptionsForOffer(const PeerConnectionInterface::RTCOfferAnswerOptions&
+ offer_answer_options,
+ cricket::MediaSessionOptions* session_options);
+ void GetOptionsForPlanBOffer(
+ const PeerConnectionInterface::RTCOfferAnswerOptions&
+ offer_answer_options,
+ cricket::MediaSessionOptions* session_options)
+ RTC_RUN_ON(signaling_thread());
+ void GetOptionsForUnifiedPlanOffer(
+ const PeerConnectionInterface::RTCOfferAnswerOptions&
+ offer_answer_options,
+ cricket::MediaSessionOptions* session_options)
+ RTC_RUN_ON(signaling_thread());
+
+ // Returns a MediaSessionOptions struct with options decided by
+ // `constraints`, the local MediaStreams and DataChannels.
+ void GetOptionsForAnswer(const PeerConnectionInterface::RTCOfferAnswerOptions&
+ offer_answer_options,
+ cricket::MediaSessionOptions* session_options);
+ void GetOptionsForPlanBAnswer(
+ const PeerConnectionInterface::RTCOfferAnswerOptions&
+ offer_answer_options,
+ cricket::MediaSessionOptions* session_options)
+ RTC_RUN_ON(signaling_thread());
+ void GetOptionsForUnifiedPlanAnswer(
+ const PeerConnectionInterface::RTCOfferAnswerOptions&
+ offer_answer_options,
+ cricket::MediaSessionOptions* session_options)
+ RTC_RUN_ON(signaling_thread());
+
+ const char* SessionErrorToString(SessionError error) const;
+ std::string GetSessionErrorMsg();
+ // Returns the last error in the session. See the enum above for details.
+  SessionError session_error() const {
+    // session_error_ is RTC_GUARDED_BY(signaling_thread()); enforce that
+    // reads happen on the signaling thread.
+    RTC_DCHECK_RUN_ON(signaling_thread());
+    return session_error_;
+  }
+  // Returns the description accompanying the last session error (see
+  // SetSessionError()). The backing field is signaling-thread guarded.
+  const std::string& session_error_desc() const { return session_error_desc_; }
+
+ RTCError HandleLegacyOfferOptions(
+ const PeerConnectionInterface::RTCOfferAnswerOptions& options);
+ void RemoveRecvDirectionFromReceivingTransceiversOfType(
+ cricket::MediaType media_type) RTC_RUN_ON(signaling_thread());
+ void AddUpToOneReceivingTransceiverOfType(cricket::MediaType media_type);
+
+ std::vector<
+ rtc::scoped_refptr<RtpTransceiverProxyWithInternal<RtpTransceiver>>>
+ GetReceivingTransceiversOfType(cricket::MediaType media_type)
+ RTC_RUN_ON(signaling_thread());
+
+ // Runs the algorithm specified in
+ // https://w3c.github.io/webrtc-pc/#process-remote-track-removal
+ // This method will update the following lists:
+ // `remove_list` is the list of transceivers for which the receiving track is
+ // being removed.
+ // `removed_streams` is the list of streams which no longer have a receiving
+ // track so should be removed.
+ void ProcessRemovalOfRemoteTrack(
+ const rtc::scoped_refptr<RtpTransceiverProxyWithInternal<RtpTransceiver>>
+ transceiver,
+ std::vector<rtc::scoped_refptr<RtpTransceiverInterface>>* remove_list,
+ std::vector<rtc::scoped_refptr<MediaStreamInterface>>* removed_streams);
+
+ void RemoveRemoteStreamsIfEmpty(
+ const std::vector<rtc::scoped_refptr<MediaStreamInterface>>&
+ remote_streams,
+ std::vector<rtc::scoped_refptr<MediaStreamInterface>>* removed_streams);
+
+ // Remove all local and remote senders of type `media_type`.
+ // Called when a media type is rejected (m-line set to port 0).
+ void RemoveSenders(cricket::MediaType media_type);
+
+ // Loops through the vector of `streams` and finds added and removed
+ // StreamParams since last time this method was called.
+ // For each new or removed StreamParam, OnLocalSenderSeen or
+ // OnLocalSenderRemoved is invoked.
+ void UpdateLocalSenders(const std::vector<cricket::StreamParams>& streams,
+ cricket::MediaType media_type);
+
+ // Makes sure a MediaStreamTrack is created for each StreamParam in `streams`,
+ // and existing MediaStreamTracks are removed if there is no corresponding
+ // StreamParam. If `default_track_needed` is true, a default MediaStreamTrack
+ // is created if it doesn't exist; if false, it's removed if it exists.
+ // `media_type` is the type of the `streams` and can be either audio or video.
+ // If a new MediaStream is created it is added to `new_streams`.
+ void UpdateRemoteSendersList(
+ const std::vector<cricket::StreamParams>& streams,
+ bool default_track_needed,
+ cricket::MediaType media_type,
+ StreamCollection* new_streams);
+
+ // Enables media channels to allow sending of media.
+ // This enables media to flow on all configured audio/video channels.
+ void EnableSending();
+ // Push the media parts of the local or remote session description
+ // down to all of the channels, and start SCTP if needed.
+ RTCError PushdownMediaDescription(
+ SdpType type,
+ cricket::ContentSource source,
+ const std::map<std::string, const cricket::ContentGroup*>&
+ bundle_groups_by_mid);
+
+ RTCError PushdownTransportDescription(cricket::ContentSource source,
+ SdpType type);
+ // Helper function to remove stopped transceivers.
+ void RemoveStoppedTransceivers();
+ // Deletes the corresponding channel of contents that don't exist in `desc`.
+ // `desc` can be null. This means that all channels are deleted.
+ void RemoveUnusedChannels(const cricket::SessionDescription* desc);
+
+ // Finds remote MediaStreams without any tracks and removes them from
+ // `remote_streams_` and notifies the observer that the MediaStreams no longer
+ // exist.
+ void UpdateEndedRemoteMediaStreams();
+
+ // Uses all remote candidates in the currently set remote_description().
+ // If no remote description is currently set (nullptr), the return value will
+ // be true. If `UseCandidate()` fails for any candidate in the remote
+ // description, the return value will be false.
+ bool UseCandidatesInRemoteDescription();
+ // Uses `candidate` in this session.
+ bool UseCandidate(const IceCandidateInterface* candidate);
+ // Returns true if we are ready to push down the remote candidate.
+ // `remote_desc` is the new remote description, or NULL if the current remote
+ // description should be used. Output `valid` is true if the candidate media
+ // index is valid.
+ bool ReadyToUseRemoteCandidate(const IceCandidateInterface* candidate,
+ const SessionDescriptionInterface* remote_desc,
+ bool* valid);
+
+ RTCErrorOr<const cricket::ContentInfo*> FindContentInfo(
+ const SessionDescriptionInterface* description,
+ const IceCandidateInterface* candidate) RTC_RUN_ON(signaling_thread());
+
+ // Functions for dealing with transports.
+ // Note that cricket code uses the term "channel" for what other code
+ // refers to as "transport".
+
+ // Allocates media channels based on the `desc`. If `desc` doesn't have
+ // the BUNDLE option, this method will disable BUNDLE in PortAllocator.
+ // This method will also delete any existing media channels before creating.
+ RTCError CreateChannels(const cricket::SessionDescription& desc);
+
+ // Generates MediaDescriptionOptions for the `session_opts` based on existing
+ // local description or remote description.
+ void GenerateMediaDescriptionOptions(
+ const SessionDescriptionInterface* session_desc,
+ RtpTransceiverDirection audio_direction,
+ RtpTransceiverDirection video_direction,
+ absl::optional<size_t>* audio_index,
+ absl::optional<size_t>* video_index,
+ absl::optional<size_t>* data_index,
+ cricket::MediaSessionOptions* session_options);
+
+ // Generates the active MediaDescriptionOptions for the local data channel
+ // given the specified MID.
+ cricket::MediaDescriptionOptions GetMediaDescriptionOptionsForActiveData(
+ const std::string& mid) const;
+
+ // Generates the rejected MediaDescriptionOptions for the local data channel
+ // given the specified MID.
+ cricket::MediaDescriptionOptions GetMediaDescriptionOptionsForRejectedData(
+ const std::string& mid) const;
+
+  // Based on the number of transceivers per media type, enable or disable
+ // payload type based demuxing in the affected channels.
+ bool UpdatePayloadTypeDemuxingState(
+ cricket::ContentSource source,
+ const std::map<std::string, const cricket::ContentGroup*>&
+ bundle_groups_by_mid);
+
+ // Updates the error state, signaling if necessary.
+ void SetSessionError(SessionError error, const std::string& error_desc);
+
+ // Implements AddIceCandidate without reporting usage, but returns the
+ // particular success/error value that should be reported (and can be utilized
+ // for other purposes).
+ AddIceCandidateResult AddIceCandidateInternal(
+ const IceCandidateInterface* candidate);
+
+ // ==================================================================
+ // Access to pc_ variables
+ cricket::MediaEngineInterface* media_engine() const;
+ TransceiverList* transceivers();
+ const TransceiverList* transceivers() const;
+ DataChannelController* data_channel_controller();
+ const DataChannelController* data_channel_controller() const;
+ cricket::PortAllocator* port_allocator();
+ const cricket::PortAllocator* port_allocator() const;
+ RtpTransmissionManager* rtp_manager();
+ const RtpTransmissionManager* rtp_manager() const;
+ JsepTransportController* transport_controller_s()
+ RTC_RUN_ON(signaling_thread());
+ const JsepTransportController* transport_controller_s() const
+ RTC_RUN_ON(signaling_thread());
+ JsepTransportController* transport_controller_n()
+ RTC_RUN_ON(network_thread());
+ const JsepTransportController* transport_controller_n() const
+ RTC_RUN_ON(network_thread());
+ // ===================================================================
+  // Cached global audio options (backing field is signaling-thread guarded).
+  const cricket::AudioOptions& audio_options() { return audio_options_; }
+  // Cached global video options (backing field is signaling-thread guarded).
+  const cricket::VideoOptions& video_options() { return video_options_; }
+ bool ConfiguredForMedia() const;
+
+ PeerConnectionSdpMethods* const pc_;
+ ConnectionContext* const context_;
+
+ std::unique_ptr<WebRtcSessionDescriptionFactory> webrtc_session_desc_factory_
+ RTC_GUARDED_BY(signaling_thread());
+
+ std::unique_ptr<SessionDescriptionInterface> current_local_description_
+ RTC_GUARDED_BY(signaling_thread());
+ std::unique_ptr<SessionDescriptionInterface> pending_local_description_
+ RTC_GUARDED_BY(signaling_thread());
+ std::unique_ptr<SessionDescriptionInterface> current_remote_description_
+ RTC_GUARDED_BY(signaling_thread());
+ std::unique_ptr<SessionDescriptionInterface> pending_remote_description_
+ RTC_GUARDED_BY(signaling_thread());
+
+ PeerConnectionInterface::SignalingState signaling_state_
+ RTC_GUARDED_BY(signaling_thread()) = PeerConnectionInterface::kStable;
+
+ // Whether this peer is the caller. Set when the local description is applied.
+ absl::optional<bool> is_caller_ RTC_GUARDED_BY(signaling_thread());
+
+ // Streams added via AddStream.
+ const rtc::scoped_refptr<StreamCollection> local_streams_
+ RTC_GUARDED_BY(signaling_thread());
+ // Streams created as a result of SetRemoteDescription.
+ const rtc::scoped_refptr<StreamCollection> remote_streams_
+ RTC_GUARDED_BY(signaling_thread());
+
+ std::vector<std::unique_ptr<MediaStreamObserver>> stream_observers_
+ RTC_GUARDED_BY(signaling_thread());
+
+ // The operations chain is used by the offer/answer exchange methods to ensure
+ // they are executed in the right order. For example, if
+ // SetRemoteDescription() is invoked while CreateOffer() is still pending, the
+ // SRD operation will not start until CreateOffer() has completed. See
+ // https://w3c.github.io/webrtc-pc/#dfn-operations-chain.
+ rtc::scoped_refptr<rtc::OperationsChain> operations_chain_
+ RTC_GUARDED_BY(signaling_thread());
+
+ // One PeerConnection has only one RTCP CNAME.
+ // https://tools.ietf.org/html/draft-ietf-rtcweb-rtp-usage-26#section-4.9
+ const std::string rtcp_cname_;
+
+ // MIDs will be generated using this generator which will keep track of
+ // all the MIDs that have been seen over the life of the PeerConnection.
+ rtc::UniqueStringGenerator mid_generator_ RTC_GUARDED_BY(signaling_thread());
+
+ // List of content names for which the remote side triggered an ICE restart.
+ std::set<std::string> pending_ice_restarts_
+ RTC_GUARDED_BY(signaling_thread());
+
+ std::unique_ptr<LocalIceCredentialsToReplace>
+ local_ice_credentials_to_replace_ RTC_GUARDED_BY(signaling_thread());
+
+ bool remote_peer_supports_msid_ RTC_GUARDED_BY(signaling_thread()) = false;
+ bool is_negotiation_needed_ RTC_GUARDED_BY(signaling_thread()) = false;
+ uint32_t negotiation_needed_event_id_ RTC_GUARDED_BY(signaling_thread()) = 0;
+ bool update_negotiation_needed_on_empty_chain_
+ RTC_GUARDED_BY(signaling_thread()) = false;
+ // If PT demuxing is successfully negotiated one time we will allow PT
+ // demuxing for the rest of the session so that PT-based apps default to PT
+ // demuxing in follow-up O/A exchanges.
+ bool pt_demuxing_has_been_used_audio_ RTC_GUARDED_BY(signaling_thread()) =
+ false;
+ bool pt_demuxing_has_been_used_video_ RTC_GUARDED_BY(signaling_thread()) =
+ false;
+
+ // In Unified Plan, if we encounter remote SDP that does not contain an a=msid
+ // line we create and use a stream with a random ID for our receivers. This is
+ // to support legacy endpoints that do not support the a=msid attribute (as
+ // opposed to streamless tracks with "a=msid:-").
+ rtc::scoped_refptr<MediaStreamInterface> missing_msid_default_stream_
+ RTC_GUARDED_BY(signaling_thread());
+
+ SessionError session_error_ RTC_GUARDED_BY(signaling_thread()) =
+ SessionError::kNone;
+ std::string session_error_desc_ RTC_GUARDED_BY(signaling_thread());
+
+ // Member variables for caching global options.
+ cricket::AudioOptions audio_options_ RTC_GUARDED_BY(signaling_thread());
+ cricket::VideoOptions video_options_ RTC_GUARDED_BY(signaling_thread());
+
+ // A video bitrate allocator factory.
+ // This can be injected using the PeerConnectionDependencies,
+ // or else the CreateBuiltinVideoBitrateAllocatorFactory() will be called.
+ // Note that one can still choose to override this in a MediaEngine
+  // if one wants to.
+ std::unique_ptr<webrtc::VideoBitrateAllocatorFactory>
+ video_bitrate_allocator_factory_ RTC_GUARDED_BY(signaling_thread());
+
+ // Whether we are the initial offerer on the association. This
+ // determines the SSL role.
+ absl::optional<bool> initial_offerer_ RTC_GUARDED_BY(signaling_thread());
+
+ rtc::WeakPtrFactory<SdpOfferAnswerHandler> weak_ptr_factory_
+ RTC_GUARDED_BY(signaling_thread());
+};
+
+} // namespace webrtc
+
+#endif // PC_SDP_OFFER_ANSWER_H_
diff --git a/third_party/libwebrtc/pc/sdp_offer_answer_unittest.cc b/third_party/libwebrtc/pc/sdp_offer_answer_unittest.cc
new file mode 100644
index 0000000000..94ceff10ac
--- /dev/null
+++ b/third_party/libwebrtc/pc/sdp_offer_answer_unittest.cc
@@ -0,0 +1,1100 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+#include <utility>
+#include <vector>
+
+#include "absl/strings/str_replace.h"
+#include "api/audio/audio_mixer.h"
+#include "api/audio_codecs/builtin_audio_decoder_factory.h"
+#include "api/audio_codecs/builtin_audio_encoder_factory.h"
+#include "api/create_peerconnection_factory.h"
+#include "api/media_types.h"
+#include "api/peer_connection_interface.h"
+#include "api/rtp_transceiver_interface.h"
+#include "api/scoped_refptr.h"
+#include "api/video_codecs/video_decoder_factory_template.h"
+#include "api/video_codecs/video_decoder_factory_template_dav1d_adapter.h"
+#include "api/video_codecs/video_decoder_factory_template_libvpx_vp8_adapter.h"
+#include "api/video_codecs/video_decoder_factory_template_libvpx_vp9_adapter.h"
+#include "api/video_codecs/video_decoder_factory_template_open_h264_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template.h"
+#include "api/video_codecs/video_encoder_factory_template_libaom_av1_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template_libvpx_vp8_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template_libvpx_vp9_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template_open_h264_adapter.h"
+#include "modules/audio_device/include/audio_device.h"
+#include "modules/audio_processing/include/audio_processing.h"
+#include "p2p/base/port_allocator.h"
+#include "pc/peer_connection_wrapper.h"
+#include "pc/session_description.h"
+#include "pc/test/fake_audio_capture_module.h"
+#include "pc/test/mock_peer_connection_observers.h"
+#include "rtc_base/rtc_certificate_generator.h"
+#include "rtc_base/thread.h"
+#include "system_wrappers/include/metrics.h"
+#include "test/gtest.h"
+
+// This file contains unit tests that relate to the behavior of the
+// SdpOfferAnswer module.
+// Tests are written as integration tests with PeerConnection, since the
+// behaviors are still linked so closely that it is hard to test them in
+// isolation.
+
+namespace webrtc {
+
+using RTCConfiguration = PeerConnectionInterface::RTCConfiguration;
+
+namespace {
+
+// Spins up a new rtc::Thread and hands it back already running.
+std::unique_ptr<rtc::Thread> CreateAndStartThread() {
+  std::unique_ptr<rtc::Thread> signaling = rtc::Thread::Create();
+  signaling->Start();
+  return signaling;
+}
+
+} // namespace
+
+// Test fixture. Owns a PeerConnectionFactory whose signaling runs on a
+// dedicated thread (distinct from the test's main thread) so that
+// proxying/thread-handling behavior is exercised by the tests.
+class SdpOfferAnswerTest : public ::testing::Test {
+ public:
+  SdpOfferAnswerTest()
+      // Note: We use a PeerConnectionFactory with a distinct
+      // signaling thread, so that thread handling can be tested.
+      : signaling_thread_(CreateAndStartThread()),
+        pc_factory_(CreatePeerConnectionFactory(
+            nullptr,
+            nullptr,
+            signaling_thread_.get(),
+            FakeAudioCaptureModule::Create(),
+            CreateBuiltinAudioEncoderFactory(),
+            CreateBuiltinAudioDecoderFactory(),
+            std::make_unique<
+                VideoEncoderFactoryTemplate<LibvpxVp8EncoderTemplateAdapter,
+                                            LibvpxVp9EncoderTemplateAdapter,
+                                            OpenH264EncoderTemplateAdapter,
+                                            LibaomAv1EncoderTemplateAdapter>>(),
+            std::make_unique<
+                VideoDecoderFactoryTemplate<LibvpxVp8DecoderTemplateAdapter,
+                                            LibvpxVp9DecoderTemplateAdapter,
+                                            OpenH264DecoderTemplateAdapter,
+                                            Dav1dDecoderTemplateAdapter>>(),
+            nullptr /* audio_mixer */,
+            nullptr /* audio_processing */)) {
+    // Start every test from a clean metrics slate so EXPECT_METRIC_EQ only
+    // counts events produced by that test.
+    webrtc::metrics::Reset();
+  }
+
+  // Creates a peer connection using the default (Unified Plan) configuration.
+  std::unique_ptr<PeerConnectionWrapper> CreatePeerConnection() {
+    RTCConfiguration config;
+    config.sdp_semantics = SdpSemantics::kUnifiedPlan;
+    return CreatePeerConnection(config);
+  }
+
+  // Creates a peer connection with `config`, wiring up a mock observer and
+  // wrapping the result for convenient use in tests.
+  std::unique_ptr<PeerConnectionWrapper> CreatePeerConnection(
+      const RTCConfiguration& config) {
+    auto observer = std::make_unique<MockPeerConnectionObserver>();
+    auto result = pc_factory_->CreatePeerConnectionOrError(
+        config, PeerConnectionDependencies(observer.get()));
+    EXPECT_TRUE(result.ok());
+    observer->SetPeerConnectionInterface(result.value().get());
+    return std::make_unique<PeerConnectionWrapper>(
+        pc_factory_, result.MoveValue(), std::move(observer));
+  }
+
+ protected:
+  std::unique_ptr<rtc::Thread> signaling_thread_;
+  rtc::scoped_refptr<PeerConnectionFactoryInterface> pc_factory_;
+
+ private:
+  // NOTE(review): presumably registers the test's main thread with the rtc
+  // thread machinery for the duration of the test — confirm.
+  rtc::AutoThread main_thread_;
+};
+
+// Verifies that the transceiver surfaced via OnTrack is a proxied object
+// (i.e. safe to call from a thread other than the signaling thread).
+TEST_F(SdpOfferAnswerTest, OnTrackReturnsProxiedObject) {
+  auto caller = CreatePeerConnection();
+  auto callee = CreatePeerConnection();
+
+  // The returned handle is not needed; adding the transceiver is what makes
+  // the subsequent O/A exchange fire OnTrack on the callee.
+  caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+
+  ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
+  // Verify that callee->observer->OnTrack() has been called with a
+  // proxied transceiver object.
+  ASSERT_EQ(callee->observer()->on_track_transceivers_.size(), 1u);
+  auto transceiver = callee->observer()->on_track_transceivers_[0];
+  // Since the signaling thread is not the current thread,
+  // this will DCHECK if the transceiver is not proxied.
+  transceiver->stopped();
+}
+
+// A bundled audio and video m-line both claim payload type 111 for
+// different codecs (opus vs H264). The offer is still accepted, but the
+// collision is recorded in the ValidBundledPayloadTypes metric.
+TEST_F(SdpOfferAnswerTest, BundleRejectsCodecCollisionsAudioVideo) {
+  auto pc = CreatePeerConnection();
+  std::string sdp =
+      "v=0\r\n"
+      "o=- 0 3 IN IP4 127.0.0.1\r\n"
+      "s=-\r\n"
+      "t=0 0\r\n"
+      "a=group:BUNDLE 0 1\r\n"
+      "a=fingerprint:sha-1 "
+      "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n"
+      "a=setup:actpass\r\n"
+      "a=ice-ufrag:ETEn\r\n"
+      "a=ice-pwd:OtSK0WpNtpUjkY4+86js7Z/l\r\n"
+      "m=audio 9 UDP/TLS/RTP/SAVPF 111\r\n"
+      "c=IN IP4 0.0.0.0\r\n"
+      "a=rtcp-mux\r\n"
+      "a=sendonly\r\n"
+      "a=mid:0\r\n"
+      "a=rtpmap:111 opus/48000/2\r\n"
+      "m=video 9 UDP/TLS/RTP/SAVPF 111\r\n"
+      "c=IN IP4 0.0.0.0\r\n"
+      "a=rtcp-mux\r\n"
+      "a=sendonly\r\n"
+      "a=mid:1\r\n"
+      "a=rtpmap:111 H264/90000\r\n"
+      "a=fmtp:111 "
+      "level-asymmetry-allowed=1;packetization-mode=0;profile-level-id="
+      "42e01f\r\n";
+
+  auto offer = CreateSessionDescription(SdpType::kOffer, sdp);
+  ASSERT_NE(offer, nullptr);
+  RTCError error;
+  pc->SetRemoteDescription(std::move(offer), &error);
+  // No error is reported, but the collision bumps the metric counter.
+  EXPECT_TRUE(error.ok());
+  EXPECT_METRIC_EQ(
+      1, webrtc::metrics::NumEvents(
+             "WebRTC.PeerConnection.ValidBundledPayloadTypes", false));
+
+  // A collision inside a rejected m-line (port 0) must be tolerated.
+  pc = CreatePeerConnection();
+  auto rejected_offer = CreateSessionDescription(
+      SdpType::kOffer,
+      absl::StrReplaceAll(sdp, {{"m=video 9 ", "m=video 0 "}}));
+  pc->SetRemoteDescription(std::move(rejected_offer), &error);
+  EXPECT_TRUE(error.ok());
+  EXPECT_METRIC_EQ(1,
+                   webrtc::metrics::NumEvents(
+                       "WebRTC.PeerConnection.ValidBundledPayloadTypes", true));
+}
+
+// Two bundled video m-lines reuse payload type 111 for H264 with different
+// fmtp parameters (packetization-mode 0 vs 1). The offer is accepted but
+// the collision is counted in the ValidBundledPayloadTypes metric.
+TEST_F(SdpOfferAnswerTest, BundleRejectsCodecCollisionsVideoFmtp) {
+  auto pc = CreatePeerConnection();
+  std::string sdp =
+      "v=0\r\n"
+      "o=- 0 3 IN IP4 127.0.0.1\r\n"
+      "s=-\r\n"
+      "t=0 0\r\n"
+      "a=group:BUNDLE 0 1\r\n"
+      "a=fingerprint:sha-1 "
+      "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n"
+      "a=setup:actpass\r\n"
+      "a=ice-ufrag:ETEn\r\n"
+      "a=ice-pwd:OtSK0WpNtpUjkY4+86js7Z/l\r\n"
+      "m=video 9 UDP/TLS/RTP/SAVPF 111\r\n"
+      "c=IN IP4 0.0.0.0\r\n"
+      "a=rtcp-mux\r\n"
+      "a=sendonly\r\n"
+      "a=mid:0\r\n"
+      "a=rtpmap:111 H264/90000\r\n"
+      "a=fmtp:111 "
+      "level-asymmetry-allowed=1;packetization-mode=0;profile-level-id="
+      "42e01f\r\n"
+      "m=video 9 UDP/TLS/RTP/SAVPF 111\r\n"
+      "c=IN IP4 0.0.0.0\r\n"
+      "a=rtcp-mux\r\n"
+      "a=sendonly\r\n"
+      "a=mid:1\r\n"
+      "a=rtpmap:111 H264/90000\r\n"
+      "a=fmtp:111 "
+      "level-asymmetry-allowed=1;packetization-mode=1;profile-level-id="
+      "42e01f\r\n";
+
+  auto offer = CreateSessionDescription(SdpType::kOffer, sdp);
+  ASSERT_NE(offer, nullptr);
+  RTCError error;
+  pc->SetRemoteDescription(std::move(offer), &error);
+  EXPECT_TRUE(error.ok());
+  EXPECT_METRIC_EQ(
+      1, webrtc::metrics::NumEvents(
+             "WebRTC.PeerConnection.ValidBundledPayloadTypes", false));
+}
+
+// The same payload type mapped differently in m-lines that belong to two
+// *different* BUNDLE groups is fine and must not bump the metric.
+TEST_F(SdpOfferAnswerTest, BundleCodecCollisionInDifferentBundlesAllowed) {
+  auto pc = CreatePeerConnection();
+  std::string sdp =
+      "v=0\r\n"
+      "o=- 0 3 IN IP4 127.0.0.1\r\n"
+      "s=-\r\n"
+      "t=0 0\r\n"
+      "a=group:BUNDLE 0\r\n"
+      "a=group:BUNDLE 1\r\n"
+      "a=fingerprint:sha-1 "
+      "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n"
+      "a=setup:actpass\r\n"
+      "a=ice-ufrag:ETEn\r\n"
+      "a=ice-pwd:OtSK0WpNtpUjkY4+86js7Z/l\r\n"
+      "m=video 9 UDP/TLS/RTP/SAVPF 111\r\n"
+      "c=IN IP4 0.0.0.0\r\n"
+      "a=rtcp-mux\r\n"
+      "a=sendonly\r\n"
+      "a=mid:0\r\n"
+      "a=rtpmap:111 H264/90000\r\n"
+      "a=fmtp:111 "
+      "level-asymmetry-allowed=1;packetization-mode=0;profile-level-id="
+      "42e01f\r\n"
+      "m=video 9 UDP/TLS/RTP/SAVPF 111\r\n"
+      "c=IN IP4 0.0.0.0\r\n"
+      "a=rtcp-mux\r\n"
+      "a=sendonly\r\n"
+      "a=mid:1\r\n"
+      "a=rtpmap:111 H264/90000\r\n"
+      "a=fmtp:111 "
+      "level-asymmetry-allowed=1;packetization-mode=1;profile-level-id="
+      "42e01f\r\n";
+
+  auto offer = CreateSessionDescription(SdpType::kOffer, sdp);
+  ASSERT_NE(offer, nullptr);
+  RTCError error;
+  pc->SetRemoteDescription(std::move(offer), &error);
+  EXPECT_TRUE(error.ok());
+  // No cross-bundle collision: the "invalid" bucket stays empty.
+  EXPECT_METRIC_EQ(
+      0, webrtc::metrics::NumEvents(
+             "WebRTC.PeerConnection.ValidBundledPayloadTypes", false));
+}
+
+// Two bundled m-lines map extmap id 3 to the *same* URI; this is legal and
+// the offer must be accepted. (Only acceptance is asserted here.)
+TEST_F(SdpOfferAnswerTest, BundleMeasuresHeaderExtensionIdCollision) {
+  auto pc = CreatePeerConnection();
+  std::string sdp =
+      "v=0\r\n"
+      "o=- 0 3 IN IP4 127.0.0.1\r\n"
+      "s=-\r\n"
+      "t=0 0\r\n"
+      "a=group:BUNDLE 0 1\r\n"
+      "a=fingerprint:sha-1 "
+      "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n"
+      "a=setup:actpass\r\n"
+      "a=ice-ufrag:ETEn\r\n"
+      "a=ice-pwd:OtSK0WpNtpUjkY4+86js7Z/l\r\n"
+      "m=audio 9 UDP/TLS/RTP/SAVPF 111\r\n"
+      "c=IN IP4 0.0.0.0\r\n"
+      "a=rtcp-mux\r\n"
+      "a=sendonly\r\n"
+      "a=mid:0\r\n"
+      "a=rtpmap:111 opus/48000/2\r\n"
+      "a=extmap:3 "
+      "http://www.ietf.org/id/"
+      "draft-holmer-rmcat-transport-wide-cc-extensions-01\r\n"
+      "m=video 9 UDP/TLS/RTP/SAVPF 112\r\n"
+      "c=IN IP4 0.0.0.0\r\n"
+      "a=rtcp-mux\r\n"
+      "a=sendonly\r\n"
+      "a=mid:1\r\n"
+      "a=rtpmap:112 VP8/90000\r\n"
+      "a=extmap:3 "
+      "http://www.ietf.org/id/"
+      "draft-holmer-rmcat-transport-wide-cc-extensions-01\r\n";
+  auto offer = CreateSessionDescription(SdpType::kOffer, sdp);
+  ASSERT_NE(offer, nullptr);
+  RTCError error;
+  pc->SetRemoteDescription(std::move(offer), &error);
+  EXPECT_TRUE(error.ok());
+}
+
+// extmap:3 is used with two different URIs which is not allowed.
+TEST_F(SdpOfferAnswerTest, BundleRejectsHeaderExtensionIdCollision) {
+ auto pc = CreatePeerConnection();
+ std::string sdp =
+ "v=0\r\n"
+ "o=- 0 3 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=group:BUNDLE 0 1\r\n"
+ "a=fingerprint:sha-1 "
+ "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n"
+ "a=setup:actpass\r\n"
+ "a=ice-ufrag:ETEn\r\n"
+ "a=ice-pwd:OtSK0WpNtpUjkY4+86js7Z/l\r\n"
+ "m=audio 9 UDP/TLS/RTP/SAVPF 111\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtcp-mux\r\n"
+ "a=sendonly\r\n"
+ "a=mid:0\r\n"
+ "a=rtpmap:111 opus/48000/2\r\n"
+ "a=extmap:3 "
+ "http://www.ietf.org/id/"
+ "draft-holmer-rmcat-transport-wide-cc-extensions-01\r\n"
+ "m=video 9 UDP/TLS/RTP/SAVPF 112\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtcp-mux\r\n"
+ "a=sendonly\r\n"
+ "a=mid:1\r\n"
+ "a=rtpmap:112 VP8/90000\r\n"
+ "a=extmap:3 urn:3gpp:video-orientation\r\n";
+ auto desc = CreateSessionDescription(SdpType::kOffer, sdp);
+ ASSERT_NE(desc, nullptr);
+ RTCError error;
+ pc->SetRemoteDescription(std::move(desc), &error);
+ EXPECT_FALSE(error.ok());
+ EXPECT_EQ(error.type(), RTCErrorType::INVALID_PARAMETER);
+}
+
+// transport-wide cc is negotiated with two different ids 3 and 4.
+// This is not a good idea but tolerable.
+TEST_F(SdpOfferAnswerTest, BundleAcceptsDifferentIdsForSameExtension) {
+ auto pc = CreatePeerConnection();
+ std::string sdp =
+ "v=0\r\n"
+ "o=- 0 3 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=group:BUNDLE 0 1\r\n"
+ "a=fingerprint:sha-1 "
+ "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n"
+ "a=setup:actpass\r\n"
+ "a=ice-ufrag:ETEn\r\n"
+ "a=ice-pwd:OtSK0WpNtpUjkY4+86js7Z/l\r\n"
+ "m=audio 9 UDP/TLS/RTP/SAVPF 111\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtcp-mux\r\n"
+ "a=sendonly\r\n"
+ "a=mid:0\r\n"
+ "a=rtpmap:111 opus/48000/2\r\n"
+ "a=extmap:3 "
+ "http://www.ietf.org/id/"
+ "draft-holmer-rmcat-transport-wide-cc-extensions-01\r\n"
+ "m=video 9 UDP/TLS/RTP/SAVPF 112\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtcp-mux\r\n"
+ "a=sendonly\r\n"
+ "a=mid:1\r\n"
+ "a=rtpmap:112 VP8/90000\r\n"
+ "a=extmap:4 "
+ "http://www.ietf.org/id/"
+ "draft-holmer-rmcat-transport-wide-cc-extensions-01\r\n";
+ auto desc = CreateSessionDescription(SdpType::kOffer, sdp);
+ ASSERT_NE(desc, nullptr);
+ RTCError error;
+ pc->SetRemoteDescription(std::move(desc), &error);
+ EXPECT_TRUE(error.ok());
+}
+
+TEST_F(SdpOfferAnswerTest, LargeMidsAreRejected) {
+ auto pc = CreatePeerConnection();
+ std::string sdp =
+ "v=0\r\n"
+ "o=- 0 3 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=fingerprint:sha-1 "
+ "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n"
+ "a=setup:actpass\r\n"
+ "a=ice-ufrag:ETEn\r\n"
+ "a=ice-pwd:OtSK0WpNtpUjkY4+86js7Z/l\r\n"
+ "m=video 9 UDP/TLS/RTP/SAVPF 111\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtcp-mux\r\n"
+ "a=sendonly\r\n"
+ "a=rtpmap:111 VP8/90000\r\n"
+ "a=mid:01234567890123456\r\n";
+ auto desc = CreateSessionDescription(SdpType::kOffer, sdp);
+ ASSERT_NE(desc, nullptr);
+ RTCError error;
+ pc->SetRemoteDescription(std::move(desc), &error);
+ EXPECT_FALSE(error.ok());
+ EXPECT_EQ(error.type(), RTCErrorType::INVALID_PARAMETER);
+}
+
+TEST_F(SdpOfferAnswerTest, RollbackPreservesAddTrackMid) {
+ std::string sdp =
+ "v=0\r\n"
+ "o=- 4131505339648218884 3 IN IP4 **-----**\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=ice-lite\r\n"
+ "a=msid-semantic: WMS 100030878598094:4Qs1PjbLM32RK5u3\r\n"
+ "a=ice-ufrag:zGWFZ+fVXDeN6UoI/136\r\n"
+ "a=ice-pwd:9AUNgUqRNI5LSIrC1qFD2iTR\r\n"
+ "a=fingerprint:sha-256 "
+ "AD:52:52:E0:B1:37:34:21:0E:15:8E:B7:56:56:7B:B4:39:0E:6D:1C:F5:84:A7:EE:"
+ "B5:27:3E:30:B1:7D:69:42\r\n"
+ "a=extmap:1 urn:ietf:params:rtp-hdrext:ssrc-audio-level\r\n"
+ "a=extmap:4 urn:ietf:params:rtp-hdrext:sdes:mid\r\n"
+ "a=group:BUNDLE 0 1\r\n"
+ "m=audio 40005 UDP/TLS/RTP/SAVPF 111\r\n"
+ "a=rtpmap:111 opus/48000/2\r\n"
+ "a=fmtp:111 "
+ "maxaveragebitrate=20000;maxplaybackrate=16000;minptime=10;usedtx=1;"
+ "useinbandfec=1;stereo=0\r\n"
+ "a=rtcp-fb:111 nack\r\n"
+ "a=setup:passive\r\n"
+ "a=mid:0\r\n"
+ "a=msid:- 75156ebd-e705-4da1-920e-2dac39794dfd\r\n"
+ "a=ptime:60\r\n"
+ "a=recvonly\r\n"
+ "a=rtcp-mux\r\n"
+ "m=audio 40005 UDP/TLS/RTP/SAVPF 111\r\n"
+ "a=rtpmap:111 opus/48000/2\r\n"
+ "a=fmtp:111 "
+ "maxaveragebitrate=20000;maxplaybackrate=16000;minptime=10;usedtx=1;"
+ "useinbandfec=1;stereo=0\r\n"
+ "a=rtcp-fb:111 nack\r\n"
+ "a=setup:passive\r\n"
+ "a=mid:1\r\n"
+ "a=msid:100030878598094:4Qs1PjbLM32RK5u3 9695447562408476674\r\n"
+ "a=ptime:60\r\n"
+ "a=sendonly\r\n"
+ "a=ssrc:2565730539 cname:100030878598094:4Qs1PjbLM32RK5u3\r\n"
+ "a=rtcp-mux\r\n";
+ auto pc = CreatePeerConnection();
+ auto audio_track = pc->AddAudioTrack("audio_track", {});
+ auto first_transceiver = pc->pc()->GetTransceivers()[0];
+ EXPECT_FALSE(first_transceiver->mid().has_value());
+ auto desc = CreateSessionDescription(SdpType::kOffer, sdp);
+ ASSERT_NE(desc, nullptr);
+ RTCError error;
+ ASSERT_TRUE(pc->SetRemoteDescription(std::move(desc)));
+ pc->CreateAnswerAndSetAsLocal();
+ auto saved_mid = first_transceiver->mid();
+ EXPECT_TRUE(saved_mid.has_value());
+ auto offer_before_rollback = pc->CreateOfferAndSetAsLocal();
+ EXPECT_EQ(saved_mid, first_transceiver->mid());
+ auto rollback = pc->CreateRollback();
+ ASSERT_NE(rollback, nullptr);
+ ASSERT_TRUE(pc->SetLocalDescription(std::move(rollback)));
+ EXPECT_EQ(saved_mid, first_transceiver->mid());
+ auto offer2 = pc->CreateOfferAndSetAsLocal();
+ ASSERT_NE(offer2, nullptr);
+ EXPECT_EQ(saved_mid, first_transceiver->mid());
+}
+
+#ifdef WEBRTC_HAVE_SCTP
+
+TEST_F(SdpOfferAnswerTest, RejectedDataChannelsDoNotGetReoffered) {
+ auto pc = CreatePeerConnection();
+ EXPECT_TRUE(pc->pc()->CreateDataChannelOrError("dc", nullptr).ok());
+ EXPECT_TRUE(pc->CreateOfferAndSetAsLocal());
+ auto mid = pc->pc()->local_description()->description()->contents()[0].mid();
+
+ // An answer that rejects the datachannel content.
+ std::string sdp =
+ "v=0\r\n"
+ "o=- 4131505339648218884 3 IN IP4 **-----**\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=ice-ufrag:zGWFZ+fVXDeN6UoI/136\r\n"
+ "a=ice-pwd:9AUNgUqRNI5LSIrC1qFD2iTR\r\n"
+ "a=fingerprint:sha-256 "
+ "AD:52:52:E0:B1:37:34:21:0E:15:8E:B7:56:56:7B:B4:39:0E:6D:1C:F5:84:A7:EE:"
+ "B5:27:3E:30:B1:7D:69:42\r\n"
+ "a=setup:passive\r\n"
+ "m=application 0 UDP/DTLS/SCTP webrtc-datachannel\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=sctp-port:5000\r\n"
+ "a=max-message-size:262144\r\n"
+ "a=mid:" +
+ mid + "\r\n";
+ auto answer = CreateSessionDescription(SdpType::kAnswer, sdp);
+ ASSERT_TRUE(pc->SetRemoteDescription(std::move(answer)));
+ // The subsequent offer should not recycle the m-line since the existing data
+ // channel is closed.
+ auto offer = pc->CreateOffer();
+ const auto& offer_contents = offer->description()->contents();
+ ASSERT_EQ(offer_contents.size(), 1u);
+ EXPECT_EQ(offer_contents[0].mid(), mid);
+ EXPECT_EQ(offer_contents[0].rejected, true);
+}
+
+TEST_F(SdpOfferAnswerTest, RejectedDataChannelsDoGetReofferedWhenActive) {
+ auto pc = CreatePeerConnection();
+ EXPECT_TRUE(pc->pc()->CreateDataChannelOrError("dc", nullptr).ok());
+ EXPECT_TRUE(pc->CreateOfferAndSetAsLocal());
+ auto mid = pc->pc()->local_description()->description()->contents()[0].mid();
+
+ // An answer that rejects the datachannel content.
+ std::string sdp =
+ "v=0\r\n"
+ "o=- 4131505339648218884 3 IN IP4 **-----**\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=ice-ufrag:zGWFZ+fVXDeN6UoI/136\r\n"
+ "a=ice-pwd:9AUNgUqRNI5LSIrC1qFD2iTR\r\n"
+ "a=fingerprint:sha-256 "
+ "AD:52:52:E0:B1:37:34:21:0E:15:8E:B7:56:56:7B:B4:39:0E:6D:1C:F5:84:A7:EE:"
+ "B5:27:3E:30:B1:7D:69:42\r\n"
+ "a=setup:passive\r\n"
+ "m=application 0 UDP/DTLS/SCTP webrtc-datachannel\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=sctp-port:5000\r\n"
+ "a=max-message-size:262144\r\n"
+ "a=mid:" +
+ mid + "\r\n";
+ auto answer = CreateSessionDescription(SdpType::kAnswer, sdp);
+ ASSERT_TRUE(pc->SetRemoteDescription(std::move(answer)));
+
+ // The subsequent offer should recycle the m-line when there is a new data
+ // channel.
+ EXPECT_TRUE(pc->pc()->CreateDataChannelOrError("dc2", nullptr).ok());
+ EXPECT_TRUE(pc->pc()->ShouldFireNegotiationNeededEvent(
+ pc->observer()->latest_negotiation_needed_event()));
+
+ auto offer = pc->CreateOffer();
+ const auto& offer_contents = offer->description()->contents();
+ ASSERT_EQ(offer_contents.size(), 1u);
+ EXPECT_EQ(offer_contents[0].mid(), mid);
+ EXPECT_EQ(offer_contents[0].rejected, false);
+}
+
+#endif // WEBRTC_HAVE_SCTP
+
+TEST_F(SdpOfferAnswerTest, SimulcastAnswerWithNoRidsIsRejected) {
+ auto pc = CreatePeerConnection();
+
+ RtpTransceiverInit init;
+ RtpEncodingParameters rid1;
+ rid1.rid = "1";
+ init.send_encodings.push_back(rid1);
+ RtpEncodingParameters rid2;
+ rid2.rid = "2";
+ init.send_encodings.push_back(rid2);
+
+ auto transceiver = pc->AddTransceiver(cricket::MEDIA_TYPE_VIDEO, init);
+ EXPECT_TRUE(pc->CreateOfferAndSetAsLocal());
+ auto mid = pc->pc()->local_description()->description()->contents()[0].mid();
+
+ // A SDP answer with simulcast but without mid/rid extensions.
+ std::string sdp =
+ "v=0\r\n"
+ "o=- 4131505339648218884 3 IN IP4 **-----**\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=ice-ufrag:zGWFZ+fVXDeN6UoI/136\r\n"
+ "a=ice-pwd:9AUNgUqRNI5LSIrC1qFD2iTR\r\n"
+ "a=fingerprint:sha-256 "
+ "AD:52:52:E0:B1:37:34:21:0E:15:8E:B7:56:56:7B:B4:39:0E:6D:1C:F5:84:A7:EE:"
+ "B5:27:3E:30:B1:7D:69:42\r\n"
+ "a=setup:passive\r\n"
+ "m=video 9 UDP/TLS/RTP/SAVPF 96\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtcp:9 IN IP4 0.0.0.0\r\n"
+ "a=mid:" +
+ mid +
+ "\r\n"
+ "a=recvonly\r\n"
+ "a=rtcp-mux\r\n"
+ "a=rtcp-rsize\r\n"
+ "a=rtpmap:96 VP8/90000\r\n"
+ "a=rid:1 recv\r\n"
+ "a=rid:2 recv\r\n"
+ "a=simulcast:recv 1;2\r\n";
+ std::string extensions =
+ "a=extmap:9 urn:ietf:params:rtp-hdrext:sdes:mid\r\n"
+ "a=extmap:10 urn:ietf:params:rtp-hdrext:sdes:rtp-stream-id\r\n";
+ auto answer = CreateSessionDescription(SdpType::kAnswer, sdp);
+ EXPECT_FALSE(pc->SetRemoteDescription(std::move(answer)));
+
+ auto answer_with_extensions =
+ CreateSessionDescription(SdpType::kAnswer, sdp + extensions);
+ EXPECT_TRUE(pc->SetRemoteDescription(std::move(answer_with_extensions)));
+
+ // Tolerate the lack of mid/rid extensions in rejected m-lines.
+ EXPECT_TRUE(pc->CreateOfferAndSetAsLocal());
+ auto rejected_answer = CreateSessionDescription(
+ SdpType::kAnswer,
+ absl::StrReplaceAll(sdp, {{"m=video 9 ", "m=video 0 "}}));
+ EXPECT_TRUE(pc->SetRemoteDescription(std::move(rejected_answer)));
+}
+
+TEST_F(SdpOfferAnswerTest, ExpectAllSsrcsSpecifiedInSsrcGroupFid) {
+ auto pc = CreatePeerConnection();
+ std::string sdp =
+ "v=0\r\n"
+ "o=- 0 3 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=group:BUNDLE 0\r\n"
+ "a=fingerprint:sha-1 "
+ "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n"
+ "a=setup:actpass\r\n"
+ "a=ice-ufrag:ETEn\r\n"
+ "a=ice-pwd:OtSK0WpNtpUjkY4+86js7Z/l\r\n"
+ "m=video 9 UDP/TLS/RTP/SAVPF 96 97\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtcp-mux\r\n"
+ "a=sendonly\r\n"
+ "a=mid:0\r\n"
+ "a=rtpmap:96 H264/90000\r\n"
+ "a=fmtp:96 "
+ "level-asymmetry-allowed=1;packetization-mode=1;profile-level-id="
+ "42e01f\r\n"
+ "a=rtpmap:97 rtx/90000\r\n"
+ "a=fmtp:97 apt=96\r\n"
+ "a=ssrc-group:FID 1 2\r\n"
+ "a=ssrc:1 cname:test\r\n";
+ auto offer = CreateSessionDescription(SdpType::kOffer, sdp);
+ RTCError error;
+ pc->SetRemoteDescription(std::move(offer), &error);
+ EXPECT_FALSE(error.ok());
+ EXPECT_EQ(error.type(), RTCErrorType::INVALID_PARAMETER);
+}
+
+TEST_F(SdpOfferAnswerTest, ExpectAllSsrcsSpecifiedInSsrcGroupFecFr) {
+ auto pc = CreatePeerConnection();
+ std::string sdp =
+ "v=0\r\n"
+ "o=- 0 3 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=group:BUNDLE 0\r\n"
+ "a=fingerprint:sha-1 "
+ "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n"
+ "a=setup:actpass\r\n"
+ "a=ice-ufrag:ETEn\r\n"
+ "a=ice-pwd:OtSK0WpNtpUjkY4+86js7Z/l\r\n"
+ "m=video 9 UDP/TLS/RTP/SAVPF 96 98\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtcp-mux\r\n"
+ "a=sendonly\r\n"
+ "a=mid:0\r\n"
+ "a=rtpmap:96 H264/90000\r\n"
+ "a=fmtp:96 "
+ "level-asymmetry-allowed=1;packetization-mode=1;profile-level-id="
+ "42e01f\r\n"
+ "a=rtpmap:98 flexfec-03/90000\r\n"
+ "a=fmtp:98 repair-window=10000000\r\n"
+ "a=ssrc-group:FEC-FR 1 2\r\n"
+ "a=ssrc:1 cname:test\r\n";
+ auto offer = CreateSessionDescription(SdpType::kOffer, sdp);
+ RTCError error;
+ pc->SetRemoteDescription(std::move(offer), &error);
+ EXPECT_FALSE(error.ok());
+ EXPECT_EQ(error.type(), RTCErrorType::INVALID_PARAMETER);
+}
+
+TEST_F(SdpOfferAnswerTest, ExpectTwoSsrcsInSsrcGroupFid) {
+ auto pc = CreatePeerConnection();
+ std::string sdp =
+ "v=0\r\n"
+ "o=- 0 3 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=group:BUNDLE 0\r\n"
+ "a=fingerprint:sha-1 "
+ "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n"
+ "a=setup:actpass\r\n"
+ "a=ice-ufrag:ETEn\r\n"
+ "a=ice-pwd:OtSK0WpNtpUjkY4+86js7Z/l\r\n"
+ "m=video 9 UDP/TLS/RTP/SAVPF 96 97\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtcp-mux\r\n"
+ "a=sendonly\r\n"
+ "a=mid:0\r\n"
+ "a=rtpmap:96 H264/90000\r\n"
+ "a=fmtp:96 "
+ "level-asymmetry-allowed=1;packetization-mode=1;profile-level-id="
+ "42e01f\r\n"
+ "a=rtpmap:97 rtx/90000\r\n"
+ "a=fmtp:97 apt=96\r\n"
+ "a=ssrc-group:FID 1 2 3\r\n"
+ "a=ssrc:1 cname:test\r\n"
+ "a=ssrc:2 cname:test\r\n"
+ "a=ssrc:3 cname:test\r\n";
+ auto offer = CreateSessionDescription(SdpType::kOffer, sdp);
+ RTCError error;
+ pc->SetRemoteDescription(std::move(offer), &error);
+ EXPECT_FALSE(error.ok());
+ EXPECT_EQ(error.type(), RTCErrorType::INVALID_PARAMETER);
+}
+
+TEST_F(SdpOfferAnswerTest, ExpectTwoSsrcsInSsrcGroupFecFr) {
+ auto pc = CreatePeerConnection();
+ std::string sdp =
+ "v=0\r\n"
+ "o=- 0 3 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=group:BUNDLE 0\r\n"
+ "a=fingerprint:sha-1 "
+ "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n"
+ "a=setup:actpass\r\n"
+ "a=ice-ufrag:ETEn\r\n"
+ "a=ice-pwd:OtSK0WpNtpUjkY4+86js7Z/l\r\n"
+ "m=video 9 UDP/TLS/RTP/SAVPF 96 98\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtcp-mux\r\n"
+ "a=sendonly\r\n"
+ "a=mid:0\r\n"
+ "a=rtpmap:96 H264/90000\r\n"
+ "a=fmtp:96 "
+ "level-asymmetry-allowed=1;packetization-mode=1;profile-level-id="
+ "42e01f\r\n"
+ "a=rtpmap:98 flexfec-03/90000\r\n"
+ "a=fmtp:98 repair-window=10000000\r\n"
+ "a=ssrc-group:FEC-FR 1 2 3\r\n"
+ "a=ssrc:1 cname:test\r\n"
+ "a=ssrc:2 cname:test\r\n"
+ "a=ssrc:3 cname:test\r\n";
+ auto offer = CreateSessionDescription(SdpType::kOffer, sdp);
+ RTCError error;
+ pc->SetRemoteDescription(std::move(offer), &error);
+ EXPECT_FALSE(error.ok());
+ EXPECT_EQ(error.type(), RTCErrorType::INVALID_PARAMETER);
+}
+
+TEST_F(SdpOfferAnswerTest, ExpectAtMostFourSsrcsInSsrcGroupSIM) {
+ auto pc = CreatePeerConnection();
+ std::string sdp =
+ "v=0\r\n"
+ "o=- 0 3 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=group:BUNDLE 0\r\n"
+ "a=fingerprint:sha-1 "
+ "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n"
+ "a=setup:actpass\r\n"
+ "a=ice-ufrag:ETEn\r\n"
+ "a=ice-pwd:OtSK0WpNtpUjkY4+86js7Z/l\r\n"
+ "m=video 9 UDP/TLS/RTP/SAVPF 96 97\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtcp-mux\r\n"
+ "a=sendonly\r\n"
+ "a=mid:0\r\n"
+ "a=rtpmap:96 H264/90000\r\n"
+ "a=fmtp:96 "
+ "level-asymmetry-allowed=1;packetization-mode=1;profile-level-id="
+ "42e01f\r\n"
+ "a=rtpmap:97 rtx/90000\r\n"
+ "a=fmtp:97 apt=96\r\n"
+ "a=ssrc-group:SIM 1 2 3 4\r\n"
+ "a=ssrc:1 cname:test\r\n"
+ "a=ssrc:2 cname:test\r\n"
+ "a=ssrc:3 cname:test\r\n"
+ "a=ssrc:4 cname:test\r\n";
+ auto offer = CreateSessionDescription(SdpType::kOffer, sdp);
+ RTCError error;
+ pc->SetRemoteDescription(std::move(offer), &error);
+ EXPECT_FALSE(error.ok());
+ EXPECT_EQ(error.type(), RTCErrorType::INVALID_PARAMETER);
+}
+
+TEST_F(SdpOfferAnswerTest, DuplicateSsrcsDisallowedInLocalDescription) {
+ auto pc = CreatePeerConnection();
+ pc->AddAudioTrack("audio_track", {});
+ pc->AddVideoTrack("video_track", {});
+ auto offer = pc->CreateOffer();
+ auto& offer_contents = offer->description()->contents();
+ ASSERT_EQ(offer_contents.size(), 2u);
+ uint32_t second_ssrc = offer_contents[1].media_description()->first_ssrc();
+
+ offer->description()
+ ->contents()[0]
+ .media_description()
+ ->mutable_streams()[0]
+ .ssrcs[0] = second_ssrc;
+ EXPECT_FALSE(pc->SetLocalDescription(std::move(offer)));
+}
+
+TEST_F(SdpOfferAnswerTest,
+ DuplicateSsrcsAcrossMlinesDisallowedInLocalDescriptionTwoSsrc) {
+ auto pc = CreatePeerConnection();
+
+ pc->AddAudioTrack("audio_track", {});
+ pc->AddVideoTrack("video_track", {});
+ auto offer = pc->CreateOffer();
+ auto& offer_contents = offer->description()->contents();
+ ASSERT_EQ(offer_contents.size(), 2u);
+ uint32_t audio_ssrc = offer_contents[0].media_description()->first_ssrc();
+ ASSERT_EQ(offer_contents[1].media_description()->streams().size(), 1u);
+ auto& video_stream = offer->description()
+ ->contents()[1]
+ .media_description()
+ ->mutable_streams()[0];
+ ASSERT_EQ(video_stream.ssrcs.size(), 2u);
+ ASSERT_EQ(video_stream.ssrc_groups.size(), 1u);
+ video_stream.ssrcs[1] = audio_ssrc;
+ video_stream.ssrc_groups[0].ssrcs[1] = audio_ssrc;
+ video_stream.ssrc_groups[0].semantics = cricket::kSimSsrcGroupSemantics;
+ std::string sdp;
+ offer->ToString(&sdp);
+
+ // Trim the last two lines which contain ssrc-specific attributes
+ // that we change/munge above. Guarded with expectation about what
+ // should be removed in case the SDP generation changes.
+ size_t end = sdp.rfind("\r\n");
+ end = sdp.rfind("\r\n", end - 2);
+ end = sdp.rfind("\r\n", end - 2);
+ EXPECT_EQ(sdp.substr(end + 2), "a=ssrc:" + rtc::ToString(audio_ssrc) +
+ " cname:" + video_stream.cname +
+ "\r\n"
+ "a=ssrc:" +
+ rtc::ToString(audio_ssrc) +
+ " msid:- video_track\r\n");
+
+ auto modified_offer =
+ CreateSessionDescription(SdpType::kOffer, sdp.substr(0, end + 2));
+ EXPECT_FALSE(pc->SetLocalDescription(std::move(modified_offer)));
+}
+
+TEST_F(SdpOfferAnswerTest,
+ DuplicateSsrcsAcrossMlinesDisallowedInLocalDescriptionThreeSsrcs) {
+ auto pc = CreatePeerConnection();
+
+ pc->AddAudioTrack("audio_track", {});
+ pc->AddVideoTrack("video_track", {});
+ auto offer = pc->CreateOffer();
+ auto& offer_contents = offer->description()->contents();
+ ASSERT_EQ(offer_contents.size(), 2u);
+ uint32_t audio_ssrc = offer_contents[0].media_description()->first_ssrc();
+ ASSERT_EQ(offer_contents[1].media_description()->streams().size(), 1u);
+ auto& video_stream = offer->description()
+ ->contents()[1]
+ .media_description()
+ ->mutable_streams()[0];
+ ASSERT_EQ(video_stream.ssrcs.size(), 2u);
+ ASSERT_EQ(video_stream.ssrc_groups.size(), 1u);
+ video_stream.ssrcs.push_back(audio_ssrc);
+ video_stream.ssrc_groups[0].ssrcs.push_back(audio_ssrc);
+ video_stream.ssrc_groups[0].semantics = cricket::kSimSsrcGroupSemantics;
+ std::string sdp;
+ offer->ToString(&sdp);
+
+ // Trim the last two lines which contain ssrc-specific attributes
+ // that we change/munge above. Guarded with expectation about what
+ // should be removed in case the SDP generation changes.
+ size_t end = sdp.rfind("\r\n");
+ end = sdp.rfind("\r\n", end - 2);
+ end = sdp.rfind("\r\n", end - 2);
+ EXPECT_EQ(sdp.substr(end + 2), "a=ssrc:" + rtc::ToString(audio_ssrc) +
+ " cname:" + video_stream.cname +
+ "\r\n"
+ "a=ssrc:" +
+ rtc::ToString(audio_ssrc) +
+ " msid:- video_track\r\n");
+
+ auto modified_offer =
+ CreateSessionDescription(SdpType::kOffer, sdp.substr(0, end + 2));
+ EXPECT_FALSE(pc->SetLocalDescription(std::move(modified_offer)));
+}
+
+TEST_F(SdpOfferAnswerTest, AllowOnlyOneSsrcGroupPerSemanticAndPrimarySsrc) {
+ auto pc = CreatePeerConnection();
+
+ pc->AddAudioTrack("audio_track", {});
+ pc->AddVideoTrack("video_track", {});
+ auto offer = pc->CreateOffer();
+ auto& offer_contents = offer->description()->contents();
+ ASSERT_EQ(offer_contents.size(), 2u);
+ uint32_t audio_ssrc = offer_contents[0].media_description()->first_ssrc();
+ ASSERT_EQ(offer_contents[1].media_description()->streams().size(), 1u);
+ auto& video_stream = offer->description()
+ ->contents()[1]
+ .media_description()
+ ->mutable_streams()[0];
+ ASSERT_EQ(video_stream.ssrcs.size(), 2u);
+ ASSERT_EQ(video_stream.ssrc_groups.size(), 1u);
+ video_stream.ssrcs.push_back(audio_ssrc);
+ video_stream.ssrc_groups.push_back(
+ {cricket::kFidSsrcGroupSemantics, {video_stream.ssrcs[0], audio_ssrc}});
+ std::string sdp;
+ offer->ToString(&sdp);
+
+ // Trim the last two lines which contain ssrc-specific attributes
+ // that we change/munge above. Guarded with expectation about what
+ // should be removed in case the SDP generation changes.
+ size_t end = sdp.rfind("\r\n");
+ end = sdp.rfind("\r\n", end - 2);
+ end = sdp.rfind("\r\n", end - 2);
+ EXPECT_EQ(sdp.substr(end + 2), "a=ssrc:" + rtc::ToString(audio_ssrc) +
+ " cname:" + video_stream.cname +
+ "\r\n"
+ "a=ssrc:" +
+ rtc::ToString(audio_ssrc) +
+ " msid:- video_track\r\n");
+
+ auto modified_offer =
+ CreateSessionDescription(SdpType::kOffer, sdp.substr(0, end + 2));
+ EXPECT_FALSE(pc->SetLocalDescription(std::move(modified_offer)));
+}
+
+TEST_F(SdpOfferAnswerTest, OfferWithRtxAndNoMsidIsNotRejected) {
+ auto pc = CreatePeerConnection();
+ std::string sdp =
+ "v=0\r\n"
+ "o=- 0 3 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=group:BUNDLE 0\r\n"
+ "a=fingerprint:sha-1 "
+ "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n"
+ "a=setup:actpass\r\n"
+ "a=ice-ufrag:ETEn\r\n"
+ "a=ice-pwd:OtSK0WpNtpUjkY4+86js7Z/l\r\n"
+ "m=video 9 UDP/TLS/RTP/SAVPF 96 97\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtcp-mux\r\n"
+ "a=sendonly\r\n"
+ "a=mid:0\r\n"
+ // "a=msid:stream obsoletetrack\r\n"
+ "a=rtpmap:96 VP8/90000\r\n"
+ "a=rtpmap:97 rtx/90000\r\n"
+ "a=fmtp:97 apt=96\r\n"
+ "a=ssrc-group:FID 1 2\r\n"
+ "a=ssrc:1 cname:test\r\n"
+ "a=ssrc:2 cname:test\r\n";
+ auto offer = CreateSessionDescription(SdpType::kOffer, sdp);
+ EXPECT_TRUE(pc->SetRemoteDescription(std::move(offer)));
+}
+
+TEST_F(SdpOfferAnswerTest, RejectsAnswerWithInvalidTransport) {
+ auto pc1 = CreatePeerConnection();
+ pc1->AddAudioTrack("audio_track", {});
+ auto pc2 = CreatePeerConnection();
+ pc2->AddAudioTrack("anotheraudio_track", {});
+
+ auto initial_offer = pc1->CreateOfferAndSetAsLocal();
+ ASSERT_EQ(initial_offer->description()->contents().size(), 1u);
+ auto mid = initial_offer->description()->contents()[0].mid();
+
+ EXPECT_TRUE(pc2->SetRemoteDescription(std::move(initial_offer)));
+ auto initial_answer = pc2->CreateAnswerAndSetAsLocal();
+
+ std::string sdp;
+ initial_answer->ToString(&sdp);
+ EXPECT_TRUE(pc1->SetRemoteDescription(std::move(initial_answer)));
+
+ auto transceivers = pc1->pc()->GetTransceivers();
+ ASSERT_EQ(transceivers.size(), 1u);
+ // This stops the only transport.
+ transceivers[0]->StopStandard();
+
+ auto subsequent_offer = pc1->CreateOfferAndSetAsLocal();
+ // But the remote answers with a non-rejected m-line which is not valid.
+ auto bad_answer = CreateSessionDescription(
+ SdpType::kAnswer,
+ absl::StrReplaceAll(sdp, {{"a=group:BUNDLE " + mid + "\r\n", ""}}));
+
+ RTCError error;
+ pc1->SetRemoteDescription(std::move(bad_answer), &error);
+ EXPECT_FALSE(error.ok());
+ EXPECT_EQ(error.type(), RTCErrorType::INVALID_PARAMETER);
+}
+
+TEST_F(SdpOfferAnswerTest, SdpMungingWithInvalidPayloadTypeIsRejected) {
+ auto pc = CreatePeerConnection();
+ pc->AddAudioTrack("audio_track", {});
+
+ auto offer = pc->CreateOffer();
+ ASSERT_EQ(offer->description()->contents().size(), 1u);
+ auto* audio =
+ offer->description()->contents()[0].media_description()->as_audio();
+ ASSERT_GT(audio->codecs().size(), 0u);
+ EXPECT_TRUE(audio->rtcp_mux());
+ auto codecs = audio->codecs();
+ for (int invalid_payload_type = 64; invalid_payload_type < 96;
+ invalid_payload_type++) {
+ codecs[0].id =
+ invalid_payload_type; // The range [64-95] is disallowed with rtcp_mux.
+ audio->set_codecs(codecs);
+ // ASSERT to avoid getting into a bad state.
+ ASSERT_FALSE(pc->SetLocalDescription(offer->Clone()));
+ ASSERT_FALSE(pc->SetRemoteDescription(offer->Clone()));
+ }
+}
+
+// Test variant with boolean order for audio-video and video-audio.
+class SdpOfferAnswerShuffleMediaTypes
+ : public SdpOfferAnswerTest,
+ public testing::WithParamInterface<bool> {
+ public:
+ SdpOfferAnswerShuffleMediaTypes() : SdpOfferAnswerTest() {}
+};
+
+TEST_P(SdpOfferAnswerShuffleMediaTypes,
+ RecyclingWithDifferentKindAndSameMidFailsAnswer) {
+ bool audio_first = GetParam();
+ auto pc1 = CreatePeerConnection();
+ auto pc2 = CreatePeerConnection();
+ if (audio_first) {
+ pc1->AddAudioTrack("audio_track", {});
+ pc2->AddVideoTrack("video_track", {});
+ } else {
+ pc2->AddAudioTrack("audio_track", {});
+ pc1->AddVideoTrack("video_track", {});
+ }
+
+ auto initial_offer = pc1->CreateOfferAndSetAsLocal();
+ ASSERT_EQ(initial_offer->description()->contents().size(), 1u);
+ auto mid1 = initial_offer->description()->contents()[0].mid();
+ std::string rejected_answer_sdp =
+ "v=0\r\n"
+ "o=- 8621259572628890423 2 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "m=" +
+ std::string(audio_first ? "audio" : "video") +
+ " 0 UDP/TLS/RTP/SAVPF 111\r\n"
+ "c=IN IP4 0.0.0.0\r\n";
+ auto rejected_answer =
+ CreateSessionDescription(SdpType::kAnswer, rejected_answer_sdp);
+ EXPECT_TRUE(pc1->SetRemoteDescription(std::move(rejected_answer)));
+
+ auto offer =
+ pc2->CreateOfferAndSetAsLocal(); // This will generate a mid=0 too
+ ASSERT_EQ(offer->description()->contents().size(), 1u);
+ auto mid2 = offer->description()->contents()[0].mid();
+ EXPECT_EQ(mid1, mid2); // Check that the mids collided.
+ EXPECT_TRUE(pc1->SetRemoteDescription(std::move(offer)));
+ auto answer = pc1->CreateAnswer();
+ EXPECT_FALSE(pc1->SetLocalDescription(std::move(answer)));
+}
+
+// Similar to the previous test but with implicit rollback and creating
+// an offer, triggering a different codepath.
+TEST_P(SdpOfferAnswerShuffleMediaTypes,
+ RecyclingWithDifferentKindAndSameMidFailsOffer) {
+ bool audio_first = GetParam();
+ auto pc1 = CreatePeerConnection();
+ auto pc2 = CreatePeerConnection();
+ if (audio_first) {
+ pc1->AddAudioTrack("audio_track", {});
+ pc2->AddVideoTrack("video_track", {});
+ } else {
+ pc2->AddAudioTrack("audio_track", {});
+ pc1->AddVideoTrack("video_track", {});
+ }
+
+ auto initial_offer = pc1->CreateOfferAndSetAsLocal();
+ ASSERT_EQ(initial_offer->description()->contents().size(), 1u);
+ auto mid1 = initial_offer->description()->contents()[0].mid();
+ std::string rejected_answer_sdp =
+ "v=0\r\n"
+ "o=- 8621259572628890423 2 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "m=" +
+ std::string(audio_first ? "audio" : "video") +
+ " 0 UDP/TLS/RTP/SAVPF 111\r\n"
+ "c=IN IP4 0.0.0.0\r\n";
+ auto rejected_answer =
+ CreateSessionDescription(SdpType::kAnswer, rejected_answer_sdp);
+ EXPECT_TRUE(pc1->SetRemoteDescription(std::move(rejected_answer)));
+
+ auto offer =
+ pc2->CreateOfferAndSetAsLocal(); // This will generate a mid=0 too
+ ASSERT_EQ(offer->description()->contents().size(), 1u);
+ auto mid2 = offer->description()->contents()[0].mid();
+ EXPECT_EQ(mid1, mid2); // Check that the mids collided.
+ EXPECT_TRUE(pc1->SetRemoteDescription(std::move(offer)));
+ EXPECT_FALSE(pc1->CreateOffer());
+}
+
+INSTANTIATE_TEST_SUITE_P(SdpOfferAnswerShuffleMediaTypes,
+ SdpOfferAnswerShuffleMediaTypes,
+ ::testing::Values(true, false));
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/sdp_state_provider.h b/third_party/libwebrtc/pc/sdp_state_provider.h
new file mode 100644
index 0000000000..23ffc91bd9
--- /dev/null
+++ b/third_party/libwebrtc/pc/sdp_state_provider.h
@@ -0,0 +1,54 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_SDP_STATE_PROVIDER_H_
+#define PC_SDP_STATE_PROVIDER_H_
+
+#include <string>
+
+#include "api/jsep.h"
+#include "api/peer_connection_interface.h"
+
+namespace webrtc {
+
+// This interface provides access to the state of an SDP offer/answer
+// negotiation.
+//
+// All the functions are const, so using this interface serves as
+// assurance that the user is not modifying the state.
+class SdpStateProvider {
+ public:
+ virtual ~SdpStateProvider() {}
+
+ virtual PeerConnectionInterface::SignalingState signaling_state() const = 0;
+
+ virtual const SessionDescriptionInterface* local_description() const = 0;
+ virtual const SessionDescriptionInterface* remote_description() const = 0;
+ virtual const SessionDescriptionInterface* current_local_description()
+ const = 0;
+ virtual const SessionDescriptionInterface* current_remote_description()
+ const = 0;
+ virtual const SessionDescriptionInterface* pending_local_description()
+ const = 0;
+ virtual const SessionDescriptionInterface* pending_remote_description()
+ const = 0;
+
+ // Whether an ICE restart has been asked for. Used in CreateOffer.
+ virtual bool NeedsIceRestart(const std::string& content_name) const = 0;
+ // Whether an ICE restart was indicated in the remote offer.
+ // Used in CreateAnswer.
+ virtual bool IceRestartPending(const std::string& content_name) const = 0;
+ virtual absl::optional<rtc::SSLRole> GetDtlsRole(
+ const std::string& mid) const = 0;
+};
+
+} // namespace webrtc
+
+#endif // PC_SDP_STATE_PROVIDER_H_
diff --git a/third_party/libwebrtc/pc/sdp_utils.cc b/third_party/libwebrtc/pc/sdp_utils.cc
new file mode 100644
index 0000000000..ca61f0013f
--- /dev/null
+++ b/third_party/libwebrtc/pc/sdp_utils.cc
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/sdp_utils.h"
+
+#include <utility>
+#include <vector>
+
+#include "api/jsep_session_description.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+
+std::unique_ptr<SessionDescriptionInterface> CloneSessionDescription(
+ const SessionDescriptionInterface* sdesc) {
+ RTC_DCHECK(sdesc);
+ return CloneSessionDescriptionAsType(sdesc, sdesc->GetType());
+}
+
+std::unique_ptr<SessionDescriptionInterface> CloneSessionDescriptionAsType(
+ const SessionDescriptionInterface* sdesc,
+ SdpType type) {
+ RTC_DCHECK(sdesc);
+ auto clone = std::make_unique<JsepSessionDescription>(type);
+ if (sdesc->description()) {
+ clone->Initialize(sdesc->description()->Clone(), sdesc->session_id(),
+ sdesc->session_version());
+ }
+ // As of writing, our version of GCC does not allow returning a unique_ptr of
+ // a subclass as a unique_ptr of a base class. To get around this, we need to
+ // std::move the return value.
+ return std::move(clone);
+}
+
+bool SdpContentsAll(SdpContentPredicate pred,
+ const cricket::SessionDescription* desc) {
+ RTC_DCHECK(desc);
+ for (const auto& content : desc->contents()) {
+ const auto* transport_info = desc->GetTransportInfoByName(content.name);
+ if (!pred(&content, transport_info)) {
+ return false;
+ }
+ }
+ return true;
+}
+
+bool SdpContentsNone(SdpContentPredicate pred,
+ const cricket::SessionDescription* desc) {
+ return SdpContentsAll(
+ [pred](const cricket::ContentInfo* content_info,
+ const cricket::TransportInfo* transport_info) {
+ return !pred(content_info, transport_info);
+ },
+ desc);
+}
+
+void SdpContentsForEach(SdpContentMutator fn,
+ cricket::SessionDescription* desc) {
+ RTC_DCHECK(desc);
+ for (auto& content : desc->contents()) {
+ auto* transport_info = desc->GetTransportInfoByName(content.name);
+ fn(&content, transport_info);
+ }
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/sdp_utils.h b/third_party/libwebrtc/pc/sdp_utils.h
new file mode 100644
index 0000000000..effd7cd034
--- /dev/null
+++ b/third_party/libwebrtc/pc/sdp_utils.h
@@ -0,0 +1,62 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_SDP_UTILS_H_
+#define PC_SDP_UTILS_H_
+
+#include <functional>
+#include <memory>
+#include <string>
+
+#include "api/jsep.h"
+#include "p2p/base/transport_info.h"
+#include "pc/session_description.h"
+#include "rtc_base/system/rtc_export.h"
+
+namespace webrtc {
+
+// Returns a deep copy of the given session description, keeping its type.
+RTC_EXPORT std::unique_ptr<SessionDescriptionInterface> CloneSessionDescription(
+    const SessionDescriptionInterface* sdesc);
+
+// Returns a deep copy of the given session description with the SDP type
+// changed to `type`.
+RTC_EXPORT std::unique_ptr<SessionDescriptionInterface>
+CloneSessionDescriptionAsType(const SessionDescriptionInterface* sdesc,
+                              SdpType type);
+
+// Predicate over a single session description content together with its
+// corresponding transport info (which may be null when no transport matches
+// the content's name).
+typedef std::function<bool(const cricket::ContentInfo*,
+                           const cricket::TransportInfo*)>
+    SdpContentPredicate;
+
+// Returns true if the predicate returns true for all contents in the given
+// session description.
+bool SdpContentsAll(SdpContentPredicate pred,
+                    const cricket::SessionDescription* desc);
+
+// Returns true if the predicate returns true for none of the contents in the
+// given session description.
+bool SdpContentsNone(SdpContentPredicate pred,
+                     const cricket::SessionDescription* desc);
+
+// Mutator over a single session description content together with its
+// corresponding transport info; may modify the content and/or the transport.
+typedef std::function<void(cricket::ContentInfo*, cricket::TransportInfo*)>
+    SdpContentMutator;
+
+// Applies the mutator function over all contents in the given session
+// description.
+void SdpContentsForEach(SdpContentMutator fn,
+                        cricket::SessionDescription* desc);
+
+} // namespace webrtc
+
+#endif // PC_SDP_UTILS_H_
diff --git a/third_party/libwebrtc/pc/session_description.cc b/third_party/libwebrtc/pc/session_description.cc
new file mode 100644
index 0000000000..e1152eb107
--- /dev/null
+++ b/third_party/libwebrtc/pc/session_description.cc
@@ -0,0 +1,311 @@
+/*
+ * Copyright 2010 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/session_description.h"
+
+#include "absl/algorithm/container.h"
+#include "absl/memory/memory.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/strings/string_builder.h"
+
+namespace cricket {
+namespace {
+
+// File-local mutable lookup of a content section by its name (MID); returns
+// nullptr if no content matches. Backs the non-const accessors below.
+ContentInfo* FindContentInfoByName(ContentInfos* contents,
+                                   const std::string& name) {
+  RTC_DCHECK(contents);
+  for (ContentInfo& content : *contents) {
+    if (content.name == name) {
+      return &content;
+    }
+  }
+  return nullptr;
+}
+
+} // namespace
+
+// Const lookup of a content section by its name (MID); returns NULL when no
+// content matches.
+const ContentInfo* FindContentInfoByName(const ContentInfos& contents,
+                                         const std::string& name) {
+  for (ContentInfos::const_iterator content = contents.begin();
+       content != contents.end(); ++content) {
+    if (content->name == name) {
+      return &(*content);
+    }
+  }
+  return NULL;
+}
+
+// Returns the first content section whose top-level protocol type (RTP, SCTP
+// or other) equals `type`, or nullptr if there is none.
+const ContentInfo* FindContentInfoByType(const ContentInfos& contents,
+                                         MediaProtocolType type) {
+  for (const auto& content : contents) {
+    if (content.type == type) {
+      return &content;
+    }
+  }
+  return nullptr;
+}
+
+ContentGroup::ContentGroup(const std::string& semantics)
+    : semantics_(semantics) {}
+
+// ContentGroup owns only a string and a vector of strings, so all copy/move
+// special members can be defaulted.
+ContentGroup::ContentGroup(const ContentGroup&) = default;
+ContentGroup::ContentGroup(ContentGroup&&) = default;
+ContentGroup& ContentGroup::operator=(const ContentGroup&) = default;
+ContentGroup& ContentGroup::operator=(ContentGroup&&) = default;
+ContentGroup::~ContentGroup() = default;
+
+// Returns the first content name in the group, or NULL when the group is
+// empty.
+const std::string* ContentGroup::FirstContentName() const {
+  return (!content_names_.empty()) ? &(*content_names_.begin()) : NULL;
+}
+
+bool ContentGroup::HasContentName(absl::string_view content_name) const {
+  return absl::c_linear_search(content_names_, content_name);
+}
+
+// Appends `content_name` unless it is already present, keeping the list
+// duplicate-free.
+void ContentGroup::AddContentName(absl::string_view content_name) {
+  if (!HasContentName(content_name)) {
+    content_names_.emplace_back(content_name);
+  }
+}
+
+// Removes `content_name` if present; returns whether anything was removed.
+bool ContentGroup::RemoveContentName(absl::string_view content_name) {
+  ContentNames::iterator iter = absl::c_find(content_names_, content_name);
+  if (iter == content_names_.end()) {
+    return false;
+  }
+  content_names_.erase(iter);
+  return true;
+}
+
+// Debug representation, e.g. `BUNDLE(audio video )`.
+std::string ContentGroup::ToString() const {
+  rtc::StringBuilder acc;
+  acc << semantics_ << "(";
+  if (!content_names_.empty()) {
+    for (const auto& name : content_names_) {
+      acc << name << " ";
+    }
+  }
+  acc << ")";
+  return acc.Release();
+}
+
+SessionDescription::SessionDescription() = default;
+SessionDescription::SessionDescription(const SessionDescription&) = default;
+
+SessionDescription::~SessionDescription() {}
+
+std::unique_ptr<SessionDescription> SessionDescription::Clone() const {
+  // Copy using the private copy constructor.
+  // This will clone the descriptions using ContentInfo's copy constructor.
+  return absl::WrapUnique(new SessionDescription(*this));
+}
+
+// Returns the content whose name (MID) equals `name`, or NULL when absent.
+const ContentInfo* SessionDescription::GetContentByName(
+    const std::string& name) const {
+  return FindContentInfoByName(contents_, name);
+}
+
+ContentInfo* SessionDescription::GetContentByName(const std::string& name) {
+  return FindContentInfoByName(&contents_, name);
+}
+
+// Convenience accessor for the media description of the named content;
+// returns NULL when no such content exists.
+const MediaContentDescription* SessionDescription::GetContentDescriptionByName(
+    const std::string& name) const {
+  const ContentInfo* cinfo = FindContentInfoByName(contents_, name);
+  if (cinfo == NULL) {
+    return NULL;
+  }
+
+  return cinfo->media_description();
+}
+
+MediaContentDescription* SessionDescription::GetContentDescriptionByName(
+    const std::string& name) {
+  ContentInfo* cinfo = FindContentInfoByName(&contents_, name);
+  if (cinfo == NULL) {
+    return NULL;
+  }
+
+  return cinfo->media_description();
+}
+
+// Returns the first content with the given top-level protocol type, or NULL.
+const ContentInfo* SessionDescription::FirstContentByType(
+    MediaProtocolType type) const {
+  return FindContentInfoByType(contents_, type);
+}
+
+const ContentInfo* SessionDescription::FirstContent() const {
+  return (contents_.empty()) ? NULL : &(*contents_.begin());
+}
+
+// Appends a new content section with the given name (MID), protocol type and
+// media description.
+void SessionDescription::AddContent(
+    const std::string& name,
+    MediaProtocolType type,
+    std::unique_ptr<MediaContentDescription> description) {
+  ContentInfo content(type);
+  content.name = name;
+  content.set_media_description(std::move(description));
+  AddContent(std::move(content));
+}
+
+// As above, but also sets the `rejected` flag on the new section.
+void SessionDescription::AddContent(
+    const std::string& name,
+    MediaProtocolType type,
+    bool rejected,
+    std::unique_ptr<MediaContentDescription> description) {
+  ContentInfo content(type);
+  content.name = name;
+  content.rejected = rejected;
+  content.set_media_description(std::move(description));
+  AddContent(std::move(content));
+}
+
+// As above, with explicit `rejected` and `bundle_only` flags.
+void SessionDescription::AddContent(
+    const std::string& name,
+    MediaProtocolType type,
+    bool rejected,
+    bool bundle_only,
+    std::unique_ptr<MediaContentDescription> description) {
+  ContentInfo content(type);
+  content.name = name;
+  content.rejected = rejected;
+  content.bundle_only = bundle_only;
+  content.set_media_description(std::move(description));
+  AddContent(std::move(content));
+}
+
+// Final overload that all other AddContent overloads funnel into.
+// NOTE(review): extmap_allow_mixed_ defaults to true, so this dereferences
+// content.media_description() — assumes the caller attached a media
+// description before adding; confirm for direct ContentInfo&& callers.
+void SessionDescription::AddContent(ContentInfo&& content) {
+  if (extmap_allow_mixed()) {
+    // Mixed support on session level overrides setting on media level.
+    content.media_description()->set_extmap_allow_mixed_enum(
+        MediaContentDescription::kSession);
+  }
+  contents_.push_back(std::move(content));
+}
+
+// Removes the first content whose name equals `name`; returns whether one was
+// removed. Transport infos and groups referring to that name are untouched.
+bool SessionDescription::RemoveContentByName(const std::string& name) {
+  for (ContentInfos::iterator content = contents_.begin();
+       content != contents_.end(); ++content) {
+    if (content->name == name) {
+      contents_.erase(content);
+      return true;
+    }
+  }
+
+  return false;
+}
+
+void SessionDescription::AddTransportInfo(const TransportInfo& transport_info) {
+  transport_infos_.push_back(transport_info);
+}
+
+// Removes the transport info bound to content `name`; returns whether one was
+// removed.
+bool SessionDescription::RemoveTransportInfoByName(const std::string& name) {
+  for (TransportInfos::iterator transport_info = transport_infos_.begin();
+       transport_info != transport_infos_.end(); ++transport_info) {
+    if (transport_info->content_name == name) {
+      transport_infos_.erase(transport_info);
+      return true;
+    }
+  }
+  return false;
+}
+
+// Returns the transport info bound to content `name`, or NULL when absent.
+const TransportInfo* SessionDescription::GetTransportInfoByName(
+    const std::string& name) const {
+  for (TransportInfos::const_iterator iter = transport_infos_.begin();
+       iter != transport_infos_.end(); ++iter) {
+    if (iter->content_name == name) {
+      return &(*iter);
+    }
+  }
+  return NULL;
+}
+
+TransportInfo* SessionDescription::GetTransportInfoByName(
+    const std::string& name) {
+  for (TransportInfos::iterator iter = transport_infos_.begin();
+       iter != transport_infos_.end(); ++iter) {
+    if (iter->content_name == name) {
+      return &(*iter);
+    }
+  }
+  return NULL;
+}
+
+// Removes only the FIRST group whose semantics (e.g. "BUNDLE") equals `name`;
+// further groups with the same semantics are kept.
+void SessionDescription::RemoveGroupByName(const std::string& name) {
+  for (ContentGroups::iterator iter = content_groups_.begin();
+       iter != content_groups_.end(); ++iter) {
+    if (iter->semantics() == name) {
+      content_groups_.erase(iter);
+      break;
+    }
+  }
+}
+
+bool SessionDescription::HasGroup(const std::string& name) const {
+  for (ContentGroups::const_iterator iter = content_groups_.begin();
+       iter != content_groups_.end(); ++iter) {
+    if (iter->semantics() == name) {
+      return true;
+    }
+  }
+  return false;
+}
+
+// Returns the first group with the given semantics, or NULL.
+const ContentGroup* SessionDescription::GetGroupByName(
+    const std::string& name) const {
+  for (ContentGroups::const_iterator iter = content_groups_.begin();
+       iter != content_groups_.end(); ++iter) {
+    if (iter->semantics() == name) {
+      return &(*iter);
+    }
+  }
+  return NULL;
+}
+
+// Returns all groups with the given semantics; the pointed-at groups remain
+// owned by this description.
+std::vector<const ContentGroup*> SessionDescription::GetGroupsByName(
+    const std::string& name) const {
+  std::vector<const ContentGroup*> content_groups;
+  for (const ContentGroup& content_group : content_groups_) {
+    if (content_group.semantics() == name) {
+      content_groups.push_back(&content_group);
+    }
+  }
+  return content_groups;
+}
+
+ContentInfo::~ContentInfo() {}
+
+// Copy constructor: deep-copies the owned media description via Clone().
+// NOTE(review): dereferences o.description_ — assumes a ContentInfo is never
+// copied before a media description is attached; confirm with callers.
+ContentInfo::ContentInfo(const ContentInfo& o)
+    : name(o.name),
+      type(o.type),
+      rejected(o.rejected),
+      bundle_only(o.bundle_only),
+      description_(o.description_->Clone()) {}
+
+// Copy assignment: likewise deep-copies the media description.
+ContentInfo& ContentInfo::operator=(const ContentInfo& o) {
+  name = o.name;
+  type = o.type;
+  rejected = o.rejected;
+  bundle_only = o.bundle_only;
+  description_ = o.description_->Clone();
+  return *this;
+}
+
+// Non-owning accessors for the owned media description.
+const MediaContentDescription* ContentInfo::media_description() const {
+  return description_.get();
+}
+
+MediaContentDescription* ContentInfo::media_description() {
+  return description_.get();
+}
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/pc/session_description.h b/third_party/libwebrtc/pc/session_description.h
new file mode 100644
index 0000000000..403e46529f
--- /dev/null
+++ b/third_party/libwebrtc/pc/session_description.h
@@ -0,0 +1,598 @@
+/*
+ * Copyright 2004 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_SESSION_DESCRIPTION_H_
+#define PC_SESSION_DESCRIPTION_H_
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <algorithm>
+#include <memory>
+#include <string>
+#include <type_traits>
+#include <utility>
+#include <vector>
+
+#include "absl/memory/memory.h"
+#include "absl/strings/string_view.h"
+#include "api/crypto_params.h"
+#include "api/media_types.h"
+#include "api/rtp_parameters.h"
+#include "api/rtp_transceiver_direction.h"
+#include "api/rtp_transceiver_interface.h"
+#include "media/base/codec.h"
+#include "media/base/media_channel.h"
+#include "media/base/media_constants.h"
+#include "media/base/rid_description.h"
+#include "media/base/stream_params.h"
+#include "p2p/base/transport_description.h"
+#include "p2p/base/transport_info.h"
+#include "pc/media_protocol_names.h"
+#include "pc/simulcast_description.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/socket_address.h"
+#include "rtc_base/system/rtc_export.h"
+
+namespace cricket {
+
+using CryptoParamsVec = std::vector<CryptoParams>;
+using RtpHeaderExtensions = std::vector<webrtc::RtpExtension>;
+
+// Sentinel bandwidth value meaning "no explicit bandwidth configured".
+const int kAutoBandwidth = -1;
+
+// Forward declarations of the per-media-type subclasses so that
+// MediaContentDescription can expose checked downcast helpers (as_audio etc.).
+class AudioContentDescription;
+class VideoContentDescription;
+class SctpDataContentDescription;
+class UnsupportedContentDescription;
+
+// Describes a session description media section. There are subclasses for each
+// media type (audio, video, data) that will have additional information.
+class MediaContentDescription {
+ public:
+  MediaContentDescription() = default;
+  virtual ~MediaContentDescription() = default;
+
+  virtual MediaType type() const = 0;
+
+  // Try to cast this media description to an AudioContentDescription. Returns
+  // nullptr if the cast fails.
+  virtual AudioContentDescription* as_audio() { return nullptr; }
+  virtual const AudioContentDescription* as_audio() const { return nullptr; }
+
+  // Try to cast this media description to a VideoContentDescription. Returns
+  // nullptr if the cast fails.
+  virtual VideoContentDescription* as_video() { return nullptr; }
+  virtual const VideoContentDescription* as_video() const { return nullptr; }
+
+  virtual SctpDataContentDescription* as_sctp() { return nullptr; }
+  virtual const SctpDataContentDescription* as_sctp() const { return nullptr; }
+
+  virtual UnsupportedContentDescription* as_unsupported() { return nullptr; }
+  virtual const UnsupportedContentDescription* as_unsupported() const {
+    return nullptr;
+  }
+
+  // Copy operator that returns a unique_ptr.
+  // Not a virtual function.
+  // If a type-specific variant of Clone() is desired, override it, or
+  // simply use std::make_unique<typename>(*this) instead of Clone().
+  std::unique_ptr<MediaContentDescription> Clone() const {
+    return absl::WrapUnique(CloneInternal());
+  }
+
+  // `protocol` is the expected media transport protocol, such as RTP/AVPF,
+  // RTP/SAVPF or SCTP/DTLS.
+  std::string protocol() const { return protocol_; }
+  virtual void set_protocol(absl::string_view protocol) {
+    protocol_ = std::string(protocol);
+  }
+
+  webrtc::RtpTransceiverDirection direction() const { return direction_; }
+  void set_direction(webrtc::RtpTransceiverDirection direction) {
+    direction_ = direction;
+  }
+
+  bool rtcp_mux() const { return rtcp_mux_; }
+  void set_rtcp_mux(bool mux) { rtcp_mux_ = mux; }
+
+  bool rtcp_reduced_size() const { return rtcp_reduced_size_; }
+  void set_rtcp_reduced_size(bool reduced_size) {
+    rtcp_reduced_size_ = reduced_size;
+  }
+
+  // Indicates support for the remote network estimate packet type. This
+  // functionality is experimental and subject to change without notice.
+  bool remote_estimate() const { return remote_estimate_; }
+  void set_remote_estimate(bool remote_estimate) {
+    remote_estimate_ = remote_estimate;
+  }
+
+  int bandwidth() const { return bandwidth_; }
+  void set_bandwidth(int bandwidth) { bandwidth_ = bandwidth; }
+  std::string bandwidth_type() const { return bandwidth_type_; }
+  void set_bandwidth_type(std::string bandwidth_type) {
+    // Move from the by-value parameter instead of copying it a second time.
+    bandwidth_type_ = std::move(bandwidth_type);
+  }
+
+  const std::vector<CryptoParams>& cryptos() const { return cryptos_; }
+  void AddCrypto(const CryptoParams& params) { cryptos_.push_back(params); }
+  void set_cryptos(const std::vector<CryptoParams>& cryptos) {
+    cryptos_ = cryptos;
+  }
+
+  // List of RTP header extensions. URIs are **NOT** guaranteed to be unique
+  // as they can appear twice when both encrypted and non-encrypted extensions
+  // are present.
+  // Use RtpExtension::FindHeaderExtensionByUri for finding and
+  // RtpExtension::DeduplicateHeaderExtensions for filtering.
+  const RtpHeaderExtensions& rtp_header_extensions() const {
+    return rtp_header_extensions_;
+  }
+  void set_rtp_header_extensions(const RtpHeaderExtensions& extensions) {
+    rtp_header_extensions_ = extensions;
+    rtp_header_extensions_set_ = true;
+  }
+  void AddRtpHeaderExtension(const webrtc::RtpExtension& ext) {
+    rtp_header_extensions_.push_back(ext);
+    rtp_header_extensions_set_ = true;
+  }
+  void ClearRtpHeaderExtensions() {
+    rtp_header_extensions_.clear();
+    rtp_header_extensions_set_ = true;
+  }
+  // We can't always tell if an empty list of header extensions is
+  // because the other side doesn't support them, or just isn't hooked up to
+  // signal them. For now we assume an empty list means no signaling, but
+  // provide the ClearRtpHeaderExtensions method to allow "no support" to be
+  // clearly indicated (i.e. when derived from other information).
+  bool rtp_header_extensions_set() const { return rtp_header_extensions_set_; }
+  const StreamParamsVec& streams() const { return send_streams_; }
+  // TODO(pthatcher): Remove this by giving mediamessage.cc access
+  // to MediaContentDescription
+  StreamParamsVec& mutable_streams() { return send_streams_; }
+  void AddStream(const StreamParams& stream) {
+    send_streams_.push_back(stream);
+  }
+  // Legacy streams have an ssrc, but nothing else.
+  void AddLegacyStream(uint32_t ssrc) {
+    AddStream(StreamParams::CreateLegacy(ssrc));
+  }
+  void AddLegacyStream(uint32_t ssrc, uint32_t fid_ssrc) {
+    StreamParams sp = StreamParams::CreateLegacy(ssrc);
+    sp.AddFidSsrc(ssrc, fid_ssrc);
+    AddStream(sp);
+  }
+
+  // Returns 0 when no send stream has been added.
+  uint32_t first_ssrc() const {
+    if (send_streams_.empty()) {
+      return 0;
+    }
+    return send_streams_[0].first_ssrc();
+  }
+  bool has_ssrcs() const {
+    if (send_streams_.empty()) {
+      return false;
+    }
+    return send_streams_[0].has_ssrcs();
+  }
+
+  void set_conference_mode(bool enable) { conference_mode_ = enable; }
+  bool conference_mode() const { return conference_mode_; }
+
+  // https://tools.ietf.org/html/rfc4566#section-5.7
+  // May be present at the media or session level of SDP. If present at both
+  // levels, the media-level attribute overwrites the session-level one.
+  void set_connection_address(const rtc::SocketAddress& address) {
+    connection_address_ = address;
+  }
+  const rtc::SocketAddress& connection_address() const {
+    return connection_address_;
+  }
+
+  // Determines if it's allowed to mix one- and two-byte rtp header extensions
+  // within the same rtp stream.
+  enum ExtmapAllowMixed { kNo, kSession, kMedia };
+  void set_extmap_allow_mixed_enum(ExtmapAllowMixed new_extmap_allow_mixed) {
+    if (new_extmap_allow_mixed == kMedia &&
+        extmap_allow_mixed_enum_ == kSession) {
+      // Do not downgrade from session level to media level.
+      return;
+    }
+    extmap_allow_mixed_enum_ = new_extmap_allow_mixed;
+  }
+  ExtmapAllowMixed extmap_allow_mixed_enum() const {
+    return extmap_allow_mixed_enum_;
+  }
+  bool extmap_allow_mixed() const { return extmap_allow_mixed_enum_ != kNo; }
+
+  // Simulcast functionality.
+  bool HasSimulcast() const { return !simulcast_.empty(); }
+  SimulcastDescription& simulcast_description() { return simulcast_; }
+  const SimulcastDescription& simulcast_description() const {
+    return simulcast_;
+  }
+  void set_simulcast_description(const SimulcastDescription& simulcast) {
+    simulcast_ = simulcast;
+  }
+  const std::vector<RidDescription>& receive_rids() const {
+    return receive_rids_;
+  }
+  void set_receive_rids(const std::vector<RidDescription>& rids) {
+    receive_rids_ = rids;
+  }
+
+  // Codecs should be in preference order (most preferred codec first).
+  const std::vector<Codec>& codecs() const { return codecs_; }
+  void set_codecs(const std::vector<Codec>& codecs) { codecs_ = codecs; }
+  virtual bool has_codecs() const { return !codecs_.empty(); }
+  bool HasCodec(int id) {
+    // Take each Codec by const reference; a by-value lambda parameter would
+    // copy the codec on every comparison during the linear scan.
+    return absl::c_find_if(codecs_, [id](const cricket::Codec& codec) {
+             return codec.id == id;
+           }) != codecs_.end();
+  }
+  void AddCodec(const Codec& codec) { codecs_.push_back(codec); }
+  void AddOrReplaceCodec(const Codec& codec) {
+    for (auto it = codecs_.begin(); it != codecs_.end(); ++it) {
+      if (it->id == codec.id) {
+        *it = codec;
+        return;
+      }
+    }
+    AddCodec(codec);
+  }
+  void AddCodecs(const std::vector<Codec>& codecs) {
+    for (const auto& codec : codecs) {
+      AddCodec(codec);
+    }
+  }
+
+ protected:
+  // TODO(bugs.webrtc.org/15214): move all RTP related things to
+  // RtpMediaDescription that the SCTP content description does
+  // not inherit from.
+  std::string protocol_;
+
+ private:
+  bool rtcp_mux_ = false;
+  bool rtcp_reduced_size_ = false;
+  bool remote_estimate_ = false;
+  int bandwidth_ = kAutoBandwidth;
+  std::string bandwidth_type_ = kApplicationSpecificBandwidth;
+
+  std::vector<CryptoParams> cryptos_;
+  std::vector<webrtc::RtpExtension> rtp_header_extensions_;
+  bool rtp_header_extensions_set_ = false;
+  StreamParamsVec send_streams_;
+  bool conference_mode_ = false;
+  webrtc::RtpTransceiverDirection direction_ =
+      webrtc::RtpTransceiverDirection::kSendRecv;
+  rtc::SocketAddress connection_address_;
+  ExtmapAllowMixed extmap_allow_mixed_enum_ = kMedia;
+
+  SimulcastDescription simulcast_;
+  std::vector<RidDescription> receive_rids_;
+
+  // Copy function that returns a raw pointer. Caller will assert ownership.
+  // Should only be called by the Clone() function. Must be implemented
+  // by each final subclass.
+  virtual MediaContentDescription* CloneInternal() const = 0;
+
+  std::vector<Codec> codecs_;
+};
+
+// Intermediate base for RTP-based sections (audio/video); SCTP data sections
+// derive from MediaContentDescription directly.
+class RtpMediaContentDescription : public MediaContentDescription {};
+
+// Media section for an m=audio line.
+class AudioContentDescription : public RtpMediaContentDescription {
+ public:
+  void set_protocol(absl::string_view protocol) override {
+    // Audio sections may only carry an RTP-family protocol.
+    RTC_DCHECK(IsRtpProtocol(protocol));
+    protocol_ = std::string(protocol);
+  }
+  MediaType type() const override { return MEDIA_TYPE_AUDIO; }
+  AudioContentDescription* as_audio() override { return this; }
+  const AudioContentDescription* as_audio() const override { return this; }
+
+ private:
+  AudioContentDescription* CloneInternal() const override {
+    return new AudioContentDescription(*this);
+  }
+};
+
+// Media section for an m=video line.
+class VideoContentDescription : public RtpMediaContentDescription {
+ public:
+  void set_protocol(absl::string_view protocol) override {
+    // Video sections may only carry an RTP-family protocol.
+    RTC_DCHECK(IsRtpProtocol(protocol));
+    protocol_ = std::string(protocol);
+  }
+  MediaType type() const override { return MEDIA_TYPE_VIDEO; }
+  VideoContentDescription* as_video() override { return this; }
+  const VideoContentDescription* as_video() const override { return this; }
+
+ private:
+  VideoContentDescription* CloneInternal() const override {
+    return new VideoContentDescription(*this);
+  }
+};
+
+// Media section for an SCTP data channel (m=application line).
+class SctpDataContentDescription : public MediaContentDescription {
+ public:
+  SctpDataContentDescription() {}
+  SctpDataContentDescription(const SctpDataContentDescription& o)
+      : MediaContentDescription(o),
+        use_sctpmap_(o.use_sctpmap_),
+        port_(o.port_),
+        max_message_size_(o.max_message_size_) {}
+  MediaType type() const override { return MEDIA_TYPE_DATA; }
+  SctpDataContentDescription* as_sctp() override { return this; }
+  const SctpDataContentDescription* as_sctp() const override { return this; }
+
+  // SCTP data sections never carry codecs.
+  bool has_codecs() const override { return false; }
+  void set_protocol(absl::string_view protocol) override {
+    RTC_DCHECK(IsSctpProtocol(protocol));
+    protocol_ = std::string(protocol);
+  }
+
+  bool use_sctpmap() const { return use_sctpmap_; }
+  void set_use_sctpmap(bool enable) { use_sctpmap_ = enable; }
+  int port() const { return port_; }
+  void set_port(int port) { port_ = port; }
+  int max_message_size() const { return max_message_size_; }
+  void set_max_message_size(int max_message_size) {
+    max_message_size_ = max_message_size;
+  }
+
+ private:
+  SctpDataContentDescription* CloneInternal() const override {
+    return new SctpDataContentDescription(*this);
+  }
+  bool use_sctpmap_ = true;  // Note: "true" is no longer conformant.
+  // Defaults should be constants imported from SCTP. Quick hack.
+  int port_ = 5000;
+  // draft-ietf-mmusic-sdp-sctp-23: Max message size default is 64K
+  int max_message_size_ = 64 * 1024;
+};
+
+// Placeholder section for m= lines whose media type the implementation does
+// not understand; preserves the raw media type string.
+class UnsupportedContentDescription : public MediaContentDescription {
+ public:
+  explicit UnsupportedContentDescription(absl::string_view media_type)
+      : media_type_(media_type) {}
+  MediaType type() const override { return MEDIA_TYPE_UNSUPPORTED; }
+
+  UnsupportedContentDescription* as_unsupported() override { return this; }
+  const UnsupportedContentDescription* as_unsupported() const override {
+    return this;
+  }
+
+  bool has_codecs() const override { return false; }
+  // The verbatim media type token from the m= line.
+  const std::string& media_type() const { return media_type_; }
+
+ private:
+  UnsupportedContentDescription* CloneInternal() const override {
+    return new UnsupportedContentDescription(*this);
+  }
+
+  std::string media_type_;
+};
+
+// Protocol used for encoding media. This is the "top level" protocol that may
+// be wrapped by zero or many transport protocols (UDP, ICE, etc.).
+enum class MediaProtocolType {
+  kRtp,   // Section will use the RTP protocol (e.g., for audio or video).
+          // https://tools.ietf.org/html/rfc3550
+  kSctp,  // Section will use the SCTP protocol (e.g., for a data channel).
+          // https://tools.ietf.org/html/rfc4960
+  kOther  // Section will use another top-level protocol that is not
+          // explicitly supported.
+};
+
+// Represents a session description section. Most information about the section
+// is stored in the description, which is a subclass of MediaContentDescription.
+// Owns the description.
+class RTC_EXPORT ContentInfo {
+ public:
+  explicit ContentInfo(MediaProtocolType type) : type(type) {}
+  ~ContentInfo();
+  // Copy: deep-copies the owned media description (defined in the .cc).
+  ContentInfo(const ContentInfo& o);
+  ContentInfo& operator=(const ContentInfo& o);
+  // Move: transfers ownership; the moved-from object is left with no media
+  // description.
+  ContentInfo(ContentInfo&& o) = default;
+  ContentInfo& operator=(ContentInfo&& o) = default;
+
+  // Alias for `name`.
+  std::string mid() const { return name; }
+  void set_mid(const std::string& mid) { this->name = mid; }
+
+  // Alias for `description`.
+  MediaContentDescription* media_description();
+  const MediaContentDescription* media_description() const;
+
+  void set_media_description(std::unique_ptr<MediaContentDescription> desc) {
+    description_ = std::move(desc);
+  }
+
+  // TODO(bugs.webrtc.org/8620): Rename this to mid.
+  std::string name;
+  MediaProtocolType type;
+  bool rejected = false;
+  bool bundle_only = false;
+
+ private:
+  friend class SessionDescription;
+  std::unique_ptr<MediaContentDescription> description_;
+};
+
+typedef std::vector<std::string> ContentNames;
+
+// This class provides a mechanism to aggregate different media contents into a
+// group. This group can also be shared with the peers in a pre-defined format.
+// A ContentGroup should be populated only with the `content_name` of the
+// MediaDescription.
+class ContentGroup {
+ public:
+  explicit ContentGroup(const std::string& semantics);
+  ContentGroup(const ContentGroup&);
+  ContentGroup(ContentGroup&&);
+  ContentGroup& operator=(const ContentGroup&);
+  ContentGroup& operator=(ContentGroup&&);
+  ~ContentGroup();
+
+  const std::string& semantics() const { return semantics_; }
+  const ContentNames& content_names() const { return content_names_; }
+
+  // Returns NULL when the group is empty.
+  const std::string* FirstContentName() const;
+  bool HasContentName(absl::string_view content_name) const;
+  // No-op when the name is already present.
+  void AddContentName(absl::string_view content_name);
+  // Returns whether anything was removed.
+  bool RemoveContentName(absl::string_view content_name);
+  // for debugging
+  std::string ToString() const;
+
+ private:
+  std::string semantics_;
+  ContentNames content_names_;
+};
+
+typedef std::vector<ContentInfo> ContentInfos;
+typedef std::vector<ContentGroup> ContentGroups;
+
+// Returns the content whose name (MID) matches, or null when absent.
+const ContentInfo* FindContentInfoByName(const ContentInfos& contents,
+                                         const std::string& name);
+// Returns the first content with the given top-level protocol type, or null.
+// The parameter type matches the definition in session_description.cc; it was
+// previously declared as `const std::string&`, leaving a declaration with no
+// corresponding definition.
+const ContentInfo* FindContentInfoByType(const ContentInfos& contents,
+                                         MediaProtocolType type);
+
+// Determines how the MSID will be signaled in the SDP. These can be used as
+// flags to indicate both or none.
+enum MsidSignaling {
+  // Signal MSID with one a=msid line in the media section.
+  kMsidSignalingMediaSection = 0x1,
+  // Signal MSID with a=ssrc: msid lines in the media section.
+  kMsidSignalingSsrcAttribute = 0x2
+};
+
+// Describes a collection of contents, each with its own name and
+// type. Analogous to a <jingle> or <session> stanza. Assumes that
+// contents are unique by name, but doesn't enforce that.
+class SessionDescription {
+ public:
+  SessionDescription();
+  ~SessionDescription();
+
+  // Deep copy, cloning every owned media description.
+  std::unique_ptr<SessionDescription> Clone() const;
+
+  // Content accessors.
+  const ContentInfos& contents() const { return contents_; }
+  ContentInfos& contents() { return contents_; }
+  const ContentInfo* GetContentByName(const std::string& name) const;
+  ContentInfo* GetContentByName(const std::string& name);
+  const MediaContentDescription* GetContentDescriptionByName(
+      const std::string& name) const;
+  MediaContentDescription* GetContentDescriptionByName(const std::string& name);
+  const ContentInfo* FirstContentByType(MediaProtocolType type) const;
+  const ContentInfo* FirstContent() const;
+
+  // Content mutators.
+  // Adds a content to this description. Takes ownership of ContentDescription*.
+  void AddContent(const std::string& name,
+                  MediaProtocolType type,
+                  std::unique_ptr<MediaContentDescription> description);
+  void AddContent(const std::string& name,
+                  MediaProtocolType type,
+                  bool rejected,
+                  std::unique_ptr<MediaContentDescription> description);
+  void AddContent(const std::string& name,
+                  MediaProtocolType type,
+                  bool rejected,
+                  bool bundle_only,
+                  std::unique_ptr<MediaContentDescription> description);
+  void AddContent(ContentInfo&& content);
+
+  // Removes the first matching content; returns whether one was removed.
+  bool RemoveContentByName(const std::string& name);
+
+  // Transport accessors.
+  const TransportInfos& transport_infos() const { return transport_infos_; }
+  TransportInfos& transport_infos() { return transport_infos_; }
+  const TransportInfo* GetTransportInfoByName(const std::string& name) const;
+  TransportInfo* GetTransportInfoByName(const std::string& name);
+  const TransportDescription* GetTransportDescriptionByName(
+      const std::string& name) const {
+    const TransportInfo* tinfo = GetTransportInfoByName(name);
+    return tinfo ? &tinfo->description : NULL;
+  }
+
+  // Transport mutators.
+  void set_transport_infos(const TransportInfos& transport_infos) {
+    transport_infos_ = transport_infos;
+  }
+  // Adds a TransportInfo to this description.
+  void AddTransportInfo(const TransportInfo& transport_info);
+  bool RemoveTransportInfoByName(const std::string& name);
+
+  // Group accessors.
+  const ContentGroups& groups() const { return content_groups_; }
+  const ContentGroup* GetGroupByName(const std::string& name) const;
+  std::vector<const ContentGroup*> GetGroupsByName(
+      const std::string& name) const;
+  bool HasGroup(const std::string& name) const;
+
+  // Group mutators.
+  void AddGroup(const ContentGroup& group) { content_groups_.push_back(group); }
+  // Remove the first group with the same semantics specified by `name`.
+  void RemoveGroupByName(const std::string& name);
+
+  // Global attributes.
+  void set_msid_supported(bool supported) { msid_supported_ = supported; }
+  bool msid_supported() const { return msid_supported_; }
+
+  // Determines how the MSIDs were/will be signaled. Flag value composed of
+  // MsidSignaling bits (see enum above).
+  void set_msid_signaling(int msid_signaling) {
+    msid_signaling_ = msid_signaling;
+  }
+  int msid_signaling() const { return msid_signaling_; }
+
+  // Determines if it's allowed to mix one- and two-byte rtp header extensions
+  // within the same rtp stream.
+  void set_extmap_allow_mixed(bool supported) {
+    extmap_allow_mixed_ = supported;
+    MediaContentDescription::ExtmapAllowMixed media_level_setting =
+        supported ? MediaContentDescription::kSession
+                  : MediaContentDescription::kNo;
+    // Propagate the session-level choice to every existing media section.
+    for (auto& content : contents_) {
+      // Do not set to kNo if the current setting is kMedia.
+      if (supported || content.media_description()->extmap_allow_mixed_enum() !=
+                           MediaContentDescription::kMedia) {
+        content.media_description()->set_extmap_allow_mixed_enum(
+            media_level_setting);
+      }
+    }
+  }
+  bool extmap_allow_mixed() const { return extmap_allow_mixed_; }
+
+ private:
+  // Copy construction is private; use Clone() instead.
+  SessionDescription(const SessionDescription&);
+
+  ContentInfos contents_;
+  TransportInfos transport_infos_;
+  ContentGroups content_groups_;
+  bool msid_supported_ = true;
+  // Default to what Plan B would do.
+  // TODO(bugs.webrtc.org/8530): Change default to kMsidSignalingMediaSection.
+  int msid_signaling_ = kMsidSignalingSsrcAttribute;
+  bool extmap_allow_mixed_ = true;
+};
+
+// Indicates whether a session description was sent by the local client
+// (CS_LOCAL) or received from the remote client (CS_REMOTE).
+enum ContentSource { CS_LOCAL, CS_REMOTE };
+
+} // namespace cricket
+
+#endif // PC_SESSION_DESCRIPTION_H_
diff --git a/third_party/libwebrtc/pc/session_description_unittest.cc b/third_party/libwebrtc/pc/session_description_unittest.cc
new file mode 100644
index 0000000000..4d0913bad2
--- /dev/null
+++ b/third_party/libwebrtc/pc/session_description_unittest.cc
@@ -0,0 +1,133 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "pc/session_description.h"
+
+#include "test/gtest.h"
+
+namespace cricket {
+
// A freshly constructed media description defaults to kMedia, i.e. mixed
// one-/two-byte header extensions are allowed at the media level.
TEST(MediaContentDescriptionTest, ExtmapAllowMixedDefaultValue) {
  VideoContentDescription video_desc;
  EXPECT_EQ(MediaContentDescription::kMedia,
            video_desc.extmap_allow_mixed_enum());
}
+
// Exercises the allowed media-level transitions between kNo, kMedia and
// kSession. Note that the assertions are order-sensitive: each setter call
// depends on the state left by the previous one.
TEST(MediaContentDescriptionTest, SetExtmapAllowMixed) {
  VideoContentDescription video_desc;
  video_desc.set_extmap_allow_mixed_enum(MediaContentDescription::kNo);
  EXPECT_EQ(MediaContentDescription::kNo, video_desc.extmap_allow_mixed_enum());
  video_desc.set_extmap_allow_mixed_enum(MediaContentDescription::kMedia);
  EXPECT_EQ(MediaContentDescription::kMedia,
            video_desc.extmap_allow_mixed_enum());
  video_desc.set_extmap_allow_mixed_enum(MediaContentDescription::kSession);
  EXPECT_EQ(MediaContentDescription::kSession,
            video_desc.extmap_allow_mixed_enum());

  // Not allowed to downgrade from kSession to kMedia.
  video_desc.set_extmap_allow_mixed_enum(MediaContentDescription::kMedia);
  EXPECT_EQ(MediaContentDescription::kSession,
            video_desc.extmap_allow_mixed_enum());

  // Always okay to set not supported.
  video_desc.set_extmap_allow_mixed_enum(MediaContentDescription::kNo);
  EXPECT_EQ(MediaContentDescription::kNo, video_desc.extmap_allow_mixed_enum());
  video_desc.set_extmap_allow_mixed_enum(MediaContentDescription::kMedia);
  EXPECT_EQ(MediaContentDescription::kMedia,
            video_desc.extmap_allow_mixed_enum());
  video_desc.set_extmap_allow_mixed_enum(MediaContentDescription::kNo);
  EXPECT_EQ(MediaContentDescription::kNo, video_desc.extmap_allow_mixed_enum());
}
+
// The boolean accessor extmap_allow_mixed() collapses the tri-state enum:
// kNo -> false, kMedia/kSession -> true.
TEST(MediaContentDescriptionTest, MixedOneTwoByteHeaderSupported) {
  VideoContentDescription video_desc;
  video_desc.set_extmap_allow_mixed_enum(MediaContentDescription::kNo);
  EXPECT_FALSE(video_desc.extmap_allow_mixed());
  video_desc.set_extmap_allow_mixed_enum(MediaContentDescription::kMedia);
  EXPECT_TRUE(video_desc.extmap_allow_mixed());
  video_desc.set_extmap_allow_mixed_enum(MediaContentDescription::kSession);
  EXPECT_TRUE(video_desc.extmap_allow_mixed());
}
+
// The session-level flag is a plain getter/setter round-trip when there are
// no media sections to propagate to.
TEST(SessionDescriptionTest, SetExtmapAllowMixed) {
  SessionDescription session_desc;
  session_desc.set_extmap_allow_mixed(true);
  EXPECT_TRUE(session_desc.extmap_allow_mixed());
  session_desc.set_extmap_allow_mixed(false);
  EXPECT_FALSE(session_desc.extmap_allow_mixed());
}
+
// Covers the interaction between the session-level setter and the media-level
// enum: session-level true always wins; session-level false only overrides a
// media section whose current state came from the session (kSession), never an
// explicit media-level opt-in (kMedia). Assertions are order-sensitive.
TEST(SessionDescriptionTest, SetExtmapAllowMixedPropagatesToMediaLevel) {
  SessionDescription session_desc;
  session_desc.AddContent("video", MediaProtocolType::kRtp,
                          std::make_unique<VideoContentDescription>());
  MediaContentDescription* video_desc =
      session_desc.GetContentDescriptionByName("video");

  // Setting true on session level propagates to media level.
  session_desc.set_extmap_allow_mixed(true);
  EXPECT_EQ(MediaContentDescription::kSession,
            video_desc->extmap_allow_mixed_enum());

  // Don't downgrade from session level to media level
  video_desc->set_extmap_allow_mixed_enum(MediaContentDescription::kMedia);
  EXPECT_EQ(MediaContentDescription::kSession,
            video_desc->extmap_allow_mixed_enum());

  // Setting false on session level propagates to media level if the current
  // state is kSession.
  session_desc.set_extmap_allow_mixed(false);
  EXPECT_EQ(MediaContentDescription::kNo,
            video_desc->extmap_allow_mixed_enum());

  // Now possible to set at media level.
  video_desc->set_extmap_allow_mixed_enum(MediaContentDescription::kMedia);
  EXPECT_EQ(MediaContentDescription::kMedia,
            video_desc->extmap_allow_mixed_enum());

  // Setting false on session level does not override on media level if current
  // state is kMedia.
  session_desc.set_extmap_allow_mixed(false);
  EXPECT_EQ(MediaContentDescription::kMedia,
            video_desc->extmap_allow_mixed_enum());

  // Setting true on session level overrides setting on media level.
  session_desc.set_extmap_allow_mixed(true);
  EXPECT_EQ(MediaContentDescription::kSession,
            video_desc->extmap_allow_mixed_enum());
}
+
// AddContent applies the session-level extmap-allow-mixed policy to newly
// added media sections: a session-level true is transferred to the new
// section, while a session-level false preserves the section's own setting.
TEST(SessionDescriptionTest, AddContentTransfersExtmapAllowMixedSetting) {
  SessionDescription session_desc;
  session_desc.set_extmap_allow_mixed(false);
  std::unique_ptr<MediaContentDescription> audio_desc =
      std::make_unique<AudioContentDescription>();
  audio_desc->set_extmap_allow_mixed_enum(MediaContentDescription::kMedia);

  // If session setting is false, media level setting is preserved when new
  // content is added.
  session_desc.AddContent("audio", MediaProtocolType::kRtp,
                          std::move(audio_desc));
  EXPECT_EQ(MediaContentDescription::kMedia,
            session_desc.GetContentDescriptionByName("audio")
                ->extmap_allow_mixed_enum());

  // If session setting is true, it's transferred to media level when new
  // content is added.
  session_desc.set_extmap_allow_mixed(true);
  std::unique_ptr<MediaContentDescription> video_desc =
      std::make_unique<VideoContentDescription>();
  session_desc.AddContent("video", MediaProtocolType::kRtp,
                          std::move(video_desc));
  EXPECT_EQ(MediaContentDescription::kSession,
            session_desc.GetContentDescriptionByName("video")
                ->extmap_allow_mixed_enum());
}
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/pc/simulcast_description.cc b/third_party/libwebrtc/pc/simulcast_description.cc
new file mode 100644
index 0000000000..ec87415677
--- /dev/null
+++ b/third_party/libwebrtc/pc/simulcast_description.cc
@@ -0,0 +1,57 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/simulcast_description.h"
+
+#include "rtc_base/checks.h"
+
+namespace cricket {
+
+SimulcastLayer::SimulcastLayer(absl::string_view rid, bool is_paused)
+ : rid{rid}, is_paused{is_paused} {
+ RTC_DCHECK(!rid.empty());
+}
+
// Two layers are equal iff both the rid and the paused state match.
bool SimulcastLayer::operator==(const SimulcastLayer& other) const {
  return rid == other.rid && is_paused == other.is_paused;
}
+
+void SimulcastLayerList::AddLayer(const SimulcastLayer& layer) {
+ list_.push_back({layer});
+}
+
+void SimulcastLayerList::AddLayerWithAlternatives(
+ const std::vector<SimulcastLayer>& rids) {
+ RTC_DCHECK(!rids.empty());
+ list_.push_back(rids);
+}
+
// Returns the alternative-group at `index`. Bounds are only checked via
// DCHECK, so out-of-range access in release builds is undefined behavior.
const std::vector<SimulcastLayer>& SimulcastLayerList::operator[](
    size_t index) const {
  RTC_DCHECK_LT(index, list_.size());
  return list_[index];
}
+
// A simulcast description is empty when neither direction has any layers.
bool SimulcastDescription::empty() const {
  return send_layers_.empty() && receive_layers_.empty();
}
+
+std::vector<SimulcastLayer> SimulcastLayerList::GetAllLayers() const {
+ std::vector<SimulcastLayer> result;
+ for (auto groupIt = begin(); groupIt != end(); groupIt++) {
+ for (auto it = groupIt->begin(); it != groupIt->end(); it++) {
+ result.push_back(*it);
+ }
+ }
+
+ return result;
+}
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/pc/simulcast_description.h b/third_party/libwebrtc/pc/simulcast_description.h
new file mode 100644
index 0000000000..7caf164de5
--- /dev/null
+++ b/third_party/libwebrtc/pc/simulcast_description.h
@@ -0,0 +1,115 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_SIMULCAST_DESCRIPTION_H_
+#define PC_SIMULCAST_DESCRIPTION_H_
+
+#include <stddef.h>
+
+#include <string>
+#include <vector>
+
+#include "absl/strings/string_view.h"
+
+namespace cricket {
+
// Describes a Simulcast Layer.
// Each simulcast layer has a rid as the identifier and a paused flag.
// See also: https://tools.ietf.org/html/draft-ietf-mmusic-rid-15 for
// an explanation about rids.
struct SimulcastLayer final {
  // `rid` must be non-empty (DCHECKed in the definition).
  SimulcastLayer(absl::string_view rid, bool is_paused);

  SimulcastLayer(const SimulcastLayer& other) = default;
  SimulcastLayer& operator=(const SimulcastLayer& other) = default;
  // Equality compares both `rid` and `is_paused`.
  bool operator==(const SimulcastLayer& other) const;

  // The restriction identifier (RID) of this layer.
  std::string rid;
  // Whether the layer is initially paused ('~' prefix in SDP).
  bool is_paused;
};
+
// Describes a list of Simulcast layers.
// Simulcast layers are specified in order of preference.
// Each layer can have a list of alternatives (in order of preference).
// https://tools.ietf.org/html/draft-ietf-mmusic-sdp-simulcast-13#section-5.1
// Example Usage:
//   To populate a list that specifies the following:
//     1. Layer 1 or Layer 2
//     2. Layer 3
//     3. Layer 4 or Layer 5
//   Use the following code:
//     SimulcastLayerList list;
//     list.AddLayerWithAlternatives(
//         {SimulcastLayer("1", false), SimulcastLayer("2", false});
//     list.AddLayer("3");
//     list.AddLayerWithAlternatives(
//         {SimulcastLayer("4", false), SimulcastLayer("5", false});
class SimulcastLayerList final {
 public:
  // Type definitions required by a container.
  // Each element of the container is one group of alternative layers.
  typedef size_t size_type;
  typedef std::vector<SimulcastLayer> value_type;
  typedef std::vector<std::vector<SimulcastLayer>>::const_iterator
      const_iterator;

  // Use to add a layer when there will be no alternatives.
  void AddLayer(const SimulcastLayer& layer);

  // Use to add a list of alternatives.
  // The alternatives should be specified in order of preference.
  // The list must not be empty (DCHECKed in the definition).
  void AddLayerWithAlternatives(const std::vector<SimulcastLayer>& layers);

  // Read-only access to the contents.
  // Note: This object does not allow removal of layers.
  const_iterator begin() const { return list_.begin(); }

  const_iterator end() const { return list_.end(); }

  // Returns the group of alternatives at `index` (DCHECK-bounds-checked).
  const std::vector<SimulcastLayer>& operator[](size_t index) const;

  size_t size() const { return list_.size(); }
  bool empty() const { return list_.empty(); }

  // Provides access to all the layers in the simulcast without their
  // association into groups of alternatives.
  std::vector<SimulcastLayer> GetAllLayers() const;

 private:
  // TODO(amithi, bugs.webrtc.org/10075):
  // Validate that rids do not repeat in the list.
  std::vector<std::vector<SimulcastLayer>> list_;
};
+
// Describes the simulcast options of a video media section.
// This will list the send and receive layers (along with their alternatives).
// Each simulcast layer has an identifier (rid) and can optionally be paused.
// The order of the layers (as well as alternates) indicates user preference
// from first to last (most preferred to least preferred).
// https://tools.ietf.org/html/draft-ietf-mmusic-sdp-simulcast-13#section-5.1
class SimulcastDescription final {
 public:
  // Layers this endpoint offers to send ("send" part of a=simulcast).
  const SimulcastLayerList& send_layers() const { return send_layers_; }
  SimulcastLayerList& send_layers() { return send_layers_; }

  // Layers this endpoint offers to receive ("recv" part of a=simulcast).
  const SimulcastLayerList& receive_layers() const { return receive_layers_; }
  SimulcastLayerList& receive_layers() { return receive_layers_; }

  // True when both directions have no layers.
  bool empty() const;

 private:
  // TODO(amithi, bugs.webrtc.org/10075):
  // Validate that rids do not repeat in send and receive layers.
  SimulcastLayerList send_layers_;
  SimulcastLayerList receive_layers_;
};
+
+} // namespace cricket
+
+#endif // PC_SIMULCAST_DESCRIPTION_H_
diff --git a/third_party/libwebrtc/pc/simulcast_sdp_serializer.cc b/third_party/libwebrtc/pc/simulcast_sdp_serializer.cc
new file mode 100644
index 0000000000..ceb2881550
--- /dev/null
+++ b/third_party/libwebrtc/pc/simulcast_sdp_serializer.cc
@@ -0,0 +1,395 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/simulcast_sdp_serializer.h"
+
+#include <map>
+#include <string>
+#include <type_traits>
+#include <utility>
+#include <vector>
+
+#include "absl/algorithm/container.h"
+#include "absl/strings/string_view.h"
+#include "absl/types/optional.h"
+#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/string_encode.h"
+#include "rtc_base/string_to_number.h"
+#include "rtc_base/strings/string_builder.h"
+
+using cricket::RidDescription;
+using cricket::RidDirection;
+using cricket::SimulcastDescription;
+using cricket::SimulcastLayer;
+using cricket::SimulcastLayerList;
+
+namespace webrtc {
+
+namespace {
+
// Delimiters used by the simulcast/rid SDP grammar. Each delimiter is
// provided both as a C-string (for streaming into a StringBuilder) and as a
// char (for rtc::split/rtc::tokenize).
const char kDelimiterComma[] = ",";
const char kDelimiterCommaChar = ',';
const char kDelimiterEqual[] = "=";
const char kDelimiterEqualChar = '=';
const char kDelimiterSemicolon[] = ";";
const char kDelimiterSemicolonChar = ';';
const char kDelimiterSpace[] = " ";
const char kDelimiterSpaceChar = ' ';

// Keywords from the specifications:
// https://tools.ietf.org/html/draft-ietf-mmusic-sdp-simulcast-13#section-5.1
// https://tools.ietf.org/html/draft-ietf-mmusic-rid-15#section-10
const char kSimulcastPausedStream[] = "~";
const char kSimulcastPausedStreamChar = '~';
const char kSendDirection[] = "send";
const char kReceiveDirection[] = "recv";
const char kPayloadType[] = "pt";
+
// Convenience helper: wraps `message` in a SYNTAX_ERROR RTCError, the error
// type used for all parse failures in this file.
RTCError ParseError(absl::string_view message) {
  return RTCError(RTCErrorType::SYNTAX_ERROR, message);
}
+
+// These methods serialize simulcast according to the specification:
+// https://tools.ietf.org/html/draft-ietf-mmusic-sdp-simulcast-13#section-5.1
+rtc::StringBuilder& operator<<(rtc::StringBuilder& builder,
+ const SimulcastLayer& simulcast_layer) {
+ if (simulcast_layer.is_paused) {
+ builder << kSimulcastPausedStream;
+ }
+ builder << simulcast_layer.rid;
+ return builder;
+}
+
+rtc::StringBuilder& operator<<(
+ rtc::StringBuilder& builder,
+ const std::vector<SimulcastLayer>& layer_alternatives) {
+ bool first = true;
+ for (const SimulcastLayer& rid : layer_alternatives) {
+ if (!first) {
+ builder << kDelimiterComma;
+ }
+ builder << rid;
+ first = false;
+ }
+ return builder;
+}
+
+rtc::StringBuilder& operator<<(rtc::StringBuilder& builder,
+ const SimulcastLayerList& simulcast_layers) {
+ bool first = true;
+ for (const auto& alternatives : simulcast_layers) {
+ if (!first) {
+ builder << kDelimiterSemicolon;
+ }
+ builder << alternatives;
+ first = false;
+ }
+ return builder;
+}
// This method deserializes simulcast according to the specification:
// https://tools.ietf.org/html/draft-ietf-mmusic-sdp-simulcast-13#section-5.1
// sc-str-list  = sc-alt-list *( ";" sc-alt-list )
// sc-alt-list  = sc-id *( "," sc-id )
// sc-id-paused = "~"
// sc-id        = [sc-id-paused] rid-id
// rid-id       = 1*(alpha-numeric / "-" / "_") ; see: I-D.ietf-mmusic-rid
// Returns a SYNTAX_ERROR if the list or any alternative group is empty, or
// if any rid is empty (a bare "~" counts as empty).
RTCErrorOr<SimulcastLayerList> ParseSimulcastLayerList(const std::string& str) {
  // Split into alternative-groups on ';'.
  std::vector<absl::string_view> tokens =
      rtc::split(str, kDelimiterSemicolonChar);
  if (tokens.empty()) {
    return ParseError("Layer list cannot be empty.");
  }

  SimulcastLayerList result;
  for (const absl::string_view& token : tokens) {
    if (token.empty()) {
      return ParseError("Simulcast alternative layer list is empty.");
    }

    // Split each group into individual rids on ','.
    std::vector<absl::string_view> rid_tokens =
        rtc::split(token, kDelimiterCommaChar);

    if (rid_tokens.empty()) {
      return ParseError("Simulcast alternative layer list is malformed.");
    }

    std::vector<SimulcastLayer> layers;
    for (const absl::string_view& rid_token : rid_tokens) {
      if (rid_token.empty() || rid_token == kSimulcastPausedStream) {
        return ParseError("Rid must not be empty.");
      }

      // A leading '~' marks the layer as paused and is stripped from the rid.
      bool paused = rid_token[0] == kSimulcastPausedStreamChar;
      absl::string_view rid = paused ? rid_token.substr(1) : rid_token;
      layers.push_back(SimulcastLayer(rid, paused));
    }

    result.AddLayerWithAlternatives(layers);
  }

  // std::move lets the converting constructor of RTCErrorOr move `result`
  // instead of copying it.
  return std::move(result);
}
+
// Parses the value of a "pt=" restriction (a ','-separated list of payload
// types) into `rid_description->payload_types`.
// Returns a SYNTAX_ERROR if pt= appears more than once, appears after other
// restrictions, is empty, contains a non-numeric value, or contains
// duplicates.
webrtc::RTCError ParseRidPayloadList(const std::string& payload_list,
                                     RidDescription* rid_description) {
  RTC_DCHECK(rid_description);
  std::vector<int>& payload_types = rid_description->payload_types;
  // Check that the description doesn't have any payload types or restrictions.
  // If the pt= field is specified, it must be first and must not repeat.
  if (!payload_types.empty()) {
    return ParseError("Multiple pt= found in RID Description.");
  }
  if (!rid_description->restrictions.empty()) {
    return ParseError("Payload list must appear first in the restrictions.");
  }

  // If the pt= field is specified, it must have a value.
  if (payload_list.empty()) {
    return ParseError("Payload list must have at least one value.");
  }

  // Tokenize the ',' delimited list
  std::vector<std::string> string_payloads;
  rtc::tokenize(payload_list, kDelimiterCommaChar, &string_payloads);
  if (string_payloads.empty()) {
    return ParseError("Payload list must have at least one value.");
  }

  for (const std::string& payload_type : string_payloads) {
    absl::optional<int> value = rtc::StringToNumber<int>(payload_type);
    if (!value.has_value()) {
      return ParseError("Invalid payload type: " + payload_type);
    }

    // Check if the value already appears in the payload list.
    if (absl::c_linear_search(payload_types, value.value())) {
      return ParseError("Duplicate payload type in list: " + payload_type);
    }
    payload_types.push_back(value.value());
  }

  return RTCError::OK();
}
+
+} // namespace
+
+std::string SimulcastSdpSerializer::SerializeSimulcastDescription(
+ const cricket::SimulcastDescription& simulcast) const {
+ rtc::StringBuilder sb;
+ std::string delimiter;
+
+ if (!simulcast.send_layers().empty()) {
+ sb << kSendDirection << kDelimiterSpace << simulcast.send_layers();
+ delimiter = kDelimiterSpace;
+ }
+
+ if (!simulcast.receive_layers().empty()) {
+ sb << delimiter << kReceiveDirection << kDelimiterSpace
+ << simulcast.receive_layers();
+ }
+
+ return sb.str();
+}
+
// https://tools.ietf.org/html/draft-ietf-mmusic-sdp-simulcast-13#section-5.1
// a:simulcast:<send> <streams> <recv> <streams>
// Formal Grammar
// sc-value     = ( sc-send [SP sc-recv] ) / ( sc-recv [SP sc-send] )
// sc-send      = %s"send" SP sc-str-list
// sc-recv      = %s"recv" SP sc-str-list
// sc-str-list  = sc-alt-list *( ";" sc-alt-list )
// sc-alt-list  = sc-id *( "," sc-id )
// sc-id-paused = "~"
// sc-id        = [sc-id-paused] rid-id
// rid-id       = 1*(alpha-numeric / "-" / "_") ; see: I-D.ietf-mmusic-rid
// Parses "send ... [recv ...]" (in either order) into a SimulcastDescription.
RTCErrorOr<SimulcastDescription>
SimulcastSdpSerializer::DeserializeSimulcastDescription(
    absl::string_view string) const {
  std::vector<std::string> tokens;
  rtc::tokenize(std::string(string), kDelimiterSpaceChar, &tokens);

  // Either one direction ("<dir> <streams>") or both ("<dir> <streams> <dir>
  // <streams>").
  if (tokens.size() != 2 && tokens.size() != 4) {
    return ParseError("Must have one or two <direction, streams> pairs.");
  }

  bool bidirectional = tokens.size() == 4;  // indicates both send and recv

  // Tokens 0, 2 (if exists) should be send / recv, and must not be the same
  // direction twice.
  if ((tokens[0] != kSendDirection && tokens[0] != kReceiveDirection) ||
      (bidirectional && tokens[2] != kSendDirection &&
       tokens[2] != kReceiveDirection) ||
      (bidirectional && tokens[0] == tokens[2])) {
    return ParseError("Valid values: send / recv.");
  }

  // Tokens 1, 3 (if exists) should be alternative layer lists
  RTCErrorOr<SimulcastLayerList> list1, list2;
  list1 = ParseSimulcastLayerList(tokens[1]);
  if (!list1.ok()) {
    return list1.MoveError();
  }

  if (bidirectional) {
    list2 = ParseSimulcastLayerList(tokens[3]);
    if (!list2.ok()) {
      return list2.MoveError();
    }
  }

  // Set the layers so that list1 is for send and list2 is for recv
  if (tokens[0] != kSendDirection) {
    std::swap(list1, list2);
  }

  // Set the layers according to which pair is send and which is recv
  // At this point if the simulcast is unidirectional then
  // either `list1` or `list2` will be in 'error' state indicating that
  // the value should not be used.
  SimulcastDescription simulcast;
  if (list1.ok()) {
    simulcast.send_layers() = list1.MoveValue();
  }

  if (list2.ok()) {
    simulcast.receive_layers() = list2.MoveValue();
  }

  // std::move lets the converting constructor of RTCErrorOr move `simulcast`.
  return std::move(simulcast);
}
+
+std::string SimulcastSdpSerializer::SerializeRidDescription(
+ const RidDescription& rid_description) const {
+ RTC_DCHECK(!rid_description.rid.empty());
+ RTC_DCHECK(rid_description.direction == RidDirection::kSend ||
+ rid_description.direction == RidDirection::kReceive);
+
+ rtc::StringBuilder builder;
+ builder << rid_description.rid << kDelimiterSpace
+ << (rid_description.direction == RidDirection::kSend
+ ? kSendDirection
+ : kReceiveDirection);
+
+ const auto& payload_types = rid_description.payload_types;
+ const auto& restrictions = rid_description.restrictions;
+
+ // First property is separated by ' ', the next ones by ';'.
+ const char* propertyDelimiter = kDelimiterSpace;
+
+ // Serialize any codecs in the description.
+ if (!payload_types.empty()) {
+ builder << propertyDelimiter << kPayloadType << kDelimiterEqual;
+ propertyDelimiter = kDelimiterSemicolon;
+ const char* formatDelimiter = "";
+ for (int payload_type : payload_types) {
+ builder << formatDelimiter << payload_type;
+ formatDelimiter = kDelimiterComma;
+ }
+ }
+
+ // Serialize any restrictions in the description.
+ for (const auto& pair : restrictions) {
+ // Serialize key=val pairs. =val part is ommitted if val is empty.
+ builder << propertyDelimiter << pair.first;
+ if (!pair.second.empty()) {
+ builder << kDelimiterEqual << pair.second;
+ }
+
+ propertyDelimiter = kDelimiterSemicolon;
+ }
+
+ return builder.str();
+}
+
// https://tools.ietf.org/html/draft-ietf-mmusic-rid-15#section-10
// Formal Grammar
// rid-syntax         = %s"a=rid:" rid-id SP rid-dir
//                      [ rid-pt-param-list / rid-param-list ]
// rid-id             = 1*(alpha-numeric / "-" / "_")
// rid-dir            = %s"send" / %s"recv"
// rid-pt-param-list  = SP rid-fmt-list *( ";" rid-param )
// rid-param-list     = SP rid-param *( ";" rid-param )
// rid-fmt-list       = %s"pt=" fmt *( "," fmt )
// rid-param          = 1*(alpha-numeric / "-") [ "=" param-val ]
// param-val          = *( %x20-58 / %x60-7E )
//                      ; Any printable character except semicolon
// Parses "<rid> <direction> [restrictions]" into a RidDescription.
RTCErrorOr<RidDescription> SimulcastSdpSerializer::DeserializeRidDescription(
    absl::string_view string) const {
  std::vector<std::string> tokens;
  rtc::tokenize(std::string(string), kDelimiterSpaceChar, &tokens);

  if (tokens.size() < 2) {
    return ParseError("RID Description must contain <RID> <direction>.");
  }

  if (tokens.size() > 3) {
    return ParseError("Invalid RID Description format. Too many arguments.");
  }

  if (!IsLegalRsidName(tokens[0])) {
    return ParseError("Invalid RID value: " + tokens[0] + ".");
  }

  if (tokens[1] != kSendDirection && tokens[1] != kReceiveDirection) {
    return ParseError("Invalid RID direction. Supported values: send / recv.");
  }

  RidDirection direction = tokens[1] == kSendDirection ? RidDirection::kSend
                                                       : RidDirection::kReceive;

  RidDescription rid_description(tokens[0], direction);

  // If there is a third argument it is a payload list and/or restriction list.
  if (tokens.size() == 3) {
    std::vector<std::string> restrictions;
    rtc::tokenize(tokens[2], kDelimiterSemicolonChar, &restrictions);

    // Check for malformed restriction list, such as ';' or ';;;' etc.
    if (restrictions.empty()) {
      return ParseError("Invalid RID restriction list: " + tokens[2]);
    }

    // Parse the restrictions. The payload indicator (pt) can only appear first.
    for (const std::string& restriction : restrictions) {
      std::vector<std::string> parts;
      rtc::tokenize(restriction, kDelimiterEqualChar, &parts);
      if (parts.empty() || parts.size() > 2) {
        return ParseError("Invalid format for restriction: " + restriction);
      }

      // `parts` contains at least one value and it does not contain a space.
      // Note: `parts` and other values might still contain tab, newline,
      // unprintable characters, etc. which will not generate errors here but
      // will (most-likely) be ignored by components down stream.
      if (parts[0] == kPayloadType) {
        // "pt=" must be the first restriction; ParseRidPayloadList enforces
        // that and fills rid_description.payload_types.
        RTCError error = ParseRidPayloadList(
            parts.size() > 1 ? parts[1] : std::string(), &rid_description);
        if (!error.ok()) {
          return std::move(error);
        }

        continue;
      }

      // Parse `parts` as a key=value pair which allows unspecified values.
      if (rid_description.restrictions.find(parts[0]) !=
          rid_description.restrictions.end()) {
        return ParseError("Duplicate restriction specified: " + parts[0]);
      }

      rid_description.restrictions[parts[0]] =
          parts.size() > 1 ? parts[1] : std::string();
    }
  }

  // std::move lets the converting constructor of RTCErrorOr move the result.
  return std::move(rid_description);
}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/simulcast_sdp_serializer.h b/third_party/libwebrtc/pc/simulcast_sdp_serializer.h
new file mode 100644
index 0000000000..4811e5272d
--- /dev/null
+++ b/third_party/libwebrtc/pc/simulcast_sdp_serializer.h
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_SIMULCAST_SDP_SERIALIZER_H_
+#define PC_SIMULCAST_SDP_SERIALIZER_H_
+
+#include <string>
+
+#include "absl/strings/string_view.h"
+#include "api/rtc_error.h"
+#include "media/base/rid_description.h"
+#include "pc/session_description.h"
+#include "pc/simulcast_description.h"
+
namespace webrtc {

// This class serializes simulcast components of the SDP.
// Example:
//     SimulcastDescription can be serialized and deserialized by this class.
//     The serializer will know how to translate the data to spec-compliant
//     format without knowing about the SDP attribute details (a=simulcast:)
// Usage:
//     Consider the SDP attribute for simulcast a=simulcast:<configuration>.
//     The SDP serialization code (webrtc_sdp.h) should use
//     `SimulcastSdpSerializer` to serialize and deserialize the
//     <configuration> section.
// This class will allow testing the serialization of components without
// having to serialize the entire SDP while hiding implementation details
// from callers of sdp serialization (webrtc_sdp.h).
// The class is stateless; all methods are const.
class SimulcastSdpSerializer {
 public:
  // Serialization for the Simulcast description according to
  // https://tools.ietf.org/html/draft-ietf-mmusic-sdp-simulcast-13#section-5.1
  std::string SerializeSimulcastDescription(
      const cricket::SimulcastDescription& simulcast) const;

  // Deserialization for the SimulcastDescription according to
  // https://tools.ietf.org/html/draft-ietf-mmusic-sdp-simulcast-13#section-5.1
  // Returns a SYNTAX_ERROR RTCError on malformed input.
  RTCErrorOr<cricket::SimulcastDescription> DeserializeSimulcastDescription(
      absl::string_view string) const;

  // Serialization for the RID description according to
  // https://tools.ietf.org/html/draft-ietf-mmusic-rid-15#section-10
  std::string SerializeRidDescription(
      const cricket::RidDescription& rid_description) const;

  // Deserialization for the RidDescription according to
  // https://tools.ietf.org/html/draft-ietf-mmusic-rid-15#section-10
  // Returns a SYNTAX_ERROR RTCError on malformed input.
  RTCErrorOr<cricket::RidDescription> DeserializeRidDescription(
      absl::string_view string) const;
};

} // namespace webrtc
+
+} // namespace webrtc
+
+#endif // PC_SIMULCAST_SDP_SERIALIZER_H_
diff --git a/third_party/libwebrtc/pc/simulcast_sdp_serializer_unittest.cc b/third_party/libwebrtc/pc/simulcast_sdp_serializer_unittest.cc
new file mode 100644
index 0000000000..50c89c8742
--- /dev/null
+++ b/third_party/libwebrtc/pc/simulcast_sdp_serializer_unittest.cc
@@ -0,0 +1,485 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/simulcast_sdp_serializer.h"
+
+#include <stddef.h>
+
+#include <map>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "test/gtest.h"
+
+using cricket::RidDescription;
+using cricket::RidDirection;
+using cricket::SimulcastDescription;
+using cricket::SimulcastLayer;
+using cricket::SimulcastLayerList;
+using ::testing::TestWithParam;
+using ::testing::ValuesIn;
+
+namespace webrtc {
+
+namespace {
+// Checks that two vectors have the same objects in the same order.
+// Uses the elements' operator== for comparison.
+template <typename TElement>
+void ExpectEqual(const std::vector<TElement>& expected,
+                 const std::vector<TElement>& actual) {
+  // ASSERT (not EXPECT) on size: the loop below indexes both vectors.
+  ASSERT_EQ(expected.size(), actual.size());
+  for (size_t i = 0; i < expected.size(); i++) {
+    EXPECT_EQ(expected[i], actual[i]) << "Vectors differ at element " << i;
+  }
+}
+
+// Template specialization for vectors of SimulcastLayer objects.
+// Compares rid and pause state of each layer, in order.
+template <>
+void ExpectEqual(const std::vector<SimulcastLayer>& expected,
+                 const std::vector<SimulcastLayer>& actual) {
+  // ASSERT (not EXPECT) on size: if the vectors differ in length, indexing
+  // `actual` below would read out of bounds.
+  ASSERT_EQ(expected.size(), actual.size());
+  for (size_t i = 0; i < expected.size(); i++) {
+    EXPECT_EQ(expected[i].rid, actual[i].rid);
+    EXPECT_EQ(expected[i].is_paused, actual[i].is_paused);
+  }
+}
+
+// Checks that two maps have the same key-value pairs.
+// Even though a map is technically ordered, the order semantics are not
+// tested because having the same key-set in both maps implies that they
+// are ordered the same because the template enforces that they have the
+// same Key-Comparer type.
+template <typename TKey, typename TValue>
+void ExpectEqual(const std::map<TKey, TValue>& expected,
+                 const std::map<TKey, TValue>& actual) {
+  typedef typename std::map<TKey, TValue>::const_iterator const_iterator;
+  ASSERT_EQ(expected.size(), actual.size());
+  // Maps have unique keys, so if size is equal, it is enough to check
+  // that all the keys (and values) from one map exist in the other.
+  for (const auto& pair : expected) {
+    const_iterator iter = actual.find(pair.first);
+    // ASSERT (not EXPECT): dereferencing `iter` below would be undefined
+    // behavior if the key is missing (iter == actual.end()).
+    ASSERT_NE(iter, actual.end()) << "Key: " << pair.first << " not found";
+    EXPECT_EQ(pair.second, iter->second);
+  }
+}
+
+// Checks that the two SimulcastLayerLists are equal.
+void ExpectEqual(const SimulcastLayerList& expected,
+                 const SimulcastLayerList& actual) {
+  // ASSERT (not EXPECT) on size: indexing both lists below would go out of
+  // bounds on the shorter one if the sizes differ.
+  ASSERT_EQ(expected.size(), actual.size());
+  for (size_t i = 0; i < expected.size(); i++) {
+    ExpectEqual(expected[i], actual[i]);
+  }
+}
+
+// Checks that the two SimulcastDescriptions are equal.
+// Compares both the send and the receive layer lists.
+void ExpectEqual(const SimulcastDescription& expected,
+                 const SimulcastDescription& actual) {
+  ExpectEqual(expected.send_layers(), actual.send_layers());
+  ExpectEqual(expected.receive_layers(), actual.receive_layers());
+}
+
+// Checks that the two RidDescriptions are equal.
+// `restrictions` is a map, so its comparison is order-insensitive by key.
+void ExpectEqual(const RidDescription& expected, const RidDescription& actual) {
+  EXPECT_EQ(expected.rid, actual.rid);
+  EXPECT_EQ(expected.direction, actual.direction);
+  ExpectEqual(expected.payload_types, actual.payload_types);
+  ExpectEqual(expected.restrictions, actual.restrictions);
+}
+} // namespace
+
+class SimulcastSdpSerializerTest : public TestWithParam<const char*> {
+ public:
+  // Runs a test for deserializing Simulcast.
+  // `str` - The serialized Simulcast to parse.
+  // `expected` - The expected output Simulcast to compare to.
+  void TestDeserialization(const std::string& str,
+                           const SimulcastDescription& expected) const {
+    SimulcastSdpSerializer deserializer;
+    auto result = deserializer.DeserializeSimulcastDescription(str);
+    // ASSERT (not EXPECT): calling value() on a failed RTCErrorOr is
+    // invalid, so bail out of the helper when deserialization fails.
+    ASSERT_TRUE(result.ok());
+    ExpectEqual(expected, result.value());
+  }
+
+  // Runs a test for serializing Simulcast.
+  // `simulcast` - The Simulcast to serialize.
+  // `expected` - The expected output string to compare to.
+  void TestSerialization(const SimulcastDescription& simulcast,
+                         const std::string& expected) const {
+    SimulcastSdpSerializer serializer;
+    auto result = serializer.SerializeSimulcastDescription(simulcast);
+    EXPECT_EQ(expected, result);
+  }
+};
+
+// Test Cases
+
+// Test simple deserialization with no alternative streams.
+TEST_F(SimulcastSdpSerializerTest, Deserialize_SimpleCaseNoAlternatives) {
+  std::string simulcast_str = "send 1;2 recv 3;4";
+  SimulcastDescription expected;
+  expected.send_layers().AddLayer(SimulcastLayer("1", false));
+  expected.send_layers().AddLayer(SimulcastLayer("2", false));
+  expected.receive_layers().AddLayer(SimulcastLayer("3", false));
+  expected.receive_layers().AddLayer(SimulcastLayer("4", false));
+  TestDeserialization(simulcast_str, expected);
+}
+
+// Test simulcast deserialization with alternative streams.
+// ',' separates alternatives within a layer; ';' separates layers.
+TEST_F(SimulcastSdpSerializerTest, Deserialize_SimpleCaseWithAlternatives) {
+  std::string simulcast_str = "send 1,5;2,6 recv 3,7;4,8";
+  SimulcastDescription expected;
+  expected.send_layers().AddLayerWithAlternatives(
+      {SimulcastLayer("1", false), SimulcastLayer("5", false)});
+  expected.send_layers().AddLayerWithAlternatives(
+      {SimulcastLayer("2", false), SimulcastLayer("6", false)});
+  expected.receive_layers().AddLayerWithAlternatives(
+      {SimulcastLayer("3", false), SimulcastLayer("7", false)});
+  expected.receive_layers().AddLayerWithAlternatives(
+      {SimulcastLayer("4", false), SimulcastLayer("8", false)});
+  TestDeserialization(simulcast_str, expected);
+}
+
+// Test simulcast deserialization when only some streams have alternatives.
+TEST_F(SimulcastSdpSerializerTest, Deserialize_WithSomeAlternatives) {
+  std::string simulcast_str = "send 1;2,6 recv 3,7;4";
+  SimulcastDescription expected;
+  expected.send_layers().AddLayer(SimulcastLayer("1", false));
+  expected.send_layers().AddLayerWithAlternatives(
+      {SimulcastLayer("2", false), SimulcastLayer("6", false)});
+  expected.receive_layers().AddLayerWithAlternatives(
+      {SimulcastLayer("3", false), SimulcastLayer("7", false)});
+  expected.receive_layers().AddLayer(SimulcastLayer("4", false));
+  TestDeserialization(simulcast_str, expected);
+}
+
+// Test simulcast deserialization when only send streams are specified.
+TEST_F(SimulcastSdpSerializerTest, Deserialize_OnlySendStreams) {
+  std::string simulcast_str = "send 1;2,6;3,7;4";
+  SimulcastDescription expected;
+  expected.send_layers().AddLayer(SimulcastLayer("1", false));
+  expected.send_layers().AddLayerWithAlternatives(
+      {SimulcastLayer("2", false), SimulcastLayer("6", false)});
+  expected.send_layers().AddLayerWithAlternatives(
+      {SimulcastLayer("3", false), SimulcastLayer("7", false)});
+  expected.send_layers().AddLayer(SimulcastLayer("4", false));
+  TestDeserialization(simulcast_str, expected);
+}
+
+// Test simulcast deserialization when only receive streams are specified.
+TEST_F(SimulcastSdpSerializerTest, Deserialize_OnlyReceiveStreams) {
+  std::string simulcast_str = "recv 1;2,6;3,7;4";
+  SimulcastDescription expected;
+  expected.receive_layers().AddLayer(SimulcastLayer("1", false));
+  expected.receive_layers().AddLayerWithAlternatives(
+      {SimulcastLayer("2", false), SimulcastLayer("6", false)});
+  expected.receive_layers().AddLayerWithAlternatives(
+      {SimulcastLayer("3", false), SimulcastLayer("7", false)});
+  expected.receive_layers().AddLayer(SimulcastLayer("4", false));
+  TestDeserialization(simulcast_str, expected);
+}
+
+// Test simulcast deserialization with receive streams before send streams.
+// The direction lists may appear in either order.
+TEST_F(SimulcastSdpSerializerTest, Deserialize_SendReceiveReversed) {
+  std::string simulcast_str = "recv 1;2,6 send 3,7;4";
+  SimulcastDescription expected;
+  expected.receive_layers().AddLayer(SimulcastLayer("1", false));
+  expected.receive_layers().AddLayerWithAlternatives(
+      {SimulcastLayer("2", false), SimulcastLayer("6", false)});
+  expected.send_layers().AddLayerWithAlternatives(
+      {SimulcastLayer("3", false), SimulcastLayer("7", false)});
+  expected.send_layers().AddLayer(SimulcastLayer("4", false));
+  TestDeserialization(simulcast_str, expected);
+}
+
+// Test simulcast deserialization with some streams set to paused state.
+// A '~' prefix on a rid marks that stream as paused (is_paused == true).
+TEST_F(SimulcastSdpSerializerTest, Deserialize_PausedStreams) {
+  std::string simulcast_str = "recv 1;~2,6 send 3,7;~4";
+  SimulcastDescription expected;
+  expected.receive_layers().AddLayer(SimulcastLayer("1", false));
+  expected.receive_layers().AddLayerWithAlternatives(
+      {SimulcastLayer("2", true), SimulcastLayer("6", false)});
+  expected.send_layers().AddLayerWithAlternatives(
+      {SimulcastLayer("3", false), SimulcastLayer("7", false)});
+  expected.send_layers().AddLayer(SimulcastLayer("4", true));
+  TestDeserialization(simulcast_str, expected);
+}
+
+// Parameterized negative test case for deserialization with invalid inputs.
+// The parameter is one entry of kSimulcastMalformedStrings below.
+TEST_P(SimulcastSdpSerializerTest, SimulcastDeserializationFailed) {
+  SimulcastSdpSerializer deserializer;
+  auto result = deserializer.DeserializeSimulcastDescription(GetParam());
+  EXPECT_FALSE(result.ok());
+}
+
+// The malformed Simulcast inputs to use in the negative test case.
+const char* kSimulcastMalformedStrings[] = {
+    "send ",  // Direction without any layers.
+    "recv ",
+    "recv 1 send",  // Second direction without any layers.
+    "receive 1",    // 'receive' is not a valid direction token.
+    "recv 1;~2,6 recv 3,7;~4",  // Same direction specified twice.
+    "send 1;~2,6 send 3,7;~4",
+    "send ~;~2,6",    // '~' with no rid following it.
+    "send 1; ;~2,6",  // Empty layer entry.
+    "send 1,;~2,6",   // Trailing ',' with no alternative.
+    "recv 1 send 2 3",  // Extra token after the layer lists.
+    "",
+};
+
+INSTANTIATE_TEST_SUITE_P(SimulcastDeserializationErrors,
+                         SimulcastSdpSerializerTest,
+                         ValuesIn(kSimulcastMalformedStrings));
+
+// Test a simple serialization scenario.
+TEST_F(SimulcastSdpSerializerTest, Serialize_SimpleCase) {
+  SimulcastDescription simulcast;
+  simulcast.send_layers().AddLayer(SimulcastLayer("1", false));
+  simulcast.receive_layers().AddLayer(SimulcastLayer("2", false));
+  TestSerialization(simulcast, "send 1 recv 2");
+}
+
+// Test serialization with only send streams.
+TEST_F(SimulcastSdpSerializerTest, Serialize_OnlySend) {
+  SimulcastDescription simulcast;
+  simulcast.send_layers().AddLayer(SimulcastLayer("1", false));
+  simulcast.send_layers().AddLayer(SimulcastLayer("2", false));
+  TestSerialization(simulcast, "send 1;2");
+}
+
+// Test serialization with only receive streams.
+TEST_F(SimulcastSdpSerializerTest, Serialize_OnlyReceive) {
+  SimulcastDescription simulcast;
+  simulcast.receive_layers().AddLayer(SimulcastLayer("1", false));
+  simulcast.receive_layers().AddLayer(SimulcastLayer("2", false));
+  TestSerialization(simulcast, "recv 1;2");
+}
+
+// Test a complex serialization with multiple streams, alternatives and states.
+// Paused layers are expected to serialize with a '~' prefix.
+TEST_F(SimulcastSdpSerializerTest, Serialize_ComplexSerialization) {
+  SimulcastDescription simulcast;
+  simulcast.send_layers().AddLayerWithAlternatives(
+      {SimulcastLayer("2", false), SimulcastLayer("1", true)});
+  simulcast.send_layers().AddLayerWithAlternatives(
+      {SimulcastLayer("4", false), SimulcastLayer("3", false)});
+
+  simulcast.receive_layers().AddLayerWithAlternatives(
+      {SimulcastLayer("6", false), SimulcastLayer("7", false)});
+  simulcast.receive_layers().AddLayer(SimulcastLayer("8", true));
+  simulcast.receive_layers().AddLayerWithAlternatives(
+      {SimulcastLayer("9", false), SimulcastLayer("10", true),
+       SimulcastLayer("11", false)});
+  TestSerialization(simulcast, "send 2,~1;4,3 recv 6,7;~8;9,~10,11");
+}
+
+class RidDescriptionSdpSerializerTest : public TestWithParam<const char*> {
+ public:
+  // Runs a test for deserializing Rid Descriptions.
+  // `str` - The serialized Rid Description to parse.
+  // `expected` - The expected output RidDescription to compare to.
+  void TestDeserialization(const std::string& str,
+                           const RidDescription& expected) const {
+    SimulcastSdpSerializer deserializer;
+    auto result = deserializer.DeserializeRidDescription(str);
+    // ASSERT (not EXPECT): calling value() on a failed RTCErrorOr is
+    // invalid, so bail out of the helper when deserialization fails.
+    ASSERT_TRUE(result.ok());
+    ExpectEqual(expected, result.value());
+  }
+
+  // Runs a test for serializing RidDescriptions.
+  // `rid_description` - The RidDescription to serialize.
+  // `expected` - The expected output string to compare to.
+  void TestSerialization(const RidDescription& rid_description,
+                         const std::string& expected) const {
+    SimulcastSdpSerializer serializer;
+    auto result = serializer.SerializeRidDescription(rid_description);
+    EXPECT_EQ(expected, result);
+  }
+};
+
+// Test serialization for RidDescription that only specifies send.
+TEST_F(RidDescriptionSdpSerializerTest, Serialize_OnlyDirectionSend) {
+  RidDescription rid_description("1", RidDirection::kSend);
+  TestSerialization(rid_description, "1 send");
+}
+
+// Test serialization for RidDescription that only specifies receive.
+TEST_F(RidDescriptionSdpSerializerTest, Serialize_OnlyDirectionReceive) {
+  RidDescription rid_description("2", RidDirection::kReceive);
+  TestSerialization(rid_description, "2 recv");
+}
+
+// Test serialization for RidDescription with format list.
+// Payload type order is expected to be preserved.
+TEST_F(RidDescriptionSdpSerializerTest, Serialize_FormatList) {
+  RidDescription rid_description("3", RidDirection::kSend);
+  rid_description.payload_types = {102, 101};
+  TestSerialization(rid_description, "3 send pt=102,101");
+}
+
+// Test serialization for RidDescription with format list.
+TEST_F(RidDescriptionSdpSerializerTest, Serialize_FormatListSingleFormat) {
+  RidDescription rid_description("4", RidDirection::kReceive);
+  rid_description.payload_types = {100};
+  TestSerialization(rid_description, "4 recv pt=100");
+}
+
+// Test serialization for RidDescription with restriction list.
+// Note: restriction list will be sorted because it is stored in a map.
+TEST_F(RidDescriptionSdpSerializerTest, Serialize_AttributeList) {
+  RidDescription rid_description("5", RidDirection::kSend);
+  rid_description.restrictions["max-width"] = "1280";
+  rid_description.restrictions["max-height"] = "720";
+  TestSerialization(rid_description, "5 send max-height=720;max-width=1280");
+}
+
+// Test serialization for RidDescription with format list and attribute list.
+// Note: restriction list will be sorted because it is stored in a map.
+// The pt= entry is expected to be serialized before the restrictions.
+TEST_F(RidDescriptionSdpSerializerTest, Serialize_FormatAndAttributeList) {
+  RidDescription rid_description("6", RidDirection::kSend);
+  rid_description.payload_types = {103, 104};
+  rid_description.restrictions["max-mbps"] = "108000";
+  rid_description.restrictions["max-br"] = "64000";
+  TestSerialization(rid_description,
+                    "6 send pt=103,104;max-br=64000;max-mbps=108000");
+}
+
+// Test serialization for attribute list that has key with no value.
+// A restriction with an empty value serializes as the bare key (no '=').
+// Note: restriction list will be sorted because it is stored in a map.
+TEST_F(RidDescriptionSdpSerializerTest, Serialize_RestrictionWithoutValue) {
+  RidDescription rid_description("7", RidDirection::kReceive);
+  rid_description.payload_types = {103};
+  rid_description.restrictions["max-width"] = "1280";
+  rid_description.restrictions["max-height"] = "720";
+  rid_description.restrictions["max-myval"] = "";
+  TestSerialization(rid_description,
+                    "7 recv pt=103;max-height=720;max-myval;max-width=1280");
+}
+
+// Test simulcast deserialization with simple send stream.
+TEST_F(RidDescriptionSdpSerializerTest, Deserialize_SimpleSendCase) {
+  RidDescription rid_description("1", RidDirection::kSend);
+  TestDeserialization("1 send", rid_description);
+}
+
+// Test simulcast deserialization with simple receive stream.
+TEST_F(RidDescriptionSdpSerializerTest, Deserialize_SimpleReceiveCase) {
+  RidDescription rid_description("2", RidDirection::kReceive);
+  TestDeserialization("2 recv", rid_description);
+}
+
+// Test simulcast deserialization with single format.
+TEST_F(RidDescriptionSdpSerializerTest, Deserialize_WithFormat) {
+  RidDescription rid_description("3", RidDirection::kSend);
+  rid_description.payload_types = {101};
+  TestDeserialization("3 send pt=101", rid_description);
+}
+
+// Test simulcast deserialization with multiple formats.
+// Payload type order from the input is expected to be preserved.
+TEST_F(RidDescriptionSdpSerializerTest, Deserialize_WithMultipleFormats) {
+  RidDescription rid_description("4", RidDirection::kSend);
+  rid_description.payload_types = {103, 104, 101, 102};
+  TestDeserialization("4 send pt=103,104,101,102", rid_description);
+}
+
+// Test simulcast deserialization with restriction.
+TEST_F(RidDescriptionSdpSerializerTest, Deserialize_WithRestriction) {
+  RidDescription rid_description("5", RidDirection::kReceive);
+  rid_description.restrictions["max-height"] = "720";
+  TestDeserialization("5 recv max-height=720", rid_description);
+}
+
+// Test simulcast deserialization with multiple restrictions.
+// Restriction comparison is keyed by the map, so input order is irrelevant.
+TEST_F(RidDescriptionSdpSerializerTest, Deserialize_WithMultipleRestrictions) {
+  RidDescription rid_description("6", RidDirection::kReceive);
+  rid_description.restrictions["max-height"] = "720";
+  rid_description.restrictions["max-width"] = "1920";
+  rid_description.restrictions["max-fr"] = "60";
+  rid_description.restrictions["max-bps"] = "14000";
+  TestDeserialization(
+      "6 recv max-height=720;max-width=1920;max-bps=14000;max-fr=60",
+      rid_description);
+}
+
+// Test simulcast deserialization with custom (non-standard) restriction.
+TEST_F(RidDescriptionSdpSerializerTest, Deserialize_WithCustomRestrictions) {
+  RidDescription rid_description("7", RidDirection::kSend);
+  rid_description.restrictions["foo"] = "bar";
+  rid_description.restrictions["max-height"] = "720";
+  TestDeserialization("7 send max-height=720;foo=bar", rid_description);
+}
+
+// Test simulcast deserialization with multiple formats and restrictions.
+TEST_F(RidDescriptionSdpSerializerTest, Deserialize_WithFormatAndRestrictions) {
+  RidDescription rid_description("8", RidDirection::kSend);
+  rid_description.payload_types = {104, 103};
+  rid_description.restrictions["max-height"] = "720";
+  rid_description.restrictions["max-width"] = "1920";
+  TestDeserialization("8 send pt=104,103;max-height=720;max-width=1920",
+                      rid_description);
+}
+
+// Test simulcast deserialization with restriction that has no value.
+// A bare key (no '=') deserializes to an entry with an empty value.
+TEST_F(RidDescriptionSdpSerializerTest, Deserialize_RestrictionHasNoValue) {
+  RidDescription rid_description("9", RidDirection::kReceive);
+  rid_description.payload_types = {104};
+  rid_description.restrictions["max-height"];
+  rid_description.restrictions["max-width"] = "1920";
+  TestDeserialization("9 recv pt=104;max-height;max-width=1920",
+                      rid_description);
+}
+
+// Add this test to explicitly indicate that this is not an error.
+// The following string "1 send recv" looks malformed because it specifies
+// two directions, but in fact, the recv can be interpreted as a parameter
+// without a value. While such a use case is dubious, the input string is
+// not malformed.
+TEST_F(RidDescriptionSdpSerializerTest, Deserialize_AmbiguousCase) {
+  RidDescription rid_description("1", RidDirection::kSend);
+  rid_description.restrictions["recv"];  // No value.
+  TestDeserialization("1 send recv", rid_description);
+}
+
+// Parameterized negative test case for deserialization with invalid inputs.
+// The parameter is one entry of kRidDescriptionMalformedStrings below.
+TEST_P(RidDescriptionSdpSerializerTest, RidDescriptionDeserializationFailed) {
+  SimulcastSdpSerializer deserializer;
+  auto result = deserializer.DeserializeRidDescription(GetParam());
+  EXPECT_FALSE(result.ok());
+}
+
+// The malformed Rid Description inputs to use in the negative test case.
+// A valid description needs a rid followed by a direction; the last entries
+// presumably fail because the rid contains characters outside the allowed
+// rid-id grammar (alphanumerics, '-', '_') — see draft-ietf-mmusic-rid.
+const char* kRidDescriptionMalformedStrings[] = {
+    "1",
+    "recv",
+    "send",
+    "recv 1",
+    "send 1",
+    "1 receive",
+    "one direction",
+    "1 send pt=1 max-width=720",  // The ' ' should be ';' in restriction list.
+    "1 recv ;",
+    "1 recv =",
+    "1 recv a=b=c",
+    "1 send max-width=720;pt=101",  // pt= should appear first.
+    "1 send pt=101;pt=102",
+    "1 send pt=101,101",
+    "1 recv max-width=720;max-width=720",
+    "1 send pt=",
+    "1 send pt=abc",
+    "1 recv ;;",
+    "~1 recv",
+    "1$2 send",
+    "1=2 send",
+    "1* send",
+};
+
+INSTANTIATE_TEST_SUITE_P(RidDescriptionDeserializationErrors,
+                         RidDescriptionSdpSerializerTest,
+                         ValuesIn(kRidDescriptionMalformedStrings));
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/slow_peer_connection_integration_test.cc b/third_party/libwebrtc/pc/slow_peer_connection_integration_test.cc
new file mode 100644
index 0000000000..fd9d3417df
--- /dev/null
+++ b/third_party/libwebrtc/pc/slow_peer_connection_integration_test.cc
@@ -0,0 +1,506 @@
+/*
+ * Copyright 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file is intended for PeerConnection integration tests that are
+// slow to execute (currently defined as more than 5 seconds per test).
+
+#include <stdint.h>
+
+#include <memory>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "absl/algorithm/container.h"
+#include "absl/strings/string_view.h"
+#include "absl/types/optional.h"
+#include "api/dtmf_sender_interface.h"
+#include "api/peer_connection_interface.h"
+#include "api/rtp_receiver_interface.h"
+#include "api/scoped_refptr.h"
+#include "api/units/time_delta.h"
+#include "p2p/base/port_allocator.h"
+#include "p2p/base/port_interface.h"
+#include "p2p/base/stun_server.h"
+#include "p2p/base/test_stun_server.h"
+#include "pc/test/integration_test_helpers.h"
+#include "pc/test/mock_peer_connection_observers.h"
+#include "rtc_base/fake_clock.h"
+#include "rtc_base/fake_network.h"
+#include "rtc_base/firewall_socket_server.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/socket_address.h"
+#include "rtc_base/ssl_certificate.h"
+#include "rtc_base/test_certificate_verifier.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+
+namespace {
+
+// Parameterizes the integration tests over SdpSemantics so that every
+// TEST_P in this file runs under both Plan B and Unified Plan.
+class PeerConnectionIntegrationTest
+    : public PeerConnectionIntegrationBaseTest,
+      public ::testing::WithParamInterface<SdpSemantics> {
+ protected:
+  PeerConnectionIntegrationTest()
+      : PeerConnectionIntegrationBaseTest(GetParam()) {}
+};
+
+// Fake clock must be set before threads are started to prevent race on
+// Set/GetClockForTesting().
+// To achieve that, multiple inheritance is used as a mixin pattern
+// where order of construction is finely controlled.
+// This also ensures peerconnection is closed before switching back to non-fake
+// clock, avoiding other races and DCHECK failures such as in rtp_sender.cc.
+class FakeClockForTest : public rtc::ScopedFakeClock {
+ protected:
+  FakeClockForTest() {
+    // Some things use a time of "0" as a special value, so we need to start out
+    // the fake clock at a nonzero time.
+    // TODO(deadbeef): Fix this.
+    AdvanceTime(webrtc::TimeDelta::Seconds(1000));
+  }
+
+  // Explicit handle for tests that need to pass the fake clock to
+  // *_SIMULATED_WAIT macros.
+  ScopedFakeClock& FakeClock() { return *this; }
+};
+
+// Ensure FakeClockForTest is constructed first (see class for rationale).
+class PeerConnectionIntegrationTestWithFakeClock
+    : public FakeClockForTest,
+      public PeerConnectionIntegrationTest {};
+
+// Fixture pinned to the deprecated Plan B SDP semantics.
+class PeerConnectionIntegrationTestPlanB
+    : public PeerConnectionIntegrationBaseTest {
+ protected:
+  PeerConnectionIntegrationTestPlanB()
+      : PeerConnectionIntegrationBaseTest(SdpSemantics::kPlanB_DEPRECATED) {}
+};
+
+// Fixture pinned to Unified Plan SDP semantics.
+class PeerConnectionIntegrationTestUnifiedPlan
+    : public PeerConnectionIntegrationBaseTest {
+ protected:
+  PeerConnectionIntegrationTestUnifiedPlan()
+      : PeerConnectionIntegrationBaseTest(SdpSemantics::kUnifiedPlan) {}
+};
+
+// Test the OnFirstPacketReceived callback from audio/video RtpReceivers. This
+// includes testing that the callback is invoked if an observer is connected
+// after the first packet has already been received.
+TEST_P(PeerConnectionIntegrationTest,
+       RtpReceiverObserverOnFirstPacketReceived) {
+  ASSERT_TRUE(CreatePeerConnectionWrappers());
+  ConnectFakeSignaling();
+  caller()->AddAudioVideoTracks();
+  callee()->AddAudioVideoTracks();
+  // Start offer/answer exchange and wait for it to complete.
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+  // Should be one receiver each for audio/video.
+  EXPECT_EQ(2U, caller()->rtp_receiver_observers().size());
+  EXPECT_EQ(2U, callee()->rtp_receiver_observers().size());
+  // Wait for all "first packet received" callbacks to be fired.
+  EXPECT_TRUE_WAIT(
+      absl::c_all_of(caller()->rtp_receiver_observers(),
+                     [](const std::unique_ptr<MockRtpReceiverObserver>& o) {
+                       return o->first_packet_received();
+                     }),
+      kMaxWaitForFramesMs);
+  EXPECT_TRUE_WAIT(
+      absl::c_all_of(callee()->rtp_receiver_observers(),
+                     [](const std::unique_ptr<MockRtpReceiverObserver>& o) {
+                       return o->first_packet_received();
+                     }),
+      kMaxWaitForFramesMs);
+  // If new observers are set after the first packet was already received, the
+  // callback should still be invoked.
+  caller()->ResetRtpReceiverObservers();
+  callee()->ResetRtpReceiverObservers();
+  EXPECT_EQ(2U, caller()->rtp_receiver_observers().size());
+  EXPECT_EQ(2U, callee()->rtp_receiver_observers().size());
+  // No WAIT here: the callback should fire synchronously for the new
+  // observers since the first packet already arrived above.
+  EXPECT_TRUE(
+      absl::c_all_of(caller()->rtp_receiver_observers(),
+                     [](const std::unique_ptr<MockRtpReceiverObserver>& o) {
+                       return o->first_packet_received();
+                     }));
+  EXPECT_TRUE(
+      absl::c_all_of(callee()->rtp_receiver_observers(),
+                     [](const std::unique_ptr<MockRtpReceiverObserver>& o) {
+                       return o->first_packet_received();
+                     }));
+}
+
+// Records DTMF tone-change callbacks so tests can inspect the played tones.
+class DummyDtmfObserver : public DtmfSenderObserverInterface {
+ public:
+  DummyDtmfObserver() = default;
+
+  // DtmfSenderObserverInterface implementation.
+  void OnToneChange(const std::string& tone) override {
+    tones_.push_back(tone);
+    // An empty tone marks the end of the queued tone sequence.
+    completed_ |= tone.empty();
+  }
+
+  // All tones observed so far, in callback order.
+  const std::vector<std::string>& tones() const { return tones_; }
+  // True once the empty "end of sequence" tone has been observed.
+  bool completed() const { return completed_; }
+
+ private:
+  bool completed_ = false;
+  std::vector<std::string> tones_;
+};
+
+// Verifies that when the injected TLS certificate verifier rejects the TURN
+// server's certificate, the (relay-only) connection cannot be established,
+// and that both clients' verifiers were actually consulted.
+TEST_P(PeerConnectionIntegrationTest,
+       SSLCertificateVerifierFailureUsedForTurnConnectionsFailsConnection) {
+  static const rtc::SocketAddress turn_server_internal_address{"88.88.88.0",
+                                                               3478};
+  static const rtc::SocketAddress turn_server_external_address{"88.88.88.1", 0};
+
+  // Enable TCP-TLS for the fake turn server. We need to pass in 88.88.88.0 so
+  // that host name verification passes on the fake certificate.
+  CreateTurnServer(turn_server_internal_address, turn_server_external_address,
+                   cricket::PROTO_TLS, "88.88.88.0");
+
+  webrtc::PeerConnectionInterface::IceServer ice_server;
+  ice_server.urls.push_back("turns:88.88.88.0:3478?transport=tcp");
+  ice_server.username = "test";
+  ice_server.password = "test";
+
+  PeerConnectionInterface::RTCConfiguration client_1_config;
+  client_1_config.servers.push_back(ice_server);
+  client_1_config.type = webrtc::PeerConnectionInterface::kRelay;
+
+  PeerConnectionInterface::RTCConfiguration client_2_config;
+  client_2_config.servers.push_back(ice_server);
+  // Setting the type to kRelay forces the connection to go through a TURN
+  // server.
+  client_2_config.type = webrtc::PeerConnectionInterface::kRelay;
+
+  // Keep raw (non-owning) pointers so we can verify calls later; ownership
+  // is transferred to the PeerConnectionDependencies below.
+  rtc::TestCertificateVerifier* client_1_cert_verifier =
+      new rtc::TestCertificateVerifier();
+  client_1_cert_verifier->verify_certificate_ = false;
+  rtc::TestCertificateVerifier* client_2_cert_verifier =
+      new rtc::TestCertificateVerifier();
+  client_2_cert_verifier->verify_certificate_ = false;
+
+  // Create the dependencies with the test certificate verifier.
+  webrtc::PeerConnectionDependencies client_1_deps(nullptr);
+  client_1_deps.tls_cert_verifier =
+      std::unique_ptr<rtc::TestCertificateVerifier>(client_1_cert_verifier);
+  webrtc::PeerConnectionDependencies client_2_deps(nullptr);
+  client_2_deps.tls_cert_verifier =
+      std::unique_ptr<rtc::TestCertificateVerifier>(client_2_cert_verifier);
+
+  ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndDeps(
+      client_1_config, std::move(client_1_deps), client_2_config,
+      std::move(client_2_deps)));
+  ConnectFakeSignaling();
+
+  // Set "offer to receive audio/video" without adding any tracks, so we just
+  // set up ICE/DTLS with no media.
+  PeerConnectionInterface::RTCOfferAnswerOptions options;
+  options.offer_to_receive_audio = 1;
+  options.offer_to_receive_video = 1;
+  caller()->SetOfferAnswerOptions(options);
+  caller()->CreateAndSetAndSignalOffer();
+  bool wait_res = true;
+  // TODO(bugs.webrtc.org/9219): When IceConnectionState is implemented
+  // properly, should be able to just wait for a state of "failed" instead of
+  // waiting a fixed 10 seconds.
+  WAIT_(DtlsConnected(), kDefaultTimeout, wait_res);
+  // The wait must time out: DTLS should never connect when certificate
+  // verification fails.
+  ASSERT_FALSE(wait_res);
+
+  EXPECT_GT(client_1_cert_verifier->call_count_, 0u);
+  EXPECT_GT(client_2_cert_verifier->call_count_, 0u);
+}
+
+// Test that we can get capture start ntp time.
+TEST_P(PeerConnectionIntegrationTest, GetCaptureStartNtpTimeWithOldStatsApi) {
+  ASSERT_TRUE(CreatePeerConnectionWrappers());
+  ConnectFakeSignaling();
+  caller()->AddAudioTrack();
+
+  callee()->AddAudioTrack();
+
+  // Do offer/answer, wait for the callee to receive some frames.
+  caller()->CreateAndSetAndSignalOffer();
+  ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+
+  // Get the remote audio track created on the receiver, so they can be used as
+  // GetStats filters.
+  auto receivers = callee()->pc()->GetReceivers();
+  ASSERT_EQ(1u, receivers.size());
+  auto remote_audio_track = receivers[0]->track();
+
+  // Get the audio output level stats. Note that the level is not available
+  // until an RTCP packet has been received, hence the generous wait below.
+  EXPECT_TRUE_WAIT(callee()->OldGetStatsForTrack(remote_audio_track.get())
+                           ->CaptureStartNtpTime() > 0,
+                   2 * kMaxWaitForFramesMs);
+}
+
+// Test that firewalling the ICE connection causes the clients to identify the
+// disconnected state and then removing the firewall causes them to reconnect.
+// Parameterized over (SdpSemantics, (test name, port allocator flags)).
+class PeerConnectionIntegrationIceStatesTest
+    : public PeerConnectionIntegrationBaseTest,
+      public ::testing::WithParamInterface<
+          std::tuple<SdpSemantics, std::tuple<std::string, uint32_t>>> {
+ protected:
+  PeerConnectionIntegrationIceStatesTest()
+      : PeerConnectionIntegrationBaseTest(std::get<0>(GetParam())) {
+    // Second element of the inner tuple carries the port allocator flags;
+    // the string element is only used to name the test instantiation.
+    port_allocator_flags_ = std::get<1>(std::get<1>(GetParam()));
+  }
+
+  // Starts a fake STUN server behind the firewall socket server.
+  void StartStunServer(const SocketAddress& server_address) {
+    stun_server_.reset(
+        cricket::TestStunServer::Create(firewall(), server_address));
+  }
+
+  // True when the current parameterization enables IPv6 candidates.
+  bool TestIPv6() {
+    return (port_allocator_flags_ & cricket::PORTALLOCATOR_ENABLE_IPV6);
+  }
+
+  // Applies the parameterized flags to both caller and callee.
+  void SetPortAllocatorFlags() {
+    PeerConnectionIntegrationBaseTest::SetPortAllocatorFlags(
+        port_allocator_flags_, port_allocator_flags_);
+  }
+
+  // Fake local addresses for the caller (IPv6 added only when enabled).
+  std::vector<SocketAddress> CallerAddresses() {
+    std::vector<SocketAddress> addresses;
+    addresses.push_back(SocketAddress("1.1.1.1", 0));
+    if (TestIPv6()) {
+      addresses.push_back(SocketAddress("1111:0:a:b:c:d:e:f", 0));
+    }
+    return addresses;
+  }
+
+  // Fake local addresses for the callee (IPv6 added only when enabled).
+  std::vector<SocketAddress> CalleeAddresses() {
+    std::vector<SocketAddress> addresses;
+    addresses.push_back(SocketAddress("2.2.2.2", 0));
+    if (TestIPv6()) {
+      addresses.push_back(SocketAddress("2222:0:a:b:c:d:e:f", 0));
+    }
+    return addresses;
+  }
+
+  void SetUpNetworkInterfaces() {
+    // Remove the default interfaces added by the test infrastructure.
+    caller()->network_manager()->RemoveInterface(kDefaultLocalAddress);
+    callee()->network_manager()->RemoveInterface(kDefaultLocalAddress);
+
+    // Add network addresses for test.
+    for (const auto& caller_address : CallerAddresses()) {
+      caller()->network_manager()->AddInterface(caller_address);
+    }
+    for (const auto& callee_address : CalleeAddresses()) {
+      callee()->network_manager()->AddInterface(callee_address);
+    }
+  }
+
+ private:
+  uint32_t port_allocator_flags_;
+  std::unique_ptr<cricket::TestStunServer> stun_server_;
+};
+
+// Ensure FakeClockForTest is constructed first (see class for rationale).
+class PeerConnectionIntegrationIceStatesTestWithFakeClock
+    : public FakeClockForTest,
+      public PeerConnectionIntegrationIceStatesTest {};
+
+#if !defined(THREAD_SANITIZER)
+// This test provokes TSAN errors. bugs.webrtc.org/11282
+
+// Tests that the PeerConnection goes through all the ICE gathering/connection
+// states over the duration of the call. This includes Disconnected and Failed
+// states, induced by putting a firewall between the peers and waiting for them
+// to time out.
+TEST_P(PeerConnectionIntegrationIceStatesTestWithFakeClock, VerifyIceStates) {
+ const SocketAddress kStunServerAddress =
+ SocketAddress("99.99.99.1", cricket::STUN_SERVER_PORT);
+ StartStunServer(kStunServerAddress);
+
+ PeerConnectionInterface::RTCConfiguration config;
+ PeerConnectionInterface::IceServer ice_stun_server;
+ ice_stun_server.urls.push_back(
+ "stun:" + kStunServerAddress.HostAsURIString() + ":" +
+ kStunServerAddress.PortAsString());
+ config.servers.push_back(ice_stun_server);
+
+ ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(config, config));
+ ConnectFakeSignaling();
+ SetPortAllocatorFlags();
+ SetUpNetworkInterfaces();
+ caller()->AddAudioVideoTracks();
+ callee()->AddAudioVideoTracks();
+
+ // Initial state before anything happens.
+ ASSERT_EQ(PeerConnectionInterface::kIceGatheringNew,
+ caller()->ice_gathering_state());
+ ASSERT_EQ(PeerConnectionInterface::kIceConnectionNew,
+ caller()->ice_connection_state());
+ ASSERT_EQ(PeerConnectionInterface::kIceConnectionNew,
+ caller()->standardized_ice_connection_state());
+
+ // Start the call by creating the offer, setting it as the local description,
+ // then sending it to the peer who will respond with an answer. This happens
+ // asynchronously so that we can watch the states as it runs in the
+ // background.
+ caller()->CreateAndSetAndSignalOffer();
+
+ ASSERT_EQ_SIMULATED_WAIT(PeerConnectionInterface::kIceConnectionCompleted,
+ caller()->ice_connection_state(), kDefaultTimeout,
+ FakeClock());
+ ASSERT_EQ_SIMULATED_WAIT(PeerConnectionInterface::kIceConnectionCompleted,
+ caller()->standardized_ice_connection_state(),
+ kDefaultTimeout, FakeClock());
+
+ // Verify that the observer was notified of the intermediate transitions.
+ EXPECT_THAT(caller()->ice_connection_state_history(),
+ ElementsAre(PeerConnectionInterface::kIceConnectionChecking,
+ PeerConnectionInterface::kIceConnectionConnected,
+ PeerConnectionInterface::kIceConnectionCompleted));
+ EXPECT_THAT(caller()->standardized_ice_connection_state_history(),
+ ElementsAre(PeerConnectionInterface::kIceConnectionChecking,
+ PeerConnectionInterface::kIceConnectionConnected,
+ PeerConnectionInterface::kIceConnectionCompleted));
+ EXPECT_THAT(
+ caller()->peer_connection_state_history(),
+ ElementsAre(PeerConnectionInterface::PeerConnectionState::kConnecting,
+ PeerConnectionInterface::PeerConnectionState::kConnected));
+ EXPECT_THAT(caller()->ice_gathering_state_history(),
+ ElementsAre(PeerConnectionInterface::kIceGatheringGathering,
+ PeerConnectionInterface::kIceGatheringComplete));
+
+ // Block connections to/from the caller and wait for ICE to become
+ // disconnected.
+ for (const auto& caller_address : CallerAddresses()) {
+ firewall()->AddRule(false, rtc::FP_ANY, rtc::FD_ANY, caller_address);
+ }
+ RTC_LOG(LS_INFO) << "Firewall rules applied";
+ ASSERT_EQ_SIMULATED_WAIT(PeerConnectionInterface::kIceConnectionDisconnected,
+ caller()->ice_connection_state(), kDefaultTimeout,
+ FakeClock());
+ ASSERT_EQ_SIMULATED_WAIT(PeerConnectionInterface::kIceConnectionDisconnected,
+ caller()->standardized_ice_connection_state(),
+ kDefaultTimeout, FakeClock());
+
+ // Let ICE re-establish by removing the firewall rules.
+ firewall()->ClearRules();
+ RTC_LOG(LS_INFO) << "Firewall rules cleared";
+ ASSERT_EQ_SIMULATED_WAIT(PeerConnectionInterface::kIceConnectionCompleted,
+ caller()->ice_connection_state(), kDefaultTimeout,
+ FakeClock());
+ ASSERT_EQ_SIMULATED_WAIT(PeerConnectionInterface::kIceConnectionCompleted,
+ caller()->standardized_ice_connection_state(),
+ kDefaultTimeout, FakeClock());
+
+ // According to RFC7675, if there is no response within 30 seconds then the
+ // peer should consider the other side to have rejected the connection. This
+ // is signaled by the state transitioning to "failed".
+ constexpr int kConsentTimeout = 30000;
+ for (const auto& caller_address : CallerAddresses()) {
+ firewall()->AddRule(false, rtc::FP_ANY, rtc::FD_ANY, caller_address);
+ }
+ RTC_LOG(LS_INFO) << "Firewall rules applied again";
+ ASSERT_EQ_SIMULATED_WAIT(PeerConnectionInterface::kIceConnectionFailed,
+ caller()->ice_connection_state(), kConsentTimeout,
+ FakeClock());
+ ASSERT_EQ_SIMULATED_WAIT(PeerConnectionInterface::kIceConnectionFailed,
+ caller()->standardized_ice_connection_state(),
+ kConsentTimeout, FakeClock());
+}
+#endif
+
+// This test sets up a call that's transferred to a new caller with a different
+// DTLS fingerprint.
+TEST_P(PeerConnectionIntegrationTest, CallTransferredForCallee) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+ caller()->AddAudioVideoTracks();
+ callee()->AddAudioVideoTracks();
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+
+ // Keep the original peer around which will still send packets to the
+ // receiving client. These SRTP packets will be dropped.
+ std::unique_ptr<PeerConnectionIntegrationWrapper> original_peer(
+ SetCallerPcWrapperAndReturnCurrent(
+ CreatePeerConnectionWrapperWithAlternateKey().release()));
+ // TODO(deadbeef): Why do we call Close here? That goes against the comment
+ // directly above.
+ original_peer->pc()->Close();
+
+ ConnectFakeSignaling();
+ caller()->AddAudioVideoTracks();
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ // Wait for some additional frames to be transmitted end-to-end.
+ MediaExpectations media_expectations;
+ media_expectations.ExpectBidirectionalAudioAndVideo();
+ ASSERT_TRUE(ExpectNewFrames(media_expectations));
+}
+
+// This test sets up a call that's transferred to a new callee with a different
+// DTLS fingerprint.
+TEST_P(PeerConnectionIntegrationTest, CallTransferredForCaller) {
+ ASSERT_TRUE(CreatePeerConnectionWrappers());
+ ConnectFakeSignaling();
+ caller()->AddAudioVideoTracks();
+ callee()->AddAudioVideoTracks();
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+
+ // Keep the original peer around which will still send packets to the
+ // receiving client. These SRTP packets will be dropped.
+ std::unique_ptr<PeerConnectionIntegrationWrapper> original_peer(
+ SetCalleePcWrapperAndReturnCurrent(
+ CreatePeerConnectionWrapperWithAlternateKey().release()));
+ // TODO(deadbeef): Why do we call Close here? That goes against the comment
+ // directly above.
+ original_peer->pc()->Close();
+
+ ConnectFakeSignaling();
+ callee()->AddAudioVideoTracks();
+ caller()->SetOfferAnswerOptions(IceRestartOfferAnswerOptions());
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+ // Wait for some additional frames to be transmitted end-to-end.
+ MediaExpectations media_expectations;
+ media_expectations.ExpectBidirectionalAudioAndVideo();
+ ASSERT_TRUE(ExpectNewFrames(media_expectations));
+}
+
+INSTANTIATE_TEST_SUITE_P(PeerConnectionIntegrationTest,
+ PeerConnectionIntegrationTest,
+ Values(SdpSemantics::kPlanB_DEPRECATED,
+ SdpSemantics::kUnifiedPlan));
+
+constexpr uint32_t kFlagsIPv4NoStun = cricket::PORTALLOCATOR_DISABLE_TCP |
+ cricket::PORTALLOCATOR_DISABLE_STUN |
+ cricket::PORTALLOCATOR_DISABLE_RELAY;
+constexpr uint32_t kFlagsIPv6NoStun =
+ cricket::PORTALLOCATOR_DISABLE_TCP | cricket::PORTALLOCATOR_DISABLE_STUN |
+ cricket::PORTALLOCATOR_ENABLE_IPV6 | cricket::PORTALLOCATOR_DISABLE_RELAY;
+constexpr uint32_t kFlagsIPv4Stun =
+ cricket::PORTALLOCATOR_DISABLE_TCP | cricket::PORTALLOCATOR_DISABLE_RELAY;
+
+INSTANTIATE_TEST_SUITE_P(
+ PeerConnectionIntegrationTest,
+ PeerConnectionIntegrationIceStatesTestWithFakeClock,
+ Combine(Values(SdpSemantics::kPlanB_DEPRECATED, SdpSemantics::kUnifiedPlan),
+ Values(std::make_pair("IPv4 no STUN", kFlagsIPv4NoStun),
+ std::make_pair("IPv6 no STUN", kFlagsIPv6NoStun),
+ std::make_pair("IPv4 with STUN", kFlagsIPv4Stun))));
+
+} // namespace
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/srtp_filter.cc b/third_party/libwebrtc/pc/srtp_filter.cc
new file mode 100644
index 0000000000..b8be63cd22
--- /dev/null
+++ b/third_party/libwebrtc/pc/srtp_filter.cc
@@ -0,0 +1,280 @@
+/*
+ * Copyright 2009 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/srtp_filter.h"
+
+#include <string.h>
+
+#include <string>
+
+#include "absl/strings/match.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/ssl_stream_adapter.h"
+#include "rtc_base/third_party/base64/base64.h"
+#include "rtc_base/zero_memory.h"
+
+namespace cricket {
+
+SrtpFilter::SrtpFilter() {}
+
+SrtpFilter::~SrtpFilter() {}
+
+bool SrtpFilter::IsActive() const {
+ return state_ >= ST_ACTIVE;
+}
+
+bool SrtpFilter::Process(const std::vector<CryptoParams>& cryptos,
+ webrtc::SdpType type,
+ ContentSource source) {
+ bool ret = false;
+ switch (type) {
+ case webrtc::SdpType::kOffer:
+ ret = SetOffer(cryptos, source);
+ break;
+ case webrtc::SdpType::kPrAnswer:
+ ret = SetProvisionalAnswer(cryptos, source);
+ break;
+ case webrtc::SdpType::kAnswer:
+ ret = SetAnswer(cryptos, source);
+ break;
+ default:
+ break;
+ }
+
+ if (!ret) {
+ return false;
+ }
+
+ return true;
+}
+
+bool SrtpFilter::SetOffer(const std::vector<CryptoParams>& offer_params,
+ ContentSource source) {
+ if (!ExpectOffer(source)) {
+ RTC_LOG(LS_ERROR) << "Wrong state to update SRTP offer";
+ return false;
+ }
+ return StoreParams(offer_params, source);
+}
+
+bool SrtpFilter::SetAnswer(const std::vector<CryptoParams>& answer_params,
+ ContentSource source) {
+ return DoSetAnswer(answer_params, source, true);
+}
+
+bool SrtpFilter::SetProvisionalAnswer(
+ const std::vector<CryptoParams>& answer_params,
+ ContentSource source) {
+ return DoSetAnswer(answer_params, source, false);
+}
+
+bool SrtpFilter::ExpectOffer(ContentSource source) {
+ return ((state_ == ST_INIT) || (state_ == ST_ACTIVE) ||
+ (state_ == ST_SENTOFFER && source == CS_LOCAL) ||
+ (state_ == ST_SENTUPDATEDOFFER && source == CS_LOCAL) ||
+ (state_ == ST_RECEIVEDOFFER && source == CS_REMOTE) ||
+ (state_ == ST_RECEIVEDUPDATEDOFFER && source == CS_REMOTE));
+}
+
+bool SrtpFilter::StoreParams(const std::vector<CryptoParams>& params,
+ ContentSource source) {
+ offer_params_ = params;
+ if (state_ == ST_INIT) {
+ state_ = (source == CS_LOCAL) ? ST_SENTOFFER : ST_RECEIVEDOFFER;
+ } else if (state_ == ST_ACTIVE) {
+ state_ =
+ (source == CS_LOCAL) ? ST_SENTUPDATEDOFFER : ST_RECEIVEDUPDATEDOFFER;
+ }
+ return true;
+}
+
+bool SrtpFilter::ExpectAnswer(ContentSource source) {
+ return ((state_ == ST_SENTOFFER && source == CS_REMOTE) ||
+ (state_ == ST_RECEIVEDOFFER && source == CS_LOCAL) ||
+ (state_ == ST_SENTUPDATEDOFFER && source == CS_REMOTE) ||
+ (state_ == ST_RECEIVEDUPDATEDOFFER && source == CS_LOCAL) ||
+ (state_ == ST_SENTPRANSWER_NO_CRYPTO && source == CS_LOCAL) ||
+ (state_ == ST_SENTPRANSWER && source == CS_LOCAL) ||
+ (state_ == ST_RECEIVEDPRANSWER_NO_CRYPTO && source == CS_REMOTE) ||
+ (state_ == ST_RECEIVEDPRANSWER && source == CS_REMOTE));
+}
+
+bool SrtpFilter::DoSetAnswer(const std::vector<CryptoParams>& answer_params,
+ ContentSource source,
+ bool final) {
+ if (!ExpectAnswer(source)) {
+ RTC_LOG(LS_ERROR) << "Invalid state for SRTP answer";
+ return false;
+ }
+
+  // If the answer doesn't request crypto, complete the negotiation of an
+  // unencrypted session.
+  // Otherwise, finalize the parameters and apply them.
+ if (answer_params.empty()) {
+ if (final) {
+ return ResetParams();
+ } else {
+ // Need to wait for the final answer to decide if
+ // we should go to Active state.
+ state_ = (source == CS_LOCAL) ? ST_SENTPRANSWER_NO_CRYPTO
+ : ST_RECEIVEDPRANSWER_NO_CRYPTO;
+ return true;
+ }
+ }
+ CryptoParams selected_params;
+ if (!NegotiateParams(answer_params, &selected_params))
+ return false;
+
+ const CryptoParams& new_send_params =
+ (source == CS_REMOTE) ? selected_params : answer_params[0];
+ const CryptoParams& new_recv_params =
+ (source == CS_REMOTE) ? answer_params[0] : selected_params;
+ if (!ApplySendParams(new_send_params) || !ApplyRecvParams(new_recv_params)) {
+ return false;
+ }
+ applied_send_params_ = new_send_params;
+ applied_recv_params_ = new_recv_params;
+
+ if (final) {
+ offer_params_.clear();
+ state_ = ST_ACTIVE;
+ } else {
+ state_ = (source == CS_LOCAL) ? ST_SENTPRANSWER : ST_RECEIVEDPRANSWER;
+ }
+ return true;
+}
+
+bool SrtpFilter::NegotiateParams(const std::vector<CryptoParams>& answer_params,
+ CryptoParams* selected_params) {
+ // We're processing an accept. We should have exactly one set of params,
+ // unless the offer didn't mention crypto, in which case we shouldn't be here.
+ bool ret = (answer_params.size() == 1U && !offer_params_.empty());
+ if (ret) {
+ // We should find a match between the answer params and the offered params.
+ std::vector<CryptoParams>::const_iterator it;
+ for (it = offer_params_.begin(); it != offer_params_.end(); ++it) {
+ if (answer_params[0].Matches(*it)) {
+ break;
+ }
+ }
+
+ if (it != offer_params_.end()) {
+ *selected_params = *it;
+ } else {
+ ret = false;
+ }
+ }
+
+ if (!ret) {
+ RTC_LOG(LS_WARNING) << "Invalid parameters in SRTP answer";
+ }
+ return ret;
+}
+
+bool SrtpFilter::ResetParams() {
+ offer_params_.clear();
+ applied_send_params_ = CryptoParams();
+ applied_recv_params_ = CryptoParams();
+ send_crypto_suite_ = absl::nullopt;
+ recv_crypto_suite_ = absl::nullopt;
+ send_key_.Clear();
+ recv_key_.Clear();
+ state_ = ST_INIT;
+ return true;
+}
+
+bool SrtpFilter::ApplySendParams(const CryptoParams& send_params) {
+ if (applied_send_params_.crypto_suite == send_params.crypto_suite &&
+ applied_send_params_.key_params == send_params.key_params) {
+ RTC_LOG(LS_INFO) << "Applying the same SRTP send parameters again. No-op.";
+
+ // We do not want to reset the ROC if the keys are the same. So just return.
+ return true;
+ }
+
+ send_crypto_suite_ = rtc::SrtpCryptoSuiteFromName(send_params.crypto_suite);
+ if (send_crypto_suite_ == rtc::kSrtpInvalidCryptoSuite) {
+ RTC_LOG(LS_WARNING) << "Unknown crypto suite(s) received:"
+ " send crypto_suite "
+ << send_params.crypto_suite;
+ return false;
+ }
+
+ int send_key_len, send_salt_len;
+ if (!rtc::GetSrtpKeyAndSaltLengths(*send_crypto_suite_, &send_key_len,
+ &send_salt_len)) {
+ RTC_LOG(LS_ERROR) << "Could not get lengths for crypto suite(s):"
+ " send crypto_suite "
+ << send_params.crypto_suite;
+ return false;
+ }
+
+ send_key_ = rtc::ZeroOnFreeBuffer<uint8_t>(send_key_len + send_salt_len);
+ return ParseKeyParams(send_params.key_params, send_key_.data(),
+ send_key_.size());
+}
+
+bool SrtpFilter::ApplyRecvParams(const CryptoParams& recv_params) {
+ if (applied_recv_params_.crypto_suite == recv_params.crypto_suite &&
+ applied_recv_params_.key_params == recv_params.key_params) {
+ RTC_LOG(LS_INFO) << "Applying the same SRTP recv parameters again. No-op.";
+
+ // We do not want to reset the ROC if the keys are the same. So just return.
+ return true;
+ }
+
+ recv_crypto_suite_ = rtc::SrtpCryptoSuiteFromName(recv_params.crypto_suite);
+ if (recv_crypto_suite_ == rtc::kSrtpInvalidCryptoSuite) {
+ RTC_LOG(LS_WARNING) << "Unknown crypto suite(s) received:"
+ " recv crypto_suite "
+ << recv_params.crypto_suite;
+ return false;
+ }
+
+ int recv_key_len, recv_salt_len;
+ if (!rtc::GetSrtpKeyAndSaltLengths(*recv_crypto_suite_, &recv_key_len,
+ &recv_salt_len)) {
+ RTC_LOG(LS_ERROR) << "Could not get lengths for crypto suite(s):"
+ " recv crypto_suite "
+ << recv_params.crypto_suite;
+ return false;
+ }
+
+ recv_key_ = rtc::ZeroOnFreeBuffer<uint8_t>(recv_key_len + recv_salt_len);
+ return ParseKeyParams(recv_params.key_params, recv_key_.data(),
+ recv_key_.size());
+}
+
+bool SrtpFilter::ParseKeyParams(const std::string& key_params,
+ uint8_t* key,
+ size_t len) {
+ // example key_params: "inline:YUJDZGVmZ2hpSktMbW9QUXJzVHVWd3l6MTIzNDU2"
+
+ // Fail if key-method is wrong.
+ if (!absl::StartsWith(key_params, "inline:")) {
+ return false;
+ }
+
+ // Fail if base64 decode fails, or the key is the wrong size.
+ std::string key_b64(key_params.substr(7)), key_str;
+ if (!rtc::Base64::Decode(key_b64, rtc::Base64::DO_STRICT, &key_str,
+ nullptr) ||
+ key_str.size() != len) {
+ return false;
+ }
+
+ memcpy(key, key_str.c_str(), len);
+ // TODO(bugs.webrtc.org/8905): Switch to ZeroOnFreeBuffer for storing
+ // sensitive data.
+ rtc::ExplicitZeroMemory(&key_str[0], key_str.size());
+ return true;
+}
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/pc/srtp_filter.h b/third_party/libwebrtc/pc/srtp_filter.h
new file mode 100644
index 0000000000..59c43f624b
--- /dev/null
+++ b/third_party/libwebrtc/pc/srtp_filter.h
@@ -0,0 +1,147 @@
+/*
+ * Copyright 2009 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_SRTP_FILTER_H_
+#define PC_SRTP_FILTER_H_
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <list>
+#include <map>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/array_view.h"
+#include "api/crypto_params.h"
+#include "api/jsep.h"
+#include "api/sequence_checker.h"
+#include "pc/session_description.h"
+#include "rtc_base/buffer.h"
+#include "rtc_base/ssl_stream_adapter.h"
+
+// Forward declaration to avoid pulling in libsrtp headers here
+struct srtp_event_data_t;
+struct srtp_ctx_t_;
+
+namespace cricket {
+
+// A helper class used to negotiate SDES crypto params.
+// TODO(zhihuang): Find a better name for this class, like "SdesNegotiator".
+class SrtpFilter {
+ public:
+ enum Mode { PROTECT, UNPROTECT };
+ enum Error {
+ ERROR_NONE,
+ ERROR_FAIL,
+ ERROR_AUTH,
+ ERROR_REPLAY,
+ };
+
+ SrtpFilter();
+ ~SrtpFilter();
+
+ // Whether the filter is active (i.e. crypto has been properly negotiated).
+ bool IsActive() const;
+
+ // Handle the offer/answer negotiation of the crypto parameters internally.
+  // TODO(zhihuang): Make SetOffer/ProvisionalAnswer/Answer private as helper
+  // methods once we start using Process.
+ bool Process(const std::vector<CryptoParams>& cryptos,
+ webrtc::SdpType type,
+ ContentSource source);
+
+ // Indicates which crypto algorithms and keys were contained in the offer.
+ // offer_params should contain a list of available parameters to use, or none,
+ // if crypto is not desired. This must be called before SetAnswer.
+ bool SetOffer(const std::vector<CryptoParams>& offer_params,
+ ContentSource source);
+  // Same as SetAnswer, but multiple calls are allowed to SetProvisionalAnswer
+ // after a call to SetOffer.
+ bool SetProvisionalAnswer(const std::vector<CryptoParams>& answer_params,
+ ContentSource source);
+ // Indicates which crypto algorithms and keys were contained in the answer.
+ // answer_params should contain the negotiated parameters, which may be none,
+ // if crypto was not desired or could not be negotiated (and not required).
+ // This must be called after SetOffer. If crypto negotiation completes
+ // successfully, this will advance the filter to the active state.
+ bool SetAnswer(const std::vector<CryptoParams>& answer_params,
+ ContentSource source);
+
+ bool ResetParams();
+
+ static bool ParseKeyParams(const std::string& params,
+ uint8_t* key,
+ size_t len);
+
+ absl::optional<int> send_crypto_suite() { return send_crypto_suite_; }
+ absl::optional<int> recv_crypto_suite() { return recv_crypto_suite_; }
+
+ rtc::ArrayView<const uint8_t> send_key() { return send_key_; }
+ rtc::ArrayView<const uint8_t> recv_key() { return recv_key_; }
+
+ protected:
+ bool ExpectOffer(ContentSource source);
+
+ bool StoreParams(const std::vector<CryptoParams>& params,
+ ContentSource source);
+
+ bool ExpectAnswer(ContentSource source);
+
+ bool DoSetAnswer(const std::vector<CryptoParams>& answer_params,
+ ContentSource source,
+ bool final);
+
+ bool NegotiateParams(const std::vector<CryptoParams>& answer_params,
+ CryptoParams* selected_params);
+
+ private:
+ bool ApplySendParams(const CryptoParams& send_params);
+
+ bool ApplyRecvParams(const CryptoParams& recv_params);
+
+ enum State {
+ ST_INIT, // SRTP filter unused.
+ ST_SENTOFFER, // Offer with SRTP parameters sent.
+ ST_RECEIVEDOFFER, // Offer with SRTP parameters received.
+ ST_SENTPRANSWER_NO_CRYPTO, // Sent provisional answer without crypto.
+ // Received provisional answer without crypto.
+ ST_RECEIVEDPRANSWER_NO_CRYPTO,
+ ST_ACTIVE, // Offer and answer set.
+ // SRTP filter is active but new parameters are offered.
+ // When the answer is set, the state transitions to ST_ACTIVE or ST_INIT.
+ ST_SENTUPDATEDOFFER,
+ // SRTP filter is active but new parameters are received.
+ // When the answer is set, the state transitions back to ST_ACTIVE.
+ ST_RECEIVEDUPDATEDOFFER,
+ // SRTP filter is active but the sent answer is only provisional.
+ // When the final answer is set, the state transitions to ST_ACTIVE or
+ // ST_INIT.
+ ST_SENTPRANSWER,
+ // SRTP filter is active but the received answer is only provisional.
+ // When the final answer is set, the state transitions to ST_ACTIVE or
+ // ST_INIT.
+ ST_RECEIVEDPRANSWER
+ };
+ State state_ = ST_INIT;
+ std::vector<CryptoParams> offer_params_;
+ CryptoParams applied_send_params_;
+ CryptoParams applied_recv_params_;
+ absl::optional<int> send_crypto_suite_;
+ absl::optional<int> recv_crypto_suite_;
+ rtc::ZeroOnFreeBuffer<uint8_t> send_key_;
+ rtc::ZeroOnFreeBuffer<uint8_t> recv_key_;
+};
+
+} // namespace cricket
+
+#endif // PC_SRTP_FILTER_H_
diff --git a/third_party/libwebrtc/pc/srtp_filter_unittest.cc b/third_party/libwebrtc/pc/srtp_filter_unittest.cc
new file mode 100644
index 0000000000..fed023199f
--- /dev/null
+++ b/third_party/libwebrtc/pc/srtp_filter_unittest.cc
@@ -0,0 +1,472 @@
+/*
+ * Copyright 2004 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/srtp_filter.h"
+
+#include <string.h>
+
+#include "api/crypto_params.h"
+#include "rtc_base/ssl_stream_adapter.h"
+#include "test/gtest.h"
+
+using cricket::CryptoParams;
+using cricket::CS_LOCAL;
+using cricket::CS_REMOTE;
+
+namespace rtc {
+
+static const char kTestKeyParams1[] =
+ "inline:WVNfX19zZW1jdGwgKCkgewkyMjA7fQp9CnVubGVz";
+static const char kTestKeyParams2[] =
+ "inline:PS1uQCVeeCFCanVmcjkpPywjNWhcYD0mXXtxaVBR";
+static const char kTestKeyParams3[] =
+ "inline:1234X19zZW1jdGwgKCkgewkyMjA7fQp9CnVubGVz";
+static const char kTestKeyParams4[] =
+ "inline:4567QCVeeCFCanVmcjkpPywjNWhcYD0mXXtxaVBR";
+static const char kTestKeyParamsGcm1[] =
+ "inline:e166KFlKzJsGW0d5apX+rrI05vxbrvMJEzFI14aTDCa63IRTlLK4iH66uOI=";
+static const char kTestKeyParamsGcm2[] =
+ "inline:6X0oCd55zfz4VgtOwsuqcFq61275PDYN5uwuu3p7ZUHbfUY2FMpdP4m2PEo=";
+static const char kTestKeyParamsGcm3[] =
+ "inline:YKlABGZWMgX32xuMotrG0v0T7G83veegaVzubQ==";
+static const char kTestKeyParamsGcm4[] =
+ "inline:gJ6tWoUym2v+/F6xjr7xaxiS3QbJJozl3ZD/0A==";
+static const cricket::CryptoParams kTestCryptoParams1(1,
+ "AES_CM_128_HMAC_SHA1_80",
+ kTestKeyParams1,
+ "");
+static const cricket::CryptoParams kTestCryptoParams2(1,
+ "AES_CM_128_HMAC_SHA1_80",
+ kTestKeyParams2,
+ "");
+static const cricket::CryptoParams kTestCryptoParamsGcm1(1,
+ "AEAD_AES_256_GCM",
+ kTestKeyParamsGcm1,
+ "");
+static const cricket::CryptoParams kTestCryptoParamsGcm2(1,
+ "AEAD_AES_256_GCM",
+ kTestKeyParamsGcm2,
+ "");
+static const cricket::CryptoParams kTestCryptoParamsGcm3(1,
+ "AEAD_AES_128_GCM",
+ kTestKeyParamsGcm3,
+ "");
+static const cricket::CryptoParams kTestCryptoParamsGcm4(1,
+ "AEAD_AES_128_GCM",
+ kTestKeyParamsGcm4,
+ "");
+
+class SrtpFilterTest : public ::testing::Test {
+ protected:
+ SrtpFilterTest() {}
+ static std::vector<CryptoParams> MakeVector(const CryptoParams& params) {
+ std::vector<CryptoParams> vec;
+ vec.push_back(params);
+ return vec;
+ }
+
+ void TestSetParams(const std::vector<CryptoParams>& params1,
+ const std::vector<CryptoParams>& params2) {
+ EXPECT_TRUE(f1_.SetOffer(params1, CS_LOCAL));
+ EXPECT_TRUE(f2_.SetOffer(params1, CS_REMOTE));
+ EXPECT_FALSE(f1_.IsActive());
+ EXPECT_FALSE(f2_.IsActive());
+ EXPECT_TRUE(f2_.SetAnswer(params2, CS_LOCAL));
+ EXPECT_TRUE(f1_.SetAnswer(params2, CS_REMOTE));
+ EXPECT_TRUE(f1_.IsActive());
+ EXPECT_TRUE(f2_.IsActive());
+ }
+
+ void VerifyKeysAreEqual(ArrayView<const uint8_t> key1,
+ ArrayView<const uint8_t> key2) {
+ EXPECT_EQ(key1.size(), key2.size());
+ EXPECT_EQ(0, memcmp(key1.data(), key2.data(), key1.size()));
+ }
+
+ void VerifyCryptoParamsMatch(const std::string& cs1, const std::string& cs2) {
+ EXPECT_EQ(rtc::SrtpCryptoSuiteFromName(cs1), f1_.send_crypto_suite());
+ EXPECT_EQ(rtc::SrtpCryptoSuiteFromName(cs2), f2_.send_crypto_suite());
+ VerifyKeysAreEqual(f1_.send_key(), f2_.recv_key());
+ VerifyKeysAreEqual(f2_.send_key(), f1_.recv_key());
+ }
+
+ cricket::SrtpFilter f1_;
+ cricket::SrtpFilter f2_;
+};
+
+// Test that we can set up the session and keys properly.
+TEST_F(SrtpFilterTest, TestGoodSetupOneCryptoSuite) {
+ EXPECT_TRUE(f1_.SetOffer(MakeVector(kTestCryptoParams1), CS_LOCAL));
+ EXPECT_FALSE(f1_.IsActive());
+ EXPECT_TRUE(f1_.SetAnswer(MakeVector(kTestCryptoParams2), CS_REMOTE));
+ EXPECT_TRUE(f1_.IsActive());
+}
+
+TEST_F(SrtpFilterTest, TestGoodSetupOneCryptoSuiteGcm) {
+ EXPECT_TRUE(f1_.SetOffer(MakeVector(kTestCryptoParamsGcm1), CS_LOCAL));
+ EXPECT_FALSE(f1_.IsActive());
+ EXPECT_TRUE(f1_.SetAnswer(MakeVector(kTestCryptoParamsGcm2), CS_REMOTE));
+ EXPECT_TRUE(f1_.IsActive());
+}
+
+// Test that we can set up things with multiple params.
+TEST_F(SrtpFilterTest, TestGoodSetupMultipleCryptoSuites) {
+ std::vector<CryptoParams> offer(MakeVector(kTestCryptoParams1));
+ std::vector<CryptoParams> answer(MakeVector(kTestCryptoParams2));
+ offer.push_back(kTestCryptoParams1);
+ offer[1].tag = 2;
+ offer[1].crypto_suite = kCsAesCm128HmacSha1_32;
+ answer[0].tag = 2;
+ answer[0].crypto_suite = kCsAesCm128HmacSha1_32;
+ EXPECT_TRUE(f1_.SetOffer(offer, CS_LOCAL));
+ EXPECT_FALSE(f1_.IsActive());
+ EXPECT_TRUE(f1_.SetAnswer(answer, CS_REMOTE));
+ EXPECT_TRUE(f1_.IsActive());
+}
+
+TEST_F(SrtpFilterTest, TestGoodSetupMultipleCryptoSuitesGcm) {
+ std::vector<CryptoParams> offer(MakeVector(kTestCryptoParamsGcm1));
+ std::vector<CryptoParams> answer(MakeVector(kTestCryptoParamsGcm3));
+ offer.push_back(kTestCryptoParamsGcm4);
+ offer[1].tag = 2;
+ answer[0].tag = 2;
+ EXPECT_TRUE(f1_.SetOffer(offer, CS_LOCAL));
+ EXPECT_FALSE(f1_.IsActive());
+ EXPECT_TRUE(f1_.SetAnswer(answer, CS_REMOTE));
+ EXPECT_TRUE(f1_.IsActive());
+}
+
+// Test that we handle the cases where crypto is not desired.
+TEST_F(SrtpFilterTest, TestGoodSetupNoCryptoSuites) {
+ std::vector<CryptoParams> offer, answer;
+ EXPECT_TRUE(f1_.SetOffer(offer, CS_LOCAL));
+ EXPECT_TRUE(f1_.SetAnswer(answer, CS_REMOTE));
+ EXPECT_FALSE(f1_.IsActive());
+}
+
+// Test that we handle the cases where crypto is not desired by the remote side.
+TEST_F(SrtpFilterTest, TestGoodSetupNoAnswerCryptoSuites) {
+ std::vector<CryptoParams> answer;
+ EXPECT_TRUE(f1_.SetOffer(MakeVector(kTestCryptoParams1), CS_LOCAL));
+ EXPECT_TRUE(f1_.SetAnswer(answer, CS_REMOTE));
+ EXPECT_FALSE(f1_.IsActive());
+}
+
+// Test that we fail if we call the functions the wrong way.
+TEST_F(SrtpFilterTest, TestBadSetup) {
+ std::vector<CryptoParams> offer(MakeVector(kTestCryptoParams1));
+ std::vector<CryptoParams> answer(MakeVector(kTestCryptoParams2));
+ EXPECT_FALSE(f1_.SetAnswer(answer, CS_LOCAL));
+ EXPECT_FALSE(f1_.SetAnswer(answer, CS_REMOTE));
+ EXPECT_TRUE(f1_.SetOffer(offer, CS_LOCAL));
+ EXPECT_FALSE(f1_.SetAnswer(answer, CS_LOCAL));
+ EXPECT_FALSE(f1_.IsActive());
+}
+
+// Test that we can set offer multiple times from the same source.
+TEST_F(SrtpFilterTest, TestGoodSetupMultipleOffers) {
+ EXPECT_TRUE(f1_.SetOffer(MakeVector(kTestCryptoParams1), CS_LOCAL));
+ EXPECT_TRUE(f1_.SetOffer(MakeVector(kTestCryptoParams2), CS_LOCAL));
+ EXPECT_FALSE(f1_.IsActive());
+ EXPECT_TRUE(f1_.SetAnswer(MakeVector(kTestCryptoParams2), CS_REMOTE));
+ EXPECT_TRUE(f1_.IsActive());
+ EXPECT_TRUE(f1_.SetOffer(MakeVector(kTestCryptoParams1), CS_LOCAL));
+ EXPECT_TRUE(f1_.SetOffer(MakeVector(kTestCryptoParams2), CS_LOCAL));
+ EXPECT_TRUE(f1_.SetAnswer(MakeVector(kTestCryptoParams2), CS_REMOTE));
+
+ EXPECT_TRUE(f2_.SetOffer(MakeVector(kTestCryptoParams1), CS_REMOTE));
+ EXPECT_TRUE(f2_.SetOffer(MakeVector(kTestCryptoParams2), CS_REMOTE));
+ EXPECT_FALSE(f2_.IsActive());
+ EXPECT_TRUE(f2_.SetAnswer(MakeVector(kTestCryptoParams2), CS_LOCAL));
+ EXPECT_TRUE(f2_.IsActive());
+ EXPECT_TRUE(f2_.SetOffer(MakeVector(kTestCryptoParams1), CS_REMOTE));
+ EXPECT_TRUE(f2_.SetOffer(MakeVector(kTestCryptoParams2), CS_REMOTE));
+ EXPECT_TRUE(f2_.SetAnswer(MakeVector(kTestCryptoParams2), CS_LOCAL));
+}
+// Test that we can't set offer multiple times from different sources.
+TEST_F(SrtpFilterTest, TestBadSetupMultipleOffers) {
+ EXPECT_TRUE(f1_.SetOffer(MakeVector(kTestCryptoParams1), CS_LOCAL));
+ EXPECT_FALSE(f1_.SetOffer(MakeVector(kTestCryptoParams2), CS_REMOTE));
+ EXPECT_FALSE(f1_.IsActive());
+ EXPECT_TRUE(f1_.SetAnswer(MakeVector(kTestCryptoParams1), CS_REMOTE));
+ EXPECT_TRUE(f1_.IsActive());
+ EXPECT_TRUE(f1_.SetOffer(MakeVector(kTestCryptoParams2), CS_LOCAL));
+ EXPECT_FALSE(f1_.SetOffer(MakeVector(kTestCryptoParams1), CS_REMOTE));
+ EXPECT_TRUE(f1_.SetAnswer(MakeVector(kTestCryptoParams2), CS_REMOTE));
+
+ EXPECT_TRUE(f2_.SetOffer(MakeVector(kTestCryptoParams2), CS_REMOTE));
+ EXPECT_FALSE(f2_.SetOffer(MakeVector(kTestCryptoParams1), CS_LOCAL));
+ EXPECT_FALSE(f2_.IsActive());
+ EXPECT_TRUE(f2_.SetAnswer(MakeVector(kTestCryptoParams2), CS_LOCAL));
+ EXPECT_TRUE(f2_.IsActive());
+ EXPECT_TRUE(f2_.SetOffer(MakeVector(kTestCryptoParams2), CS_REMOTE));
+ EXPECT_FALSE(f2_.SetOffer(MakeVector(kTestCryptoParams1), CS_LOCAL));
+ EXPECT_TRUE(f2_.SetAnswer(MakeVector(kTestCryptoParams2), CS_LOCAL));
+}
+
+// Test that we fail if we have params in the answer when none were offered.
+TEST_F(SrtpFilterTest, TestNoAnswerCryptoSuites) {
+ std::vector<CryptoParams> offer;
+ EXPECT_TRUE(f1_.SetOffer(offer, CS_LOCAL));
+ EXPECT_FALSE(f1_.SetAnswer(MakeVector(kTestCryptoParams2), CS_REMOTE));
+ EXPECT_FALSE(f1_.IsActive());
+}
+
+// Test that we fail if we have too many params in our answer.
+TEST_F(SrtpFilterTest, TestMultipleAnswerCryptoSuites) {
+ std::vector<CryptoParams> answer(MakeVector(kTestCryptoParams2));
+ answer.push_back(kTestCryptoParams2);
+ answer[1].tag = 2;
+ answer[1].crypto_suite = kCsAesCm128HmacSha1_32;
+ EXPECT_TRUE(f1_.SetOffer(MakeVector(kTestCryptoParams1), CS_LOCAL));
+ EXPECT_FALSE(f1_.SetAnswer(answer, CS_REMOTE));
+ EXPECT_FALSE(f1_.IsActive());
+}
+
+// Test that we fail if we don't support the crypto suite.
+TEST_F(SrtpFilterTest, TestInvalidCryptoSuite) {
+ std::vector<CryptoParams> offer(MakeVector(kTestCryptoParams1));
+ std::vector<CryptoParams> answer(MakeVector(kTestCryptoParams2));
+ offer[0].crypto_suite = answer[0].crypto_suite = "FOO";
+ EXPECT_TRUE(f1_.SetOffer(offer, CS_LOCAL));
+ EXPECT_FALSE(f1_.SetAnswer(answer, CS_REMOTE));
+ EXPECT_FALSE(f1_.IsActive());
+}
+
+// Test that we fail if we can't agree on a tag.
+TEST_F(SrtpFilterTest, TestNoMatchingTag) {
+ std::vector<CryptoParams> offer(MakeVector(kTestCryptoParams1));
+ std::vector<CryptoParams> answer(MakeVector(kTestCryptoParams2));
+ answer[0].tag = 99;
+ EXPECT_TRUE(f1_.SetOffer(offer, CS_LOCAL));
+ EXPECT_FALSE(f1_.SetAnswer(answer, CS_REMOTE));
+ EXPECT_FALSE(f1_.IsActive());
+}
+
+// Test that we fail if we can't agree on a crypto suite.
+TEST_F(SrtpFilterTest, TestNoMatchingCryptoSuite) {
+ std::vector<CryptoParams> offer(MakeVector(kTestCryptoParams1));
+ std::vector<CryptoParams> answer(MakeVector(kTestCryptoParams2));
+ answer[0].tag = 2;
+ answer[0].crypto_suite = "FOO";
+ EXPECT_TRUE(f1_.SetOffer(offer, CS_LOCAL));
+ EXPECT_FALSE(f1_.SetAnswer(answer, CS_REMOTE));
+ EXPECT_FALSE(f1_.IsActive());
+}
+
+// Test that we fail keys with bad base64 content.
+TEST_F(SrtpFilterTest, TestInvalidKeyData) {
+ std::vector<CryptoParams> offer(MakeVector(kTestCryptoParams1));
+ std::vector<CryptoParams> answer(MakeVector(kTestCryptoParams2));
+ answer[0].key_params = "inline:!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!";
+ EXPECT_TRUE(f1_.SetOffer(offer, CS_LOCAL));
+ EXPECT_FALSE(f1_.SetAnswer(answer, CS_REMOTE));
+ EXPECT_FALSE(f1_.IsActive());
+}
+
+// Test that we fail keys with the wrong key-method.
+TEST_F(SrtpFilterTest, TestWrongKeyMethod) {
+ std::vector<CryptoParams> offer(MakeVector(kTestCryptoParams1));
+ std::vector<CryptoParams> answer(MakeVector(kTestCryptoParams2));
+ answer[0].key_params = "outline:PS1uQCVeeCFCanVmcjkpPywjNWhcYD0mXXtxaVBR";
+ EXPECT_TRUE(f1_.SetOffer(offer, CS_LOCAL));
+ EXPECT_FALSE(f1_.SetAnswer(answer, CS_REMOTE));
+ EXPECT_FALSE(f1_.IsActive());
+}
+
+// Test that we fail keys of the wrong length.
+TEST_F(SrtpFilterTest, TestKeyTooShort) {
+ std::vector<CryptoParams> offer(MakeVector(kTestCryptoParams1));
+ std::vector<CryptoParams> answer(MakeVector(kTestCryptoParams2));
+ answer[0].key_params = "inline:PS1uQCVeeCFCanVmcjkpPywjNWhcYD0mXXtx";
+ EXPECT_TRUE(f1_.SetOffer(offer, CS_LOCAL));
+ EXPECT_FALSE(f1_.SetAnswer(answer, CS_REMOTE));
+ EXPECT_FALSE(f1_.IsActive());
+}
+
+// Test that we fail keys of the wrong length.
+TEST_F(SrtpFilterTest, TestKeyTooLong) {
+ std::vector<CryptoParams> offer(MakeVector(kTestCryptoParams1));
+ std::vector<CryptoParams> answer(MakeVector(kTestCryptoParams2));
+ answer[0].key_params = "inline:PS1uQCVeeCFCanVmcjkpPywjNWhcYD0mXXtxaVBRABCD";
+ EXPECT_TRUE(f1_.SetOffer(offer, CS_LOCAL));
+ EXPECT_FALSE(f1_.SetAnswer(answer, CS_REMOTE));
+ EXPECT_FALSE(f1_.IsActive());
+}
+
+// Test that we fail keys with lifetime or MKI set (since we don't support them).
+TEST_F(SrtpFilterTest, TestUnsupportedOptions) {
+ std::vector<CryptoParams> offer(MakeVector(kTestCryptoParams1));
+ std::vector<CryptoParams> answer(MakeVector(kTestCryptoParams2));
+ answer[0].key_params =
+ "inline:PS1uQCVeeCFCanVmcjkpPywjNWhcYD0mXXtxaVBR|2^20|1:4";
+ EXPECT_TRUE(f1_.SetOffer(offer, CS_LOCAL));
+ EXPECT_FALSE(f1_.SetAnswer(answer, CS_REMOTE));
+ EXPECT_FALSE(f1_.IsActive());
+}
+
+// Test that we can encrypt/decrypt after negotiating AES_CM_128_HMAC_SHA1_80.
+TEST_F(SrtpFilterTest, TestProtect_AES_CM_128_HMAC_SHA1_80) {
+ std::vector<CryptoParams> offer(MakeVector(kTestCryptoParams1));
+ std::vector<CryptoParams> answer(MakeVector(kTestCryptoParams2));
+ offer.push_back(kTestCryptoParams1);
+ offer[1].tag = 2;
+ offer[1].crypto_suite = kCsAesCm128HmacSha1_32;
+ TestSetParams(offer, answer);
+ VerifyCryptoParamsMatch(kCsAesCm128HmacSha1_80, kCsAesCm128HmacSha1_80);
+}
+
+// Test that we can encrypt/decrypt after negotiating AES_CM_128_HMAC_SHA1_32.
+TEST_F(SrtpFilterTest, TestProtect_AES_CM_128_HMAC_SHA1_32) {
+ std::vector<CryptoParams> offer(MakeVector(kTestCryptoParams1));
+ std::vector<CryptoParams> answer(MakeVector(kTestCryptoParams2));
+ offer.push_back(kTestCryptoParams1);
+ offer[1].tag = 2;
+ offer[1].crypto_suite = kCsAesCm128HmacSha1_32;
+ answer[0].tag = 2;
+ answer[0].crypto_suite = kCsAesCm128HmacSha1_32;
+ TestSetParams(offer, answer);
+ VerifyCryptoParamsMatch(kCsAesCm128HmacSha1_32, kCsAesCm128HmacSha1_32);
+}
+
+// Test that we can change encryption parameters.
+TEST_F(SrtpFilterTest, TestChangeParameters) {
+ std::vector<CryptoParams> offer(MakeVector(kTestCryptoParams1));
+ std::vector<CryptoParams> answer(MakeVector(kTestCryptoParams2));
+
+ TestSetParams(offer, answer);
+ VerifyCryptoParamsMatch(kCsAesCm128HmacSha1_80, kCsAesCm128HmacSha1_80);
+
+ // Change the key parameters and crypto_suite.
+ offer[0].key_params = kTestKeyParams3;
+ offer[0].crypto_suite = kCsAesCm128HmacSha1_32;
+ answer[0].key_params = kTestKeyParams4;
+ answer[0].crypto_suite = kCsAesCm128HmacSha1_32;
+
+ EXPECT_TRUE(f1_.SetOffer(offer, CS_LOCAL));
+ EXPECT_TRUE(f2_.SetOffer(offer, CS_REMOTE));
+ EXPECT_TRUE(f1_.IsActive());
+ EXPECT_TRUE(f1_.IsActive());
+
+ // Test that the old keys are valid until the negotiation is complete.
+ VerifyCryptoParamsMatch(kCsAesCm128HmacSha1_80, kCsAesCm128HmacSha1_80);
+
+ // Complete the negotiation and test that we can still understand each other.
+ EXPECT_TRUE(f2_.SetAnswer(answer, CS_LOCAL));
+ EXPECT_TRUE(f1_.SetAnswer(answer, CS_REMOTE));
+
+ VerifyCryptoParamsMatch(kCsAesCm128HmacSha1_32, kCsAesCm128HmacSha1_32);
+}
+
+// Test that we can send and receive provisional answers with crypto enabled.
+// Also test that we can change the crypto.
+TEST_F(SrtpFilterTest, TestProvisionalAnswer) {
+ std::vector<CryptoParams> offer(MakeVector(kTestCryptoParams1));
+ offer.push_back(kTestCryptoParams1);
+ offer[1].tag = 2;
+ offer[1].crypto_suite = kCsAesCm128HmacSha1_32;
+ std::vector<CryptoParams> answer(MakeVector(kTestCryptoParams2));
+
+ EXPECT_TRUE(f1_.SetOffer(offer, CS_LOCAL));
+ EXPECT_TRUE(f2_.SetOffer(offer, CS_REMOTE));
+ EXPECT_FALSE(f1_.IsActive());
+ EXPECT_FALSE(f2_.IsActive());
+ EXPECT_TRUE(f2_.SetProvisionalAnswer(answer, CS_LOCAL));
+ EXPECT_TRUE(f1_.SetProvisionalAnswer(answer, CS_REMOTE));
+ EXPECT_TRUE(f1_.IsActive());
+ EXPECT_TRUE(f2_.IsActive());
+ VerifyCryptoParamsMatch(kCsAesCm128HmacSha1_80, kCsAesCm128HmacSha1_80);
+
+ answer[0].key_params = kTestKeyParams4;
+ answer[0].tag = 2;
+ answer[0].crypto_suite = kCsAesCm128HmacSha1_32;
+ EXPECT_TRUE(f2_.SetAnswer(answer, CS_LOCAL));
+ EXPECT_TRUE(f1_.SetAnswer(answer, CS_REMOTE));
+ EXPECT_TRUE(f1_.IsActive());
+ EXPECT_TRUE(f2_.IsActive());
+ VerifyCryptoParamsMatch(kCsAesCm128HmacSha1_32, kCsAesCm128HmacSha1_32);
+}
+
+// Test that a provisional answer doesn't need to contain a crypto.
+TEST_F(SrtpFilterTest, TestProvisionalAnswerWithoutCrypto) {
+ std::vector<CryptoParams> offer(MakeVector(kTestCryptoParams1));
+ std::vector<CryptoParams> answer;
+
+ EXPECT_TRUE(f1_.SetOffer(offer, CS_LOCAL));
+ EXPECT_TRUE(f2_.SetOffer(offer, CS_REMOTE));
+ EXPECT_FALSE(f1_.IsActive());
+ EXPECT_FALSE(f2_.IsActive());
+ EXPECT_TRUE(f2_.SetProvisionalAnswer(answer, CS_LOCAL));
+ EXPECT_TRUE(f1_.SetProvisionalAnswer(answer, CS_REMOTE));
+ EXPECT_FALSE(f1_.IsActive());
+ EXPECT_FALSE(f2_.IsActive());
+
+ answer.push_back(kTestCryptoParams2);
+ EXPECT_TRUE(f2_.SetAnswer(answer, CS_LOCAL));
+ EXPECT_TRUE(f1_.SetAnswer(answer, CS_REMOTE));
+ EXPECT_TRUE(f1_.IsActive());
+ EXPECT_TRUE(f2_.IsActive());
+ VerifyCryptoParamsMatch(kCsAesCm128HmacSha1_80, kCsAesCm128HmacSha1_80);
+}
+
+// Test that if we get a new local offer after a provisional answer
+// with no crypto, that we are in an inactive state.
+TEST_F(SrtpFilterTest, TestLocalOfferAfterProvisionalAnswerWithoutCrypto) {
+ std::vector<CryptoParams> offer(MakeVector(kTestCryptoParams1));
+ std::vector<CryptoParams> answer;
+
+ EXPECT_TRUE(f1_.SetOffer(offer, CS_LOCAL));
+ EXPECT_TRUE(f2_.SetOffer(offer, CS_REMOTE));
+ EXPECT_TRUE(f1_.SetProvisionalAnswer(answer, CS_REMOTE));
+ EXPECT_TRUE(f2_.SetProvisionalAnswer(answer, CS_LOCAL));
+ EXPECT_FALSE(f1_.IsActive());
+ EXPECT_FALSE(f2_.IsActive());
+ // The calls to set an offer after a provisional answer fail, so the
+ // state doesn't change.
+ EXPECT_FALSE(f1_.SetOffer(offer, CS_LOCAL));
+ EXPECT_FALSE(f2_.SetOffer(offer, CS_REMOTE));
+ EXPECT_FALSE(f1_.IsActive());
+ EXPECT_FALSE(f2_.IsActive());
+
+ answer.push_back(kTestCryptoParams2);
+ EXPECT_TRUE(f2_.SetAnswer(answer, CS_LOCAL));
+ EXPECT_TRUE(f1_.SetAnswer(answer, CS_REMOTE));
+ EXPECT_TRUE(f1_.IsActive());
+ EXPECT_TRUE(f2_.IsActive());
+ VerifyCryptoParamsMatch(kCsAesCm128HmacSha1_80, kCsAesCm128HmacSha1_80);
+}
+
+// Test that we can disable encryption.
+TEST_F(SrtpFilterTest, TestDisableEncryption) {
+ std::vector<CryptoParams> offer(MakeVector(kTestCryptoParams1));
+ std::vector<CryptoParams> answer(MakeVector(kTestCryptoParams2));
+
+ TestSetParams(offer, answer);
+ VerifyCryptoParamsMatch(kCsAesCm128HmacSha1_80, kCsAesCm128HmacSha1_80);
+
+ offer.clear();
+ answer.clear();
+ EXPECT_TRUE(f1_.SetOffer(offer, CS_LOCAL));
+ EXPECT_TRUE(f2_.SetOffer(offer, CS_REMOTE));
+ EXPECT_TRUE(f1_.IsActive());
+ EXPECT_TRUE(f2_.IsActive());
+
+ // Test that the old keys are valid until the negotiation is complete.
+ VerifyCryptoParamsMatch(kCsAesCm128HmacSha1_80, kCsAesCm128HmacSha1_80);
+
+ // Complete the negotiation.
+ EXPECT_TRUE(f2_.SetAnswer(answer, CS_LOCAL));
+ EXPECT_TRUE(f1_.SetAnswer(answer, CS_REMOTE));
+
+ EXPECT_FALSE(f1_.IsActive());
+ EXPECT_FALSE(f2_.IsActive());
+}
+
+} // namespace rtc
diff --git a/third_party/libwebrtc/pc/srtp_session.cc b/third_party/libwebrtc/pc/srtp_session.cc
new file mode 100644
index 0000000000..5408d3e0da
--- /dev/null
+++ b/third_party/libwebrtc/pc/srtp_session.cc
@@ -0,0 +1,520 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/srtp_session.h"
+
+#include <string.h>
+
+#include <iomanip>
+#include <string>
+
+#include "absl/base/attributes.h"
+#include "absl/base/const_init.h"
+#include "absl/strings/string_view.h"
+#include "api/array_view.h"
+#include "api/field_trials_view.h"
+#include "modules/rtp_rtcp/source/rtp_util.h"
+#include "pc/external_hmac.h"
+#include "rtc_base/byte_order.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/ssl_stream_adapter.h"
+#include "rtc_base/string_encode.h"
+#include "rtc_base/thread_annotations.h"
+#include "rtc_base/time_utils.h"
+#include "system_wrappers/include/metrics.h"
+#include "third_party/libsrtp/include/srtp.h"
+#include "third_party/libsrtp/include/srtp_priv.h"
+
+namespace cricket {
+
+namespace {
+class LibSrtpInitializer {
+ public:
+ // Returns singleton instance of this class. Instance created on first use,
+ // and never destroyed.
+ static LibSrtpInitializer& Get() {
+ static LibSrtpInitializer* const instance = new LibSrtpInitializer();
+ return *instance;
+ }
+ void ProhibitLibsrtpInitialization();
+
+ // These methods are responsible for initializing libsrtp (if the usage count
+ // is incremented from 0 to 1) or deinitializing it (when decremented from 1
+ // to 0).
+ //
+ // Returns true if successful (will always be successful if already inited).
+ bool IncrementLibsrtpUsageCountAndMaybeInit(
+ srtp_event_handler_func_t* handler);
+ void DecrementLibsrtpUsageCountAndMaybeDeinit();
+
+ private:
+ LibSrtpInitializer() = default;
+
+ webrtc::Mutex mutex_;
+ int usage_count_ RTC_GUARDED_BY(mutex_) = 0;
+};
+
+void LibSrtpInitializer::ProhibitLibsrtpInitialization() {
+ webrtc::MutexLock lock(&mutex_);
+ ++usage_count_;
+}
+
+bool LibSrtpInitializer::IncrementLibsrtpUsageCountAndMaybeInit(
+ srtp_event_handler_func_t* handler) {
+ webrtc::MutexLock lock(&mutex_);
+
+ RTC_DCHECK_GE(usage_count_, 0);
+ if (usage_count_ == 0) {
+ int err;
+ err = srtp_init();
+ if (err != srtp_err_status_ok) {
+ RTC_LOG(LS_ERROR) << "Failed to init SRTP, err=" << err;
+ return false;
+ }
+
+ err = srtp_install_event_handler(handler);
+ if (err != srtp_err_status_ok) {
+ RTC_LOG(LS_ERROR) << "Failed to install SRTP event handler, err=" << err;
+ return false;
+ }
+
+ err = external_crypto_init();
+ if (err != srtp_err_status_ok) {
+ RTC_LOG(LS_ERROR) << "Failed to initialize fake auth, err=" << err;
+ return false;
+ }
+ }
+ ++usage_count_;
+ return true;
+}
+
+void LibSrtpInitializer::DecrementLibsrtpUsageCountAndMaybeDeinit() {
+ webrtc::MutexLock lock(&mutex_);
+
+ RTC_DCHECK_GE(usage_count_, 1);
+ if (--usage_count_ == 0) {
+ int err = srtp_shutdown();
+ if (err) {
+ RTC_LOG(LS_ERROR) << "srtp_shutdown failed. err=" << err;
+ }
+ }
+}
+
+} // namespace
+
+using ::webrtc::ParseRtpSequenceNumber;
+
+// One more than the maximum libsrtp error code. Required by
+// RTC_HISTOGRAM_ENUMERATION. Keep this in sync with srtp_error_status_t defined
+// in srtp.h.
+constexpr int kSrtpErrorCodeBoundary = 28;
+
+SrtpSession::SrtpSession() {}
+
+SrtpSession::SrtpSession(const webrtc::FieldTrialsView& field_trials) {
+ dump_plain_rtp_ = field_trials.IsEnabled("WebRTC-Debugging-RtpDump");
+}
+
+SrtpSession::~SrtpSession() {
+ if (session_) {
+ srtp_set_user_data(session_, nullptr);
+ srtp_dealloc(session_);
+ }
+ if (inited_) {
+ LibSrtpInitializer::Get().DecrementLibsrtpUsageCountAndMaybeDeinit();
+ }
+}
+
+bool SrtpSession::SetSend(int crypto_suite,
+ const uint8_t* key,
+ size_t len,
+ const std::vector<int>& extension_ids) {
+ return SetKey(ssrc_any_outbound, crypto_suite, key, len, extension_ids);
+}
+
+bool SrtpSession::UpdateSend(int crypto_suite,
+ const uint8_t* key,
+ size_t len,
+ const std::vector<int>& extension_ids) {
+ return UpdateKey(ssrc_any_outbound, crypto_suite, key, len, extension_ids);
+}
+
+bool SrtpSession::SetRecv(int crypto_suite,
+ const uint8_t* key,
+ size_t len,
+ const std::vector<int>& extension_ids) {
+ return SetKey(ssrc_any_inbound, crypto_suite, key, len, extension_ids);
+}
+
+bool SrtpSession::UpdateRecv(int crypto_suite,
+ const uint8_t* key,
+ size_t len,
+ const std::vector<int>& extension_ids) {
+ return UpdateKey(ssrc_any_inbound, crypto_suite, key, len, extension_ids);
+}
+
+bool SrtpSession::ProtectRtp(void* p, int in_len, int max_len, int* out_len) {
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ if (!session_) {
+ RTC_LOG(LS_WARNING) << "Failed to protect SRTP packet: no SRTP Session";
+ return false;
+ }
+
+ // Note: the need_len differs from the libsrtp recommendation to ensure
+ // SRTP_MAX_TRAILER_LEN bytes of free space after the data. WebRTC
+ // never includes a MKI, therefore the amount of bytes added by the
+ // srtp_protect call is known in advance and depends on the cipher suite.
+ int need_len = in_len + rtp_auth_tag_len_; // NOLINT
+ if (max_len < need_len) {
+ RTC_LOG(LS_WARNING) << "Failed to protect SRTP packet: The buffer length "
+ << max_len << " is less than the needed " << need_len;
+ return false;
+ }
+ if (dump_plain_rtp_) {
+ DumpPacket(p, in_len, /*outbound=*/true);
+ }
+
+ *out_len = in_len;
+ int err = srtp_protect(session_, p, out_len);
+ int seq_num = ParseRtpSequenceNumber(
+ rtc::MakeArrayView(reinterpret_cast<const uint8_t*>(p), in_len));
+ if (err != srtp_err_status_ok) {
+ RTC_LOG(LS_WARNING) << "Failed to protect SRTP packet, seqnum=" << seq_num
+ << ", err=" << err
+ << ", last seqnum=" << last_send_seq_num_;
+ return false;
+ }
+ last_send_seq_num_ = seq_num;
+ return true;
+}
+
+bool SrtpSession::ProtectRtp(void* p,
+ int in_len,
+ int max_len,
+ int* out_len,
+ int64_t* index) {
+ if (!ProtectRtp(p, in_len, max_len, out_len)) {
+ return false;
+ }
+ return (index) ? GetSendStreamPacketIndex(p, in_len, index) : true;
+}
+
+bool SrtpSession::ProtectRtcp(void* p, int in_len, int max_len, int* out_len) {
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ if (!session_) {
+ RTC_LOG(LS_WARNING) << "Failed to protect SRTCP packet: no SRTP Session";
+ return false;
+ }
+
+ // Note: the need_len differs from the libsrtp recommendation to ensure
+ // SRTP_MAX_TRAILER_LEN bytes of free space after the data. WebRTC
+ // never includes a MKI, therefore the amount of bytes added by the
+ // srtp_protect_rtp call is known in advance and depends on the cipher suite.
+ int need_len = in_len + sizeof(uint32_t) + rtcp_auth_tag_len_; // NOLINT
+ if (max_len < need_len) {
+ RTC_LOG(LS_WARNING) << "Failed to protect SRTCP packet: The buffer length "
+ << max_len << " is less than the needed " << need_len;
+ return false;
+ }
+ if (dump_plain_rtp_) {
+ DumpPacket(p, in_len, /*outbound=*/true);
+ }
+
+ *out_len = in_len;
+ int err = srtp_protect_rtcp(session_, p, out_len);
+ if (err != srtp_err_status_ok) {
+ RTC_LOG(LS_WARNING) << "Failed to protect SRTCP packet, err=" << err;
+ return false;
+ }
+ return true;
+}
+
+bool SrtpSession::UnprotectRtp(void* p, int in_len, int* out_len) {
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ if (!session_) {
+ RTC_LOG(LS_WARNING) << "Failed to unprotect SRTP packet: no SRTP Session";
+ return false;
+ }
+
+ *out_len = in_len;
+ int err = srtp_unprotect(session_, p, out_len);
+ if (err != srtp_err_status_ok) {
+ // Limit the error logging to avoid excessive logs when there are lots of
+ // bad packets.
+ const int kFailureLogThrottleCount = 100;
+ if (decryption_failure_count_ % kFailureLogThrottleCount == 0) {
+ RTC_LOG(LS_WARNING) << "Failed to unprotect SRTP packet, err=" << err
+ << ", previous failure count: "
+ << decryption_failure_count_;
+ }
+ ++decryption_failure_count_;
+ RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.SrtpUnprotectError",
+ static_cast<int>(err), kSrtpErrorCodeBoundary);
+ return false;
+ }
+ if (dump_plain_rtp_) {
+ DumpPacket(p, *out_len, /*outbound=*/false);
+ }
+ return true;
+}
+
+bool SrtpSession::UnprotectRtcp(void* p, int in_len, int* out_len) {
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ if (!session_) {
+ RTC_LOG(LS_WARNING) << "Failed to unprotect SRTCP packet: no SRTP Session";
+ return false;
+ }
+
+ *out_len = in_len;
+ int err = srtp_unprotect_rtcp(session_, p, out_len);
+ if (err != srtp_err_status_ok) {
+ RTC_LOG(LS_WARNING) << "Failed to unprotect SRTCP packet, err=" << err;
+ RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.SrtcpUnprotectError",
+ static_cast<int>(err), kSrtpErrorCodeBoundary);
+ return false;
+ }
+ if (dump_plain_rtp_) {
+ DumpPacket(p, *out_len, /*outbound=*/false);
+ }
+ return true;
+}
+
+bool SrtpSession::GetRtpAuthParams(uint8_t** key, int* key_len, int* tag_len) {
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ RTC_DCHECK(IsExternalAuthActive());
+ if (!IsExternalAuthActive()) {
+ return false;
+ }
+
+ ExternalHmacContext* external_hmac = nullptr;
+ // stream_template will be the reference context for other streams.
+ // Let's use it for getting the keys.
+ srtp_stream_ctx_t* srtp_context = session_->stream_template;
+ if (srtp_context && srtp_context->session_keys &&
+ srtp_context->session_keys->rtp_auth) {
+ external_hmac = reinterpret_cast<ExternalHmacContext*>(
+ srtp_context->session_keys->rtp_auth->state);
+ }
+
+ if (!external_hmac) {
+ RTC_LOG(LS_ERROR) << "Failed to get auth keys from libsrtp!.";
+ return false;
+ }
+
+ *key = external_hmac->key;
+ *key_len = external_hmac->key_length;
+ *tag_len = rtp_auth_tag_len_;
+ return true;
+}
+
+int SrtpSession::GetSrtpOverhead() const {
+ return rtp_auth_tag_len_;
+}
+
+void SrtpSession::EnableExternalAuth() {
+ RTC_DCHECK(!session_);
+ external_auth_enabled_ = true;
+}
+
+bool SrtpSession::IsExternalAuthEnabled() const {
+ return external_auth_enabled_;
+}
+
+bool SrtpSession::IsExternalAuthActive() const {
+ return external_auth_active_;
+}
+
+bool SrtpSession::GetSendStreamPacketIndex(void* p,
+ int in_len,
+ int64_t* index) {
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ srtp_hdr_t* hdr = reinterpret_cast<srtp_hdr_t*>(p);
+ srtp_stream_ctx_t* stream = srtp_get_stream(session_, hdr->ssrc);
+ if (!stream) {
+ return false;
+ }
+
+ // Shift packet index, put into network byte order
+ *index = static_cast<int64_t>(rtc::NetworkToHost64(
+ srtp_rdbx_get_packet_index(&stream->rtp_rdbx) << 16));
+ return true;
+}
+
+bool SrtpSession::DoSetKey(int type,
+ int crypto_suite,
+ const uint8_t* key,
+ size_t len,
+ const std::vector<int>& extension_ids) {
+ RTC_DCHECK(thread_checker_.IsCurrent());
+
+ srtp_policy_t policy;
+ memset(&policy, 0, sizeof(policy));
+ if (!(srtp_crypto_policy_set_from_profile_for_rtp(
+ &policy.rtp, (srtp_profile_t)crypto_suite) == srtp_err_status_ok &&
+ srtp_crypto_policy_set_from_profile_for_rtcp(
+ &policy.rtcp, (srtp_profile_t)crypto_suite) ==
+ srtp_err_status_ok)) {
+ RTC_LOG(LS_ERROR) << "Failed to " << (session_ ? "update" : "create")
+ << " SRTP session: unsupported cipher_suite "
+ << crypto_suite;
+ return false;
+ }
+
+ if (!key || len != static_cast<size_t>(policy.rtp.cipher_key_len)) {
+ RTC_LOG(LS_ERROR) << "Failed to " << (session_ ? "update" : "create")
+ << " SRTP session: invalid key";
+ return false;
+ }
+
+ policy.ssrc.type = static_cast<srtp_ssrc_type_t>(type);
+ policy.ssrc.value = 0;
+ policy.key = const_cast<uint8_t*>(key);
+ // TODO(astor) parse window size from WSH session-param
+ policy.window_size = 1024;
+ policy.allow_repeat_tx = 1;
+ // If external authentication option is enabled, supply custom auth module
+ // id EXTERNAL_HMAC_SHA1 in the policy structure.
+ // We want to set this option only for rtp packets.
+ // By default policy structure is initialized to HMAC_SHA1.
+ // Enable external HMAC authentication only for outgoing streams and only
+ // for cipher suites that support it (i.e. only non-GCM cipher suites).
+ if (type == ssrc_any_outbound && IsExternalAuthEnabled() &&
+ !rtc::IsGcmCryptoSuite(crypto_suite)) {
+ policy.rtp.auth_type = EXTERNAL_HMAC_SHA1;
+ }
+ if (!extension_ids.empty()) {
+ policy.enc_xtn_hdr = const_cast<int*>(&extension_ids[0]);
+ policy.enc_xtn_hdr_count = static_cast<int>(extension_ids.size());
+ }
+ policy.next = nullptr;
+
+ if (!session_) {
+ int err = srtp_create(&session_, &policy);
+ if (err != srtp_err_status_ok) {
+ session_ = nullptr;
+ RTC_LOG(LS_ERROR) << "Failed to create SRTP session, err=" << err;
+ return false;
+ }
+ srtp_set_user_data(session_, this);
+ } else {
+ int err = srtp_update(session_, &policy);
+ if (err != srtp_err_status_ok) {
+ RTC_LOG(LS_ERROR) << "Failed to update SRTP session, err=" << err;
+ return false;
+ }
+ }
+
+ rtp_auth_tag_len_ = policy.rtp.auth_tag_len;
+ rtcp_auth_tag_len_ = policy.rtcp.auth_tag_len;
+ external_auth_active_ = (policy.rtp.auth_type == EXTERNAL_HMAC_SHA1);
+ return true;
+}
+
+bool SrtpSession::SetKey(int type,
+ int crypto_suite,
+ const uint8_t* key,
+ size_t len,
+ const std::vector<int>& extension_ids) {
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ if (session_) {
+ RTC_LOG(LS_ERROR) << "Failed to create SRTP session: "
+ "SRTP session already created";
+ return false;
+ }
+
+ // This is the first time we need to actually interact with libsrtp, so
+ // initialize it if needed.
+ if (LibSrtpInitializer::Get().IncrementLibsrtpUsageCountAndMaybeInit(
+ &SrtpSession::HandleEventThunk)) {
+ inited_ = true;
+ } else {
+ return false;
+ }
+
+ return DoSetKey(type, crypto_suite, key, len, extension_ids);
+}
+
+bool SrtpSession::UpdateKey(int type,
+ int crypto_suite,
+ const uint8_t* key,
+ size_t len,
+ const std::vector<int>& extension_ids) {
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ if (!session_) {
+ RTC_LOG(LS_ERROR) << "Failed to update non-existing SRTP session";
+ return false;
+ }
+
+ return DoSetKey(type, crypto_suite, key, len, extension_ids);
+}
+
+void ProhibitLibsrtpInitialization() {
+ LibSrtpInitializer::Get().ProhibitLibsrtpInitialization();
+}
+
+void SrtpSession::HandleEvent(const srtp_event_data_t* ev) {
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ switch (ev->event) {
+ case event_ssrc_collision:
+ RTC_LOG(LS_INFO) << "SRTP event: SSRC collision";
+ break;
+ case event_key_soft_limit:
+ RTC_LOG(LS_INFO) << "SRTP event: reached soft key usage limit";
+ break;
+ case event_key_hard_limit:
+ RTC_LOG(LS_INFO) << "SRTP event: reached hard key usage limit";
+ break;
+ case event_packet_index_limit:
+ RTC_LOG(LS_INFO)
+ << "SRTP event: reached hard packet limit (2^48 packets)";
+ break;
+ default:
+ RTC_LOG(LS_INFO) << "SRTP event: unknown " << ev->event;
+ break;
+ }
+}
+
+void SrtpSession::HandleEventThunk(srtp_event_data_t* ev) {
+ // Callback will be executed from same thread that calls the "srtp_protect"
+ // and "srtp_unprotect" functions.
+ SrtpSession* session =
+ static_cast<SrtpSession*>(srtp_get_user_data(ev->session));
+ if (session) {
+ session->HandleEvent(ev);
+ }
+}
+
+// Logs the unencrypted packet in text2pcap format. This can then be
+// extracted by searching for RTP_DUMP
+// grep RTP_DUMP chrome_debug.log > in.txt
+// and converted to pcap using
+// text2pcap -D -u 1000,2000 -t %H:%M:%S. in.txt out.pcap
+// The resulting file can be replayed using the WebRTC video_replay tool and
+// be inspected in Wireshark using the RTP, VP8 and H264 dissectors.
+void SrtpSession::DumpPacket(const void* buf, int len, bool outbound) {
+ int64_t time_of_day = rtc::TimeUTCMillis() % (24 * 3600 * 1000);
+ int64_t hours = time_of_day / (3600 * 1000);
+ int64_t minutes = (time_of_day / (60 * 1000)) % 60;
+ int64_t seconds = (time_of_day / 1000) % 60;
+ int64_t millis = time_of_day % 1000;
+ RTC_LOG(LS_VERBOSE) << "\n"
+ << (outbound ? "O" : "I") << " " << std::setfill('0')
+ << std::setw(2) << hours << ":" << std::setfill('0')
+ << std::setw(2) << minutes << ":" << std::setfill('0')
+ << std::setw(2) << seconds << "." << std::setfill('0')
+ << std::setw(3) << millis << " "
+ << "000000 "
+ << rtc::hex_encode_with_delimiter(
+ absl::string_view((const char*)buf, len), ' ')
+ << " # RTP_DUMP";
+}
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/pc/srtp_session.h b/third_party/libwebrtc/pc/srtp_session.h
new file mode 100644
index 0000000000..60f1860ada
--- /dev/null
+++ b/third_party/libwebrtc/pc/srtp_session.h
@@ -0,0 +1,146 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_SRTP_SESSION_H_
+#define PC_SRTP_SESSION_H_
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <vector>
+
+#include "api/field_trials_view.h"
+#include "api/scoped_refptr.h"
+#include "api/sequence_checker.h"
+#include "rtc_base/synchronization/mutex.h"
+
+// Forward declaration to avoid pulling in libsrtp headers here
+struct srtp_event_data_t;
+struct srtp_ctx_t_;
+
+namespace cricket {
+
+// Prohibits webrtc from initializing libsrtp. This can be used if libsrtp is
+// initialized by another library or explicitly. Note that this must be called
+// before creating an SRTP session with WebRTC.
+void ProhibitLibsrtpInitialization();
+
+// Class that wraps a libSRTP session.
+class SrtpSession {
+ public:
+ SrtpSession();
+ explicit SrtpSession(const webrtc::FieldTrialsView& field_trials);
+ ~SrtpSession();
+
+ SrtpSession(const SrtpSession&) = delete;
+ SrtpSession& operator=(const SrtpSession&) = delete;
+
+ // Configures the session for sending data using the specified
+ // crypto suite and key. Receiving must be done by a separate session.
+ bool SetSend(int crypto_suite,
+ const uint8_t* key,
+ size_t len,
+ const std::vector<int>& extension_ids);
+ bool UpdateSend(int crypto_suite,
+ const uint8_t* key,
+ size_t len,
+ const std::vector<int>& extension_ids);
+
+ // Configures the session for receiving data using the specified
+ // crypto suite and key. Sending must be done by a separate session.
+ bool SetRecv(int crypto_suite,
+ const uint8_t* key,
+ size_t len,
+ const std::vector<int>& extension_ids);
+ bool UpdateRecv(int crypto_suite,
+ const uint8_t* key,
+ size_t len,
+ const std::vector<int>& extension_ids);
+
+ // Encrypts/signs an individual RTP/RTCP packet, in-place.
+ // If an HMAC is used, this will increase the packet size.
+ bool ProtectRtp(void* data, int in_len, int max_len, int* out_len);
+ // Overloaded version, outputs packet index.
+ bool ProtectRtp(void* data,
+ int in_len,
+ int max_len,
+ int* out_len,
+ int64_t* index);
+ bool ProtectRtcp(void* data, int in_len, int max_len, int* out_len);
+ // Decrypts/verifies an individual RTP/RTCP packet.
+ // If an HMAC is used, this will decrease the packet size.
+ bool UnprotectRtp(void* data, int in_len, int* out_len);
+ bool UnprotectRtcp(void* data, int in_len, int* out_len);
+
+ // Helper method to get authentication params.
+ bool GetRtpAuthParams(uint8_t** key, int* key_len, int* tag_len);
+
+ int GetSrtpOverhead() const;
+
+ // If external auth is enabled, SRTP will write a dummy auth tag that then
+ // later must get replaced before the packet is sent out. Only supported for
+ // non-GCM cipher suites and can be checked through "IsExternalAuthActive"
+ // if it is actually used. This method is only valid before the RTP params
+ // have been set.
+ void EnableExternalAuth();
+ bool IsExternalAuthEnabled() const;
+
+ // A SRTP session supports external creation of the auth tag if a non-GCM
+ // cipher is used. This method is only valid after the RTP params have
+ // been set.
+ bool IsExternalAuthActive() const;
+
+ private:
+ bool DoSetKey(int type,
+ int crypto_suite,
+ const uint8_t* key,
+ size_t len,
+ const std::vector<int>& extension_ids);
+ bool SetKey(int type,
+ int crypto_suite,
+ const uint8_t* key,
+ size_t len,
+ const std::vector<int>& extension_ids);
+ bool UpdateKey(int type,
+ int crypto_suite,
+ const uint8_t* key,
+ size_t len,
+ const std::vector<int>& extension_ids);
+ // Returns send stream current packet index from srtp db.
+ bool GetSendStreamPacketIndex(void* data, int in_len, int64_t* index);
+
+ // Writes unencrypted packets in text2pcap format to the log file
+ // for debugging.
+ void DumpPacket(const void* buf, int len, bool outbound);
+
+ void HandleEvent(const srtp_event_data_t* ev);
+ static void HandleEventThunk(srtp_event_data_t* ev);
+
+ webrtc::SequenceChecker thread_checker_;
+ srtp_ctx_t_* session_ = nullptr;
+
+ // Overhead of the SRTP auth tag for RTP and RTCP in bytes.
+ // Depends on the cipher suite used and is usually the same with the exception
+ // of the kCsAesCm128HmacSha1_32 cipher suite. The additional four bytes
+ // required for RTCP protection are not included.
+ int rtp_auth_tag_len_ = 0;
+ int rtcp_auth_tag_len_ = 0;
+
+ bool inited_ = false;
+ int last_send_seq_num_ = -1;
+ bool external_auth_active_ = false;
+ bool external_auth_enabled_ = false;
+ int decryption_failure_count_ = 0;
+ bool dump_plain_rtp_ = false;
+};
+
+} // namespace cricket
+
+#endif // PC_SRTP_SESSION_H_
diff --git a/third_party/libwebrtc/pc/srtp_session_unittest.cc b/third_party/libwebrtc/pc/srtp_session_unittest.cc
new file mode 100644
index 0000000000..16a840a307
--- /dev/null
+++ b/third_party/libwebrtc/pc/srtp_session_unittest.cc
@@ -0,0 +1,254 @@
+/*
+ * Copyright 2004 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/srtp_session.h"
+
+#include <string.h>
+
+#include <string>
+
+#include "media/base/fake_rtp.h"
+#include "pc/test/srtp_test_util.h"
+#include "rtc_base/byte_order.h"
+#include "rtc_base/ssl_stream_adapter.h" // For rtc::SRTP_*
+#include "system_wrappers/include/metrics.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+#include "test/scoped_key_value_config.h"
+#include "third_party/libsrtp/include/srtp.h"
+
+using ::testing::ElementsAre;
+using ::testing::Pair;
+
+namespace rtc {
+
+std::vector<int> kEncryptedHeaderExtensionIds;
+
+class SrtpSessionTest : public ::testing::Test {
+ public:
+ SrtpSessionTest() : s1_(field_trials_), s2_(field_trials_) {
+ webrtc::metrics::Reset();
+ }
+
+ protected:
+ virtual void SetUp() {
+ rtp_len_ = sizeof(kPcmuFrame);
+ rtcp_len_ = sizeof(kRtcpReport);
+ memcpy(rtp_packet_, kPcmuFrame, rtp_len_);
+ memcpy(rtcp_packet_, kRtcpReport, rtcp_len_);
+ }
+ void TestProtectRtp(const std::string& cs) {
+ int out_len = 0;
+ EXPECT_TRUE(
+ s1_.ProtectRtp(rtp_packet_, rtp_len_, sizeof(rtp_packet_), &out_len));
+ EXPECT_EQ(out_len, rtp_len_ + rtp_auth_tag_len(cs));
+ EXPECT_NE(0, memcmp(rtp_packet_, kPcmuFrame, rtp_len_));
+ rtp_len_ = out_len;
+ }
+ void TestProtectRtcp(const std::string& cs) {
+ int out_len = 0;
+ EXPECT_TRUE(s1_.ProtectRtcp(rtcp_packet_, rtcp_len_, sizeof(rtcp_packet_),
+ &out_len));
+ EXPECT_EQ(out_len, rtcp_len_ + 4 + rtcp_auth_tag_len(cs)); // NOLINT
+ EXPECT_NE(0, memcmp(rtcp_packet_, kRtcpReport, rtcp_len_));
+ rtcp_len_ = out_len;
+ }
+ void TestUnprotectRtp(const std::string& cs) {
+ int out_len = 0, expected_len = sizeof(kPcmuFrame);
+ EXPECT_TRUE(s2_.UnprotectRtp(rtp_packet_, rtp_len_, &out_len));
+ EXPECT_EQ(expected_len, out_len);
+ EXPECT_EQ(0, memcmp(rtp_packet_, kPcmuFrame, out_len));
+ }
+ void TestUnprotectRtcp(const std::string& cs) {
+ int out_len = 0, expected_len = sizeof(kRtcpReport);
+ EXPECT_TRUE(s2_.UnprotectRtcp(rtcp_packet_, rtcp_len_, &out_len));
+ EXPECT_EQ(expected_len, out_len);
+ EXPECT_EQ(0, memcmp(rtcp_packet_, kRtcpReport, out_len));
+ }
+ webrtc::test::ScopedKeyValueConfig field_trials_;
+ cricket::SrtpSession s1_;
+ cricket::SrtpSession s2_;
+ char rtp_packet_[sizeof(kPcmuFrame) + 10];
+ char rtcp_packet_[sizeof(kRtcpReport) + 4 + 10];
+ int rtp_len_;
+ int rtcp_len_;
+};
+
+// Test that we can set up the session and keys properly.
+TEST_F(SrtpSessionTest, TestGoodSetup) {
+ EXPECT_TRUE(s1_.SetSend(kSrtpAes128CmSha1_80, kTestKey1, kTestKeyLen,
+ kEncryptedHeaderExtensionIds));
+ EXPECT_TRUE(s2_.SetRecv(kSrtpAes128CmSha1_80, kTestKey1, kTestKeyLen,
+ kEncryptedHeaderExtensionIds));
+}
+
+// Test that we can't change the keys once set.
+TEST_F(SrtpSessionTest, TestBadSetup) {
+ EXPECT_TRUE(s1_.SetSend(kSrtpAes128CmSha1_80, kTestKey1, kTestKeyLen,
+ kEncryptedHeaderExtensionIds));
+ EXPECT_TRUE(s2_.SetRecv(kSrtpAes128CmSha1_80, kTestKey1, kTestKeyLen,
+ kEncryptedHeaderExtensionIds));
+ EXPECT_FALSE(s1_.SetSend(kSrtpAes128CmSha1_80, kTestKey2, kTestKeyLen,
+ kEncryptedHeaderExtensionIds));
+ EXPECT_FALSE(s2_.SetRecv(kSrtpAes128CmSha1_80, kTestKey2, kTestKeyLen,
+ kEncryptedHeaderExtensionIds));
+}
+
+// Test that we fail keys of the wrong length.
+TEST_F(SrtpSessionTest, TestKeysTooShort) {
+ EXPECT_FALSE(s1_.SetSend(kSrtpAes128CmSha1_80, kTestKey1, 1,
+ kEncryptedHeaderExtensionIds));
+ EXPECT_FALSE(s2_.SetRecv(kSrtpAes128CmSha1_80, kTestKey1, 1,
+ kEncryptedHeaderExtensionIds));
+}
+
+// Test that we can encrypt and decrypt RTP/RTCP using AES_CM_128_HMAC_SHA1_80.
+TEST_F(SrtpSessionTest, TestProtect_AES_CM_128_HMAC_SHA1_80) {
+ EXPECT_TRUE(s1_.SetSend(kSrtpAes128CmSha1_80, kTestKey1, kTestKeyLen,
+ kEncryptedHeaderExtensionIds));
+ EXPECT_TRUE(s2_.SetRecv(kSrtpAes128CmSha1_80, kTestKey1, kTestKeyLen,
+ kEncryptedHeaderExtensionIds));
+ TestProtectRtp(kCsAesCm128HmacSha1_80);
+ TestProtectRtcp(kCsAesCm128HmacSha1_80);
+ TestUnprotectRtp(kCsAesCm128HmacSha1_80);
+ TestUnprotectRtcp(kCsAesCm128HmacSha1_80);
+}
+
+// Test that we can encrypt and decrypt RTP/RTCP using AES_CM_128_HMAC_SHA1_32.
+TEST_F(SrtpSessionTest, TestProtect_AES_CM_128_HMAC_SHA1_32) {
+ EXPECT_TRUE(s1_.SetSend(kSrtpAes128CmSha1_32, kTestKey1, kTestKeyLen,
+ kEncryptedHeaderExtensionIds));
+ EXPECT_TRUE(s2_.SetRecv(kSrtpAes128CmSha1_32, kTestKey1, kTestKeyLen,
+ kEncryptedHeaderExtensionIds));
+ TestProtectRtp(kCsAesCm128HmacSha1_32);
+ TestProtectRtcp(kCsAesCm128HmacSha1_32);
+ TestUnprotectRtp(kCsAesCm128HmacSha1_32);
+ TestUnprotectRtcp(kCsAesCm128HmacSha1_32);
+}
+
+TEST_F(SrtpSessionTest, TestGetSendStreamPacketIndex) {
+ EXPECT_TRUE(s1_.SetSend(kSrtpAes128CmSha1_32, kTestKey1, kTestKeyLen,
+ kEncryptedHeaderExtensionIds));
+ int64_t index;
+ int out_len = 0;
+ EXPECT_TRUE(s1_.ProtectRtp(rtp_packet_, rtp_len_, sizeof(rtp_packet_),
+ &out_len, &index));
+ // `index` will be shifted by 16.
+ int64_t be64_index = static_cast<int64_t>(NetworkToHost64(1 << 16));
+ EXPECT_EQ(be64_index, index);
+}
+
+// Test that we fail to unprotect if someone tampers with the RTP/RTCP paylaods.
+TEST_F(SrtpSessionTest, TestTamperReject) {
+ int out_len;
+ EXPECT_TRUE(s1_.SetSend(kSrtpAes128CmSha1_80, kTestKey1, kTestKeyLen,
+ kEncryptedHeaderExtensionIds));
+ EXPECT_TRUE(s2_.SetRecv(kSrtpAes128CmSha1_80, kTestKey1, kTestKeyLen,
+ kEncryptedHeaderExtensionIds));
+ TestProtectRtp(kCsAesCm128HmacSha1_80);
+ TestProtectRtcp(kCsAesCm128HmacSha1_80);
+ rtp_packet_[0] = 0x12;
+ rtcp_packet_[1] = 0x34;
+ EXPECT_FALSE(s2_.UnprotectRtp(rtp_packet_, rtp_len_, &out_len));
+ EXPECT_METRIC_THAT(
+ webrtc::metrics::Samples("WebRTC.PeerConnection.SrtpUnprotectError"),
+ ElementsAre(Pair(srtp_err_status_bad_param, 1)));
+ EXPECT_FALSE(s2_.UnprotectRtcp(rtcp_packet_, rtcp_len_, &out_len));
+ EXPECT_METRIC_THAT(
+ webrtc::metrics::Samples("WebRTC.PeerConnection.SrtcpUnprotectError"),
+ ElementsAre(Pair(srtp_err_status_auth_fail, 1)));
+}
+
+// Test that we fail to unprotect if the payloads are not authenticated.
+TEST_F(SrtpSessionTest, TestUnencryptReject) {
+ int out_len;
+ EXPECT_TRUE(s1_.SetSend(kSrtpAes128CmSha1_80, kTestKey1, kTestKeyLen,
+ kEncryptedHeaderExtensionIds));
+ EXPECT_TRUE(s2_.SetRecv(kSrtpAes128CmSha1_80, kTestKey1, kTestKeyLen,
+ kEncryptedHeaderExtensionIds));
+ EXPECT_FALSE(s2_.UnprotectRtp(rtp_packet_, rtp_len_, &out_len));
+ EXPECT_METRIC_THAT(
+ webrtc::metrics::Samples("WebRTC.PeerConnection.SrtpUnprotectError"),
+ ElementsAre(Pair(srtp_err_status_auth_fail, 1)));
+ EXPECT_FALSE(s2_.UnprotectRtcp(rtcp_packet_, rtcp_len_, &out_len));
+ EXPECT_METRIC_THAT(
+ webrtc::metrics::Samples("WebRTC.PeerConnection.SrtcpUnprotectError"),
+ ElementsAre(Pair(srtp_err_status_cant_check, 1)));
+}
+
+// Test that we fail when using buffers that are too small.
+TEST_F(SrtpSessionTest, TestBuffersTooSmall) {
+ int out_len;
+ EXPECT_TRUE(s1_.SetSend(kSrtpAes128CmSha1_80, kTestKey1, kTestKeyLen,
+ kEncryptedHeaderExtensionIds));
+ EXPECT_FALSE(s1_.ProtectRtp(rtp_packet_, rtp_len_, sizeof(rtp_packet_) - 10,
+ &out_len));
+ EXPECT_FALSE(s1_.ProtectRtcp(rtcp_packet_, rtcp_len_,
+ sizeof(rtcp_packet_) - 14, &out_len));
+}
+
+TEST_F(SrtpSessionTest, TestReplay) {
+ static const uint16_t kMaxSeqnum = static_cast<uint16_t>(-1);
+ static const uint16_t seqnum_big = 62275;
+ static const uint16_t seqnum_small = 10;
+ static const uint16_t replay_window = 1024;
+ int out_len;
+
+ EXPECT_TRUE(s1_.SetSend(kSrtpAes128CmSha1_80, kTestKey1, kTestKeyLen,
+ kEncryptedHeaderExtensionIds));
+ EXPECT_TRUE(s2_.SetRecv(kSrtpAes128CmSha1_80, kTestKey1, kTestKeyLen,
+ kEncryptedHeaderExtensionIds));
+
+ // Initial sequence number.
+ SetBE16(reinterpret_cast<uint8_t*>(rtp_packet_) + 2, seqnum_big);
+ EXPECT_TRUE(
+ s1_.ProtectRtp(rtp_packet_, rtp_len_, sizeof(rtp_packet_), &out_len));
+
+ // Replay within the 1024 window should succeed.
+ SetBE16(reinterpret_cast<uint8_t*>(rtp_packet_) + 2,
+ seqnum_big - replay_window + 1);
+ EXPECT_TRUE(
+ s1_.ProtectRtp(rtp_packet_, rtp_len_, sizeof(rtp_packet_), &out_len));
+
+ // Replay out side of the 1024 window should fail.
+ SetBE16(reinterpret_cast<uint8_t*>(rtp_packet_) + 2,
+ seqnum_big - replay_window - 1);
+ EXPECT_FALSE(
+ s1_.ProtectRtp(rtp_packet_, rtp_len_, sizeof(rtp_packet_), &out_len));
+
+ // Increment sequence number to a small number.
+ SetBE16(reinterpret_cast<uint8_t*>(rtp_packet_) + 2, seqnum_small);
+ EXPECT_TRUE(
+ s1_.ProtectRtp(rtp_packet_, rtp_len_, sizeof(rtp_packet_), &out_len));
+
+ // Replay around 0 but out side of the 1024 window should fail.
+ SetBE16(reinterpret_cast<uint8_t*>(rtp_packet_) + 2,
+ kMaxSeqnum + seqnum_small - replay_window - 1);
+ EXPECT_FALSE(
+ s1_.ProtectRtp(rtp_packet_, rtp_len_, sizeof(rtp_packet_), &out_len));
+
+ // Replay around 0 but within the 1024 window should succeed.
+ for (uint16_t seqnum = 65000; seqnum < 65003; ++seqnum) {
+ SetBE16(reinterpret_cast<uint8_t*>(rtp_packet_) + 2, seqnum);
+ EXPECT_TRUE(
+ s1_.ProtectRtp(rtp_packet_, rtp_len_, sizeof(rtp_packet_), &out_len));
+ }
+
+ // Go back to normal sequence nubmer.
+ // NOTE: without the fix in libsrtp, this would fail. This is because
+ // without the fix, the loop above would keep incrementing local sequence
+ // number in libsrtp, eventually the new sequence number would go out side
+ // of the window.
+ SetBE16(reinterpret_cast<uint8_t*>(rtp_packet_) + 2, seqnum_small + 1);
+ EXPECT_TRUE(
+ s1_.ProtectRtp(rtp_packet_, rtp_len_, sizeof(rtp_packet_), &out_len));
+}
+
+} // namespace rtc
diff --git a/third_party/libwebrtc/pc/srtp_transport.cc b/third_party/libwebrtc/pc/srtp_transport.cc
new file mode 100644
index 0000000000..cc20216672
--- /dev/null
+++ b/third_party/libwebrtc/pc/srtp_transport.cc
@@ -0,0 +1,522 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/srtp_transport.h"
+
+#include <string.h>
+
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "absl/strings/match.h"
+#include "media/base/rtp_utils.h"
+#include "modules/rtp_rtcp/source/rtp_util.h"
+#include "pc/rtp_transport.h"
+#include "pc/srtp_session.h"
+#include "rtc_base/async_packet_socket.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/copy_on_write_buffer.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/numerics/safe_conversions.h"
+#include "rtc_base/ssl_stream_adapter.h"
+#include "rtc_base/third_party/base64/base64.h"
+#include "rtc_base/trace_event.h"
+#include "rtc_base/zero_memory.h"
+
+namespace webrtc {
+
+SrtpTransport::SrtpTransport(bool rtcp_mux_enabled,
+ const FieldTrialsView& field_trials)
+ : RtpTransport(rtcp_mux_enabled), field_trials_(field_trials) {}
+
+RTCError SrtpTransport::SetSrtpSendKey(const cricket::CryptoParams& params) {
+ if (send_params_) {
+ LOG_AND_RETURN_ERROR(
+ webrtc::RTCErrorType::UNSUPPORTED_OPERATION,
+ "Setting the SRTP send key twice is currently unsupported.");
+ }
+ if (recv_params_ && recv_params_->crypto_suite != params.crypto_suite) {
+ LOG_AND_RETURN_ERROR(
+ webrtc::RTCErrorType::UNSUPPORTED_OPERATION,
+ "The send key and receive key must have the same cipher suite.");
+ }
+
+ send_crypto_suite_ = rtc::SrtpCryptoSuiteFromName(params.crypto_suite);
+ if (*send_crypto_suite_ == rtc::kSrtpInvalidCryptoSuite) {
+ return RTCError(RTCErrorType::INVALID_PARAMETER,
+ "Invalid SRTP crypto suite");
+ }
+
+ int send_key_len, send_salt_len;
+ if (!rtc::GetSrtpKeyAndSaltLengths(*send_crypto_suite_, &send_key_len,
+ &send_salt_len)) {
+ return RTCError(RTCErrorType::INVALID_PARAMETER,
+ "Could not get lengths for crypto suite(s):"
+ " send crypto_suite ");
+ }
+
+ send_key_ = rtc::ZeroOnFreeBuffer<uint8_t>(send_key_len + send_salt_len);
+ if (!ParseKeyParams(params.key_params, send_key_.data(), send_key_.size())) {
+ return RTCError(RTCErrorType::INVALID_PARAMETER,
+ "Failed to parse the crypto key params");
+ }
+
+ if (!MaybeSetKeyParams()) {
+ return RTCError(RTCErrorType::INVALID_PARAMETER,
+ "Failed to set the crypto key params");
+ }
+ send_params_ = params;
+ return RTCError::OK();
+}
+
+RTCError SrtpTransport::SetSrtpReceiveKey(const cricket::CryptoParams& params) {
+ if (recv_params_) {
+ LOG_AND_RETURN_ERROR(
+ webrtc::RTCErrorType::UNSUPPORTED_OPERATION,
+ "Setting the SRTP send key twice is currently unsupported.");
+ }
+ if (send_params_ && send_params_->crypto_suite != params.crypto_suite) {
+ LOG_AND_RETURN_ERROR(
+ webrtc::RTCErrorType::UNSUPPORTED_OPERATION,
+ "The send key and receive key must have the same cipher suite.");
+ }
+
+ recv_crypto_suite_ = rtc::SrtpCryptoSuiteFromName(params.crypto_suite);
+ if (*recv_crypto_suite_ == rtc::kSrtpInvalidCryptoSuite) {
+ return RTCError(RTCErrorType::INVALID_PARAMETER,
+ "Invalid SRTP crypto suite");
+ }
+
+ int recv_key_len, recv_salt_len;
+ if (!rtc::GetSrtpKeyAndSaltLengths(*recv_crypto_suite_, &recv_key_len,
+ &recv_salt_len)) {
+ return RTCError(RTCErrorType::INVALID_PARAMETER,
+ "Could not get lengths for crypto suite(s):"
+ " recv crypto_suite ");
+ }
+
+ recv_key_ = rtc::ZeroOnFreeBuffer<uint8_t>(recv_key_len + recv_salt_len);
+ if (!ParseKeyParams(params.key_params, recv_key_.data(), recv_key_.size())) {
+ return RTCError(RTCErrorType::INVALID_PARAMETER,
+ "Failed to parse the crypto key params");
+ }
+
+ if (!MaybeSetKeyParams()) {
+ return RTCError(RTCErrorType::INVALID_PARAMETER,
+ "Failed to set the crypto key params");
+ }
+ recv_params_ = params;
+ return RTCError::OK();
+}
+
+bool SrtpTransport::SendRtpPacket(rtc::CopyOnWriteBuffer* packet,
+ const rtc::PacketOptions& options,
+ int flags) {
+ if (!IsSrtpActive()) {
+ RTC_LOG(LS_ERROR)
+ << "Failed to send the packet because SRTP transport is inactive.";
+ return false;
+ }
+ rtc::PacketOptions updated_options = options;
+ TRACE_EVENT0("webrtc", "SRTP Encode");
+ bool res;
+ uint8_t* data = packet->MutableData();
+ int len = rtc::checked_cast<int>(packet->size());
+// If ENABLE_EXTERNAL_AUTH flag is on then packet authentication is not done
+// inside libsrtp for a RTP packet. A external HMAC module will be writing
+// a fake HMAC value. This is ONLY done for a RTP packet.
+// Socket layer will update rtp sendtime extension header if present in
+// packet with current time before updating the HMAC.
+#if !defined(ENABLE_EXTERNAL_AUTH)
+ res = ProtectRtp(data, len, static_cast<int>(packet->capacity()), &len);
+#else
+ if (!IsExternalAuthActive()) {
+ res = ProtectRtp(data, len, static_cast<int>(packet->capacity()), &len);
+ } else {
+ updated_options.packet_time_params.rtp_sendtime_extension_id =
+ rtp_abs_sendtime_extn_id_;
+ res = ProtectRtp(data, len, static_cast<int>(packet->capacity()), &len,
+ &updated_options.packet_time_params.srtp_packet_index);
+ // If protection succeeds, let's get auth params from srtp.
+ if (res) {
+ uint8_t* auth_key = nullptr;
+ int key_len = 0;
+ res = GetRtpAuthParams(
+ &auth_key, &key_len,
+ &updated_options.packet_time_params.srtp_auth_tag_len);
+ if (res) {
+ updated_options.packet_time_params.srtp_auth_key.resize(key_len);
+ updated_options.packet_time_params.srtp_auth_key.assign(
+ auth_key, auth_key + key_len);
+ }
+ }
+ }
+#endif
+ if (!res) {
+ uint16_t seq_num = ParseRtpSequenceNumber(*packet);
+ uint32_t ssrc = ParseRtpSsrc(*packet);
+ RTC_LOG(LS_ERROR) << "Failed to protect RTP packet: size=" << len
+ << ", seqnum=" << seq_num << ", SSRC=" << ssrc;
+ return false;
+ }
+
+ // Update the length of the packet now that we've added the auth tag.
+ packet->SetSize(len);
+ return SendPacket(/*rtcp=*/false, packet, updated_options, flags);
+}
+
+bool SrtpTransport::SendRtcpPacket(rtc::CopyOnWriteBuffer* packet,
+ const rtc::PacketOptions& options,
+ int flags) {
+ if (!IsSrtpActive()) {
+ RTC_LOG(LS_ERROR)
+ << "Failed to send the packet because SRTP transport is inactive.";
+ return false;
+ }
+
+ TRACE_EVENT0("webrtc", "SRTP Encode");
+ uint8_t* data = packet->MutableData();
+ int len = rtc::checked_cast<int>(packet->size());
+ if (!ProtectRtcp(data, len, static_cast<int>(packet->capacity()), &len)) {
+ int type = -1;
+ cricket::GetRtcpType(data, len, &type);
+ RTC_LOG(LS_ERROR) << "Failed to protect RTCP packet: size=" << len
+ << ", type=" << type;
+ return false;
+ }
+ // Update the length of the packet now that we've added the auth tag.
+ packet->SetSize(len);
+
+ return SendPacket(/*rtcp=*/true, packet, options, flags);
+}
+
+void SrtpTransport::OnRtpPacketReceived(rtc::CopyOnWriteBuffer packet,
+ int64_t packet_time_us) {
+ TRACE_EVENT0("webrtc", "SrtpTransport::OnRtpPacketReceived");
+ if (!IsSrtpActive()) {
+ RTC_LOG(LS_WARNING)
+ << "Inactive SRTP transport received an RTP packet. Drop it.";
+ return;
+ }
+ char* data = packet.MutableData<char>();
+ int len = rtc::checked_cast<int>(packet.size());
+ if (!UnprotectRtp(data, len, &len)) {
+ // Limit the error logging to avoid excessive logs when there are lots of
+ // bad packets.
+ const int kFailureLogThrottleCount = 100;
+ if (decryption_failure_count_ % kFailureLogThrottleCount == 0) {
+ RTC_LOG(LS_ERROR) << "Failed to unprotect RTP packet: size=" << len
+ << ", seqnum=" << ParseRtpSequenceNumber(packet)
+ << ", SSRC=" << ParseRtpSsrc(packet)
+ << ", previous failure count: "
+ << decryption_failure_count_;
+ }
+ ++decryption_failure_count_;
+ return;
+ }
+ packet.SetSize(len);
+ DemuxPacket(std::move(packet), packet_time_us);
+}
+
+void SrtpTransport::OnRtcpPacketReceived(rtc::CopyOnWriteBuffer packet,
+ int64_t packet_time_us) {
+ TRACE_EVENT0("webrtc", "SrtpTransport::OnRtcpPacketReceived");
+ if (!IsSrtpActive()) {
+ RTC_LOG(LS_WARNING)
+ << "Inactive SRTP transport received an RTCP packet. Drop it.";
+ return;
+ }
+ char* data = packet.MutableData<char>();
+ int len = rtc::checked_cast<int>(packet.size());
+ if (!UnprotectRtcp(data, len, &len)) {
+ int type = -1;
+ cricket::GetRtcpType(data, len, &type);
+ RTC_LOG(LS_ERROR) << "Failed to unprotect RTCP packet: size=" << len
+ << ", type=" << type;
+ return;
+ }
+ packet.SetSize(len);
+ SendRtcpPacketReceived(&packet, packet_time_us);
+}
+
+void SrtpTransport::OnNetworkRouteChanged(
+ absl::optional<rtc::NetworkRoute> network_route) {
+ // Only append the SRTP overhead when there is a selected network route.
+ if (network_route) {
+ int srtp_overhead = 0;
+ if (IsSrtpActive()) {
+ GetSrtpOverhead(&srtp_overhead);
+ }
+ network_route->packet_overhead += srtp_overhead;
+ }
+ SendNetworkRouteChanged(network_route);
+}
+
+void SrtpTransport::OnWritableState(
+ rtc::PacketTransportInternal* packet_transport) {
+ SendWritableState(IsWritable(/*rtcp=*/false) && IsWritable(/*rtcp=*/true));
+}
+
+bool SrtpTransport::SetRtpParams(int send_crypto_suite,
+ const uint8_t* send_key,
+ int send_key_len,
+ const std::vector<int>& send_extension_ids,
+ int recv_crypto_suite,
+ const uint8_t* recv_key,
+ int recv_key_len,
+ const std::vector<int>& recv_extension_ids) {
+ // If parameters are being set for the first time, we should create new SRTP
+ // sessions and call "SetSend/SetRecv". Otherwise we should call
+ // "UpdateSend"/"UpdateRecv" on the existing sessions, which will internally
+ // call "srtp_update".
+ bool new_sessions = false;
+ if (!send_session_) {
+ RTC_DCHECK(!recv_session_);
+ CreateSrtpSessions();
+ new_sessions = true;
+ }
+ bool ret = new_sessions
+ ? send_session_->SetSend(send_crypto_suite, send_key,
+ send_key_len, send_extension_ids)
+ : send_session_->UpdateSend(send_crypto_suite, send_key,
+ send_key_len, send_extension_ids);
+ if (!ret) {
+ ResetParams();
+ return false;
+ }
+
+ ret = new_sessions
+ ? recv_session_->SetRecv(recv_crypto_suite, recv_key, recv_key_len,
+ recv_extension_ids)
+ : recv_session_->UpdateRecv(recv_crypto_suite, recv_key,
+ recv_key_len, recv_extension_ids);
+ if (!ret) {
+ ResetParams();
+ return false;
+ }
+
+ RTC_LOG(LS_INFO) << "SRTP " << (new_sessions ? "activated" : "updated")
+ << " with negotiated parameters: send crypto_suite "
+ << send_crypto_suite << " recv crypto_suite "
+ << recv_crypto_suite;
+ MaybeUpdateWritableState();
+ return true;
+}
+
+bool SrtpTransport::SetRtcpParams(int send_crypto_suite,
+ const uint8_t* send_key,
+ int send_key_len,
+ const std::vector<int>& send_extension_ids,
+ int recv_crypto_suite,
+ const uint8_t* recv_key,
+ int recv_key_len,
+ const std::vector<int>& recv_extension_ids) {
+ // This can only be called once, but can be safely called after
+ // SetRtpParams
+ if (send_rtcp_session_ || recv_rtcp_session_) {
+ RTC_LOG(LS_ERROR) << "Tried to set SRTCP Params when filter already active";
+ return false;
+ }
+
+ send_rtcp_session_.reset(new cricket::SrtpSession(field_trials_));
+ if (!send_rtcp_session_->SetSend(send_crypto_suite, send_key, send_key_len,
+ send_extension_ids)) {
+ return false;
+ }
+
+ recv_rtcp_session_.reset(new cricket::SrtpSession(field_trials_));
+ if (!recv_rtcp_session_->SetRecv(recv_crypto_suite, recv_key, recv_key_len,
+ recv_extension_ids)) {
+ return false;
+ }
+
+ RTC_LOG(LS_INFO) << "SRTCP activated with negotiated parameters:"
+ " send crypto_suite "
+ << send_crypto_suite << " recv crypto_suite "
+ << recv_crypto_suite;
+ MaybeUpdateWritableState();
+ return true;
+}
+
+bool SrtpTransport::IsSrtpActive() const {
+ return send_session_ && recv_session_;
+}
+
+bool SrtpTransport::IsWritable(bool rtcp) const {
+ return IsSrtpActive() && RtpTransport::IsWritable(rtcp);
+}
+
+void SrtpTransport::ResetParams() {
+ send_session_ = nullptr;
+ recv_session_ = nullptr;
+ send_rtcp_session_ = nullptr;
+ recv_rtcp_session_ = nullptr;
+ MaybeUpdateWritableState();
+ RTC_LOG(LS_INFO) << "The params in SRTP transport are reset.";
+}
+
+void SrtpTransport::CreateSrtpSessions() {
+ send_session_.reset(new cricket::SrtpSession(field_trials_));
+ recv_session_.reset(new cricket::SrtpSession(field_trials_));
+ if (external_auth_enabled_) {
+ send_session_->EnableExternalAuth();
+ }
+}
+
+bool SrtpTransport::ProtectRtp(void* p, int in_len, int max_len, int* out_len) {
+ if (!IsSrtpActive()) {
+ RTC_LOG(LS_WARNING) << "Failed to ProtectRtp: SRTP not active";
+ return false;
+ }
+ RTC_CHECK(send_session_);
+ return send_session_->ProtectRtp(p, in_len, max_len, out_len);
+}
+
+bool SrtpTransport::ProtectRtp(void* p,
+ int in_len,
+ int max_len,
+ int* out_len,
+ int64_t* index) {
+ if (!IsSrtpActive()) {
+ RTC_LOG(LS_WARNING) << "Failed to ProtectRtp: SRTP not active";
+ return false;
+ }
+ RTC_CHECK(send_session_);
+ return send_session_->ProtectRtp(p, in_len, max_len, out_len, index);
+}
+
+bool SrtpTransport::ProtectRtcp(void* p,
+ int in_len,
+ int max_len,
+ int* out_len) {
+ if (!IsSrtpActive()) {
+ RTC_LOG(LS_WARNING) << "Failed to ProtectRtcp: SRTP not active";
+ return false;
+ }
+ if (send_rtcp_session_) {
+ return send_rtcp_session_->ProtectRtcp(p, in_len, max_len, out_len);
+ } else {
+ RTC_CHECK(send_session_);
+ return send_session_->ProtectRtcp(p, in_len, max_len, out_len);
+ }
+}
+
+bool SrtpTransport::UnprotectRtp(void* p, int in_len, int* out_len) {
+ if (!IsSrtpActive()) {
+ RTC_LOG(LS_WARNING) << "Failed to UnprotectRtp: SRTP not active";
+ return false;
+ }
+ RTC_CHECK(recv_session_);
+ return recv_session_->UnprotectRtp(p, in_len, out_len);
+}
+
+bool SrtpTransport::UnprotectRtcp(void* p, int in_len, int* out_len) {
+ if (!IsSrtpActive()) {
+ RTC_LOG(LS_WARNING) << "Failed to UnprotectRtcp: SRTP not active";
+ return false;
+ }
+ if (recv_rtcp_session_) {
+ return recv_rtcp_session_->UnprotectRtcp(p, in_len, out_len);
+ } else {
+ RTC_CHECK(recv_session_);
+ return recv_session_->UnprotectRtcp(p, in_len, out_len);
+ }
+}
+
+bool SrtpTransport::GetRtpAuthParams(uint8_t** key,
+ int* key_len,
+ int* tag_len) {
+ if (!IsSrtpActive()) {
+ RTC_LOG(LS_WARNING) << "Failed to GetRtpAuthParams: SRTP not active";
+ return false;
+ }
+
+ RTC_CHECK(send_session_);
+ return send_session_->GetRtpAuthParams(key, key_len, tag_len);
+}
+
+bool SrtpTransport::GetSrtpOverhead(int* srtp_overhead) const {
+ if (!IsSrtpActive()) {
+ RTC_LOG(LS_WARNING) << "Failed to GetSrtpOverhead: SRTP not active";
+ return false;
+ }
+
+ RTC_CHECK(send_session_);
+ *srtp_overhead = send_session_->GetSrtpOverhead();
+ return true;
+}
+
+void SrtpTransport::EnableExternalAuth() {
+ RTC_DCHECK(!IsSrtpActive());
+ external_auth_enabled_ = true;
+}
+
+bool SrtpTransport::IsExternalAuthEnabled() const {
+ return external_auth_enabled_;
+}
+
+bool SrtpTransport::IsExternalAuthActive() const {
+ if (!IsSrtpActive()) {
+ RTC_LOG(LS_WARNING)
+ << "Failed to check IsExternalAuthActive: SRTP not active";
+ return false;
+ }
+
+ RTC_CHECK(send_session_);
+ return send_session_->IsExternalAuthActive();
+}
+
+bool SrtpTransport::MaybeSetKeyParams() {
+ if (!send_crypto_suite_ || !recv_crypto_suite_) {
+ return true;
+ }
+
+ return SetRtpParams(*send_crypto_suite_, send_key_.data(),
+ static_cast<int>(send_key_.size()), std::vector<int>(),
+ *recv_crypto_suite_, recv_key_.data(),
+ static_cast<int>(recv_key_.size()), std::vector<int>());
+}
+
+bool SrtpTransport::ParseKeyParams(const std::string& key_params,
+ uint8_t* key,
+ size_t len) {
+ // example key_params: "inline:YUJDZGVmZ2hpSktMbW9QUXJzVHVWd3l6MTIzNDU2"
+
+ // Fail if key-method is wrong.
+ if (!absl::StartsWith(key_params, "inline:")) {
+ return false;
+ }
+
+ // Fail if base64 decode fails, or the key is the wrong size.
+ std::string key_b64(key_params.substr(7)), key_str;
+ if (!rtc::Base64::Decode(key_b64, rtc::Base64::DO_STRICT, &key_str,
+ nullptr) ||
+ key_str.size() != len) {
+ return false;
+ }
+
+ memcpy(key, key_str.c_str(), len);
+ // TODO(bugs.webrtc.org/8905): Switch to ZeroOnFreeBuffer for storing
+ // sensitive data.
+ rtc::ExplicitZeroMemory(&key_str[0], key_str.size());
+ return true;
+}
+
+void SrtpTransport::MaybeUpdateWritableState() {
+ bool writable = IsWritable(/*rtcp=*/true) && IsWritable(/*rtcp=*/false);
+ // Only fire the signal if the writable state changes.
+ if (writable_ != writable) {
+ writable_ = writable;
+ SendWritableState(writable_);
+ }
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/srtp_transport.h b/third_party/libwebrtc/pc/srtp_transport.h
new file mode 100644
index 0000000000..46c11ed56d
--- /dev/null
+++ b/third_party/libwebrtc/pc/srtp_transport.h
@@ -0,0 +1,176 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_SRTP_TRANSPORT_H_
+#define PC_SRTP_TRANSPORT_H_
+
+#include <stddef.h>
+
+#include <cstdint>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/crypto_params.h"
+#include "api/field_trials_view.h"
+#include "api/rtc_error.h"
+#include "p2p/base/packet_transport_internal.h"
+#include "pc/rtp_transport.h"
+#include "pc/srtp_session.h"
+#include "rtc_base/async_packet_socket.h"
+#include "rtc_base/buffer.h"
+#include "rtc_base/copy_on_write_buffer.h"
+#include "rtc_base/network_route.h"
+
+namespace webrtc {
+
+// This subclass of the RtpTransport is used for SRTP which is reponsible for
+// protecting/unprotecting the packets. It provides interfaces to set the crypto
+// parameters for the SrtpSession underneath.
+class SrtpTransport : public RtpTransport {
+ public:
+ SrtpTransport(bool rtcp_mux_enabled, const FieldTrialsView& field_trials);
+
+ virtual ~SrtpTransport() = default;
+
+ virtual RTCError SetSrtpSendKey(const cricket::CryptoParams& params);
+ virtual RTCError SetSrtpReceiveKey(const cricket::CryptoParams& params);
+
+ bool SendRtpPacket(rtc::CopyOnWriteBuffer* packet,
+ const rtc::PacketOptions& options,
+ int flags) override;
+
+ bool SendRtcpPacket(rtc::CopyOnWriteBuffer* packet,
+ const rtc::PacketOptions& options,
+ int flags) override;
+
+ // The transport becomes active if the send_session_ and recv_session_ are
+ // created.
+ bool IsSrtpActive() const override;
+
+ bool IsWritable(bool rtcp) const override;
+
+ // Create new send/recv sessions and set the negotiated crypto keys for RTP
+ // packet encryption. The keys can either come from SDES negotiation or DTLS
+ // handshake.
+ bool SetRtpParams(int send_crypto_suite,
+ const uint8_t* send_key,
+ int send_key_len,
+ const std::vector<int>& send_extension_ids,
+ int recv_crypto_suite,
+ const uint8_t* recv_key,
+ int recv_key_len,
+ const std::vector<int>& recv_extension_ids);
+
+ // Create new send/recv sessions and set the negotiated crypto keys for RTCP
+ // packet encryption. The keys can either come from SDES negotiation or DTLS
+ // handshake.
+ bool SetRtcpParams(int send_crypto_suite,
+ const uint8_t* send_key,
+ int send_key_len,
+ const std::vector<int>& send_extension_ids,
+ int recv_crypto_suite,
+ const uint8_t* recv_key,
+ int recv_key_len,
+ const std::vector<int>& recv_extension_ids);
+
+ void ResetParams();
+
+ // If external auth is enabled, SRTP will write a dummy auth tag that then
+ // later must get replaced before the packet is sent out. Only supported for
+ // non-GCM crypto suites and can be checked through "IsExternalAuthActive"
+ // if it is actually used. This method is only valid before the RTP params
+ // have been set.
+ void EnableExternalAuth();
+ bool IsExternalAuthEnabled() const;
+
+ // A SrtpTransport supports external creation of the auth tag if a non-GCM
+ // cipher is used. This method is only valid after the RTP params have
+ // been set.
+ bool IsExternalAuthActive() const;
+
+ // Returns srtp overhead for rtp packets.
+ bool GetSrtpOverhead(int* srtp_overhead) const;
+
+ // Returns rtp auth params from srtp context.
+ bool GetRtpAuthParams(uint8_t** key, int* key_len, int* tag_len);
+
+ // Cache RTP Absoulute SendTime extension header ID. This is only used when
+ // external authentication is enabled.
+ void CacheRtpAbsSendTimeHeaderExtension(int rtp_abs_sendtime_extn_id) {
+ rtp_abs_sendtime_extn_id_ = rtp_abs_sendtime_extn_id;
+ }
+
+ protected:
+ // If the writable state changed, fire the SignalWritableState.
+ void MaybeUpdateWritableState();
+
+ private:
+ void ConnectToRtpTransport();
+ void CreateSrtpSessions();
+
+ void OnRtpPacketReceived(rtc::CopyOnWriteBuffer packet,
+ int64_t packet_time_us) override;
+ void OnRtcpPacketReceived(rtc::CopyOnWriteBuffer packet,
+ int64_t packet_time_us) override;
+ void OnNetworkRouteChanged(
+ absl::optional<rtc::NetworkRoute> network_route) override;
+
+ // Override the RtpTransport::OnWritableState.
+ void OnWritableState(rtc::PacketTransportInternal* packet_transport) override;
+
+ bool ProtectRtp(void* data, int in_len, int max_len, int* out_len);
+
+ // Overloaded version, outputs packet index.
+ bool ProtectRtp(void* data,
+ int in_len,
+ int max_len,
+ int* out_len,
+ int64_t* index);
+ bool ProtectRtcp(void* data, int in_len, int max_len, int* out_len);
+
+ // Decrypts/verifies an invidiual RTP/RTCP packet.
+ // If an HMAC is used, this will decrease the packet size.
+ bool UnprotectRtp(void* data, int in_len, int* out_len);
+
+ bool UnprotectRtcp(void* data, int in_len, int* out_len);
+
+ bool MaybeSetKeyParams();
+ bool ParseKeyParams(const std::string& key_params, uint8_t* key, size_t len);
+
+ const std::string content_name_;
+
+ std::unique_ptr<cricket::SrtpSession> send_session_;
+ std::unique_ptr<cricket::SrtpSession> recv_session_;
+ std::unique_ptr<cricket::SrtpSession> send_rtcp_session_;
+ std::unique_ptr<cricket::SrtpSession> recv_rtcp_session_;
+
+ absl::optional<cricket::CryptoParams> send_params_;
+ absl::optional<cricket::CryptoParams> recv_params_;
+ absl::optional<int> send_crypto_suite_;
+ absl::optional<int> recv_crypto_suite_;
+ rtc::ZeroOnFreeBuffer<uint8_t> send_key_;
+ rtc::ZeroOnFreeBuffer<uint8_t> recv_key_;
+
+ bool writable_ = false;
+
+ bool external_auth_enabled_ = false;
+
+ int rtp_abs_sendtime_extn_id_ = -1;
+
+ int decryption_failure_count_ = 0;
+
+ const FieldTrialsView& field_trials_;
+};
+
+} // namespace webrtc
+
+#endif // PC_SRTP_TRANSPORT_H_
diff --git a/third_party/libwebrtc/pc/srtp_transport_unittest.cc b/third_party/libwebrtc/pc/srtp_transport_unittest.cc
new file mode 100644
index 0000000000..ac8be8762b
--- /dev/null
+++ b/third_party/libwebrtc/pc/srtp_transport_unittest.cc
@@ -0,0 +1,428 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/srtp_transport.h"
+
+#include <string.h>
+
+#include <vector>
+
+#include "call/rtp_demuxer.h"
+#include "media/base/fake_rtp.h"
+#include "p2p/base/dtls_transport_internal.h"
+#include "p2p/base/fake_packet_transport.h"
+#include "pc/test/rtp_transport_test_util.h"
+#include "pc/test/srtp_test_util.h"
+#include "rtc_base/async_packet_socket.h"
+#include "rtc_base/byte_order.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/containers/flat_set.h"
+#include "rtc_base/ssl_stream_adapter.h"
+#include "rtc_base/third_party/sigslot/sigslot.h"
+#include "test/gtest.h"
+#include "test/scoped_key_value_config.h"
+
+using rtc::kSrtpAeadAes128Gcm;
+using rtc::kTestKey1;
+using rtc::kTestKey2;
+using rtc::kTestKeyLen;
+
+namespace webrtc {
+static const uint8_t kTestKeyGcm128_1[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZ12";
+static const uint8_t kTestKeyGcm128_2[] = "21ZYXWVUTSRQPONMLKJIHGFEDCBA";
+static const int kTestKeyGcm128Len = 28; // 128 bits key + 96 bits salt.
+static const uint8_t kTestKeyGcm256_1[] =
+ "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqr";
+static const uint8_t kTestKeyGcm256_2[] =
+ "rqponmlkjihgfedcbaZYXWVUTSRQPONMLKJIHGFEDCBA";
+static const int kTestKeyGcm256Len = 44; // 256 bits key + 96 bits salt.
+
+class SrtpTransportTest : public ::testing::Test, public sigslot::has_slots<> {
+ protected:
+ SrtpTransportTest() {
+ bool rtcp_mux_enabled = true;
+
+ rtp_packet_transport1_ =
+ std::make_unique<rtc::FakePacketTransport>("fake_packet_transport1");
+ rtp_packet_transport2_ =
+ std::make_unique<rtc::FakePacketTransport>("fake_packet_transport2");
+
+ bool asymmetric = false;
+ rtp_packet_transport1_->SetDestination(rtp_packet_transport2_.get(),
+ asymmetric);
+
+ srtp_transport1_ =
+ std::make_unique<SrtpTransport>(rtcp_mux_enabled, field_trials_);
+ srtp_transport2_ =
+ std::make_unique<SrtpTransport>(rtcp_mux_enabled, field_trials_);
+
+ srtp_transport1_->SetRtpPacketTransport(rtp_packet_transport1_.get());
+ srtp_transport2_->SetRtpPacketTransport(rtp_packet_transport2_.get());
+
+ srtp_transport1_->SubscribeRtcpPacketReceived(
+ &rtp_sink1_,
+ [this](rtc::CopyOnWriteBuffer* buffer, int64_t packet_time_ms) {
+ rtp_sink1_.OnRtcpPacketReceived(buffer, packet_time_ms);
+ });
+ srtp_transport2_->SubscribeRtcpPacketReceived(
+ &rtp_sink2_,
+ [this](rtc::CopyOnWriteBuffer* buffer, int64_t packet_time_ms) {
+ rtp_sink2_.OnRtcpPacketReceived(buffer, packet_time_ms);
+ });
+
+ RtpDemuxerCriteria demuxer_criteria;
+ // 0x00 is the payload type used in kPcmuFrame.
+ demuxer_criteria.payload_types().insert(0x00);
+
+ srtp_transport1_->RegisterRtpDemuxerSink(demuxer_criteria, &rtp_sink1_);
+ srtp_transport2_->RegisterRtpDemuxerSink(demuxer_criteria, &rtp_sink2_);
+ }
+
+ ~SrtpTransportTest() {
+ if (srtp_transport1_) {
+ srtp_transport1_->UnregisterRtpDemuxerSink(&rtp_sink1_);
+ }
+ if (srtp_transport2_) {
+ srtp_transport2_->UnregisterRtpDemuxerSink(&rtp_sink2_);
+ }
+ }
+
+ // With external auth enabled, SRTP doesn't write the auth tag and
+ // unprotect would fail. Check accessing the information about the
+ // tag instead, similar to what the actual code would do that relies
+ // on external auth.
+ void TestRtpAuthParams(SrtpTransport* transport, const std::string& cs) {
+ int overhead;
+ EXPECT_TRUE(transport->GetSrtpOverhead(&overhead));
+ switch (rtc::SrtpCryptoSuiteFromName(cs)) {
+ case rtc::kSrtpAes128CmSha1_32:
+ EXPECT_EQ(32 / 8, overhead); // 32-bit tag.
+ break;
+ case rtc::kSrtpAes128CmSha1_80:
+ EXPECT_EQ(80 / 8, overhead); // 80-bit tag.
+ break;
+ default:
+ RTC_DCHECK_NOTREACHED();
+ break;
+ }
+
+ uint8_t* auth_key = nullptr;
+ int key_len = 0;
+ int tag_len = 0;
+ EXPECT_TRUE(transport->GetRtpAuthParams(&auth_key, &key_len, &tag_len));
+ EXPECT_NE(nullptr, auth_key);
+ EXPECT_EQ(160 / 8, key_len); // Length of SHA-1 is 160 bits.
+ EXPECT_EQ(overhead, tag_len);
+ }
+
+ void TestSendRecvRtpPacket(const std::string& cipher_suite_name) {
+ size_t rtp_len = sizeof(kPcmuFrame);
+ size_t packet_size = rtp_len + rtc::rtp_auth_tag_len(cipher_suite_name);
+ rtc::Buffer rtp_packet_buffer(packet_size);
+ char* rtp_packet_data = rtp_packet_buffer.data<char>();
+ memcpy(rtp_packet_data, kPcmuFrame, rtp_len);
+ // In order to be able to run this test function multiple times we can not
+ // use the same sequence number twice. Increase the sequence number by one.
+ rtc::SetBE16(reinterpret_cast<uint8_t*>(rtp_packet_data) + 2,
+ ++sequence_number_);
+ rtc::CopyOnWriteBuffer rtp_packet1to2(rtp_packet_data, rtp_len,
+ packet_size);
+ rtc::CopyOnWriteBuffer rtp_packet2to1(rtp_packet_data, rtp_len,
+ packet_size);
+
+ char original_rtp_data[sizeof(kPcmuFrame)];
+ memcpy(original_rtp_data, rtp_packet_data, rtp_len);
+
+ rtc::PacketOptions options;
+ // Send a packet from `srtp_transport1_` to `srtp_transport2_` and verify
+ // that the packet can be successfully received and decrypted.
+ ASSERT_TRUE(srtp_transport1_->SendRtpPacket(&rtp_packet1to2, options,
+ cricket::PF_SRTP_BYPASS));
+ if (srtp_transport1_->IsExternalAuthActive()) {
+ TestRtpAuthParams(srtp_transport1_.get(), cipher_suite_name);
+ } else {
+ ASSERT_TRUE(rtp_sink2_.last_recv_rtp_packet().data());
+ EXPECT_EQ(0, memcmp(rtp_sink2_.last_recv_rtp_packet().data(),
+ original_rtp_data, rtp_len));
+ // Get the encrypted packet from underneath packet transport and verify
+ // the data is actually encrypted.
+ auto fake_rtp_packet_transport = static_cast<rtc::FakePacketTransport*>(
+ srtp_transport1_->rtp_packet_transport());
+ EXPECT_NE(0, memcmp(fake_rtp_packet_transport->last_sent_packet()->data(),
+ original_rtp_data, rtp_len));
+ }
+
+    // Do the same thing in the opposite direction.
+ ASSERT_TRUE(srtp_transport2_->SendRtpPacket(&rtp_packet2to1, options,
+ cricket::PF_SRTP_BYPASS));
+ if (srtp_transport2_->IsExternalAuthActive()) {
+ TestRtpAuthParams(srtp_transport2_.get(), cipher_suite_name);
+ } else {
+ ASSERT_TRUE(rtp_sink1_.last_recv_rtp_packet().data());
+ EXPECT_EQ(0, memcmp(rtp_sink1_.last_recv_rtp_packet().data(),
+ original_rtp_data, rtp_len));
+ auto fake_rtp_packet_transport = static_cast<rtc::FakePacketTransport*>(
+ srtp_transport2_->rtp_packet_transport());
+ EXPECT_NE(0, memcmp(fake_rtp_packet_transport->last_sent_packet()->data(),
+ original_rtp_data, rtp_len));
+ }
+ }
+
+ void TestSendRecvRtcpPacket(const std::string& cipher_suite_name) {
+ size_t rtcp_len = sizeof(::kRtcpReport);
+ size_t packet_size =
+ rtcp_len + 4 + rtc::rtcp_auth_tag_len(cipher_suite_name);
+ rtc::Buffer rtcp_packet_buffer(packet_size);
+ char* rtcp_packet_data = rtcp_packet_buffer.data<char>();
+ memcpy(rtcp_packet_data, ::kRtcpReport, rtcp_len);
+
+ rtc::CopyOnWriteBuffer rtcp_packet1to2(rtcp_packet_data, rtcp_len,
+ packet_size);
+ rtc::CopyOnWriteBuffer rtcp_packet2to1(rtcp_packet_data, rtcp_len,
+ packet_size);
+
+ rtc::PacketOptions options;
+ // Send a packet from `srtp_transport1_` to `srtp_transport2_` and verify
+ // that the packet can be successfully received and decrypted.
+ ASSERT_TRUE(srtp_transport1_->SendRtcpPacket(&rtcp_packet1to2, options,
+ cricket::PF_SRTP_BYPASS));
+ ASSERT_TRUE(rtp_sink2_.last_recv_rtcp_packet().data());
+ EXPECT_EQ(0, memcmp(rtp_sink2_.last_recv_rtcp_packet().data(),
+ rtcp_packet_data, rtcp_len));
+ // Get the encrypted packet from underneath packet transport and verify the
+ // data is actually encrypted.
+ auto fake_rtp_packet_transport = static_cast<rtc::FakePacketTransport*>(
+ srtp_transport1_->rtp_packet_transport());
+ EXPECT_NE(0, memcmp(fake_rtp_packet_transport->last_sent_packet()->data(),
+ rtcp_packet_data, rtcp_len));
+
+    // Do the same thing in the opposite direction.
+ ASSERT_TRUE(srtp_transport2_->SendRtcpPacket(&rtcp_packet2to1, options,
+ cricket::PF_SRTP_BYPASS));
+ ASSERT_TRUE(rtp_sink1_.last_recv_rtcp_packet().data());
+ EXPECT_EQ(0, memcmp(rtp_sink1_.last_recv_rtcp_packet().data(),
+ rtcp_packet_data, rtcp_len));
+ fake_rtp_packet_transport = static_cast<rtc::FakePacketTransport*>(
+ srtp_transport2_->rtp_packet_transport());
+ EXPECT_NE(0, memcmp(fake_rtp_packet_transport->last_sent_packet()->data(),
+ rtcp_packet_data, rtcp_len));
+ }
+
+ void TestSendRecvPacket(bool enable_external_auth,
+ int cs,
+ const uint8_t* key1,
+ int key1_len,
+ const uint8_t* key2,
+ int key2_len,
+ const std::string& cipher_suite_name) {
+ EXPECT_EQ(key1_len, key2_len);
+ EXPECT_EQ(cipher_suite_name, rtc::SrtpCryptoSuiteToName(cs));
+ if (enable_external_auth) {
+ srtp_transport1_->EnableExternalAuth();
+ srtp_transport2_->EnableExternalAuth();
+ }
+ std::vector<int> extension_ids;
+ EXPECT_TRUE(srtp_transport1_->SetRtpParams(
+ cs, key1, key1_len, extension_ids, cs, key2, key2_len, extension_ids));
+ EXPECT_TRUE(srtp_transport2_->SetRtpParams(
+ cs, key2, key2_len, extension_ids, cs, key1, key1_len, extension_ids));
+ EXPECT_TRUE(srtp_transport1_->SetRtcpParams(
+ cs, key1, key1_len, extension_ids, cs, key2, key2_len, extension_ids));
+ EXPECT_TRUE(srtp_transport2_->SetRtcpParams(
+ cs, key2, key2_len, extension_ids, cs, key1, key1_len, extension_ids));
+ EXPECT_TRUE(srtp_transport1_->IsSrtpActive());
+ EXPECT_TRUE(srtp_transport2_->IsSrtpActive());
+ if (rtc::IsGcmCryptoSuite(cs)) {
+ EXPECT_FALSE(srtp_transport1_->IsExternalAuthActive());
+ EXPECT_FALSE(srtp_transport2_->IsExternalAuthActive());
+ } else if (enable_external_auth) {
+ EXPECT_TRUE(srtp_transport1_->IsExternalAuthActive());
+ EXPECT_TRUE(srtp_transport2_->IsExternalAuthActive());
+ }
+ TestSendRecvRtpPacket(cipher_suite_name);
+ TestSendRecvRtcpPacket(cipher_suite_name);
+ }
+
+ void TestSendRecvPacketWithEncryptedHeaderExtension(
+ const std::string& cs,
+ const std::vector<int>& encrypted_header_ids) {
+ size_t rtp_len = sizeof(kPcmuFrameWithExtensions);
+ size_t packet_size = rtp_len + rtc::rtp_auth_tag_len(cs);
+ rtc::Buffer rtp_packet_buffer(packet_size);
+ char* rtp_packet_data = rtp_packet_buffer.data<char>();
+ memcpy(rtp_packet_data, kPcmuFrameWithExtensions, rtp_len);
+ // In order to be able to run this test function multiple times we can not
+ // use the same sequence number twice. Increase the sequence number by one.
+ rtc::SetBE16(reinterpret_cast<uint8_t*>(rtp_packet_data) + 2,
+ ++sequence_number_);
+ rtc::CopyOnWriteBuffer rtp_packet1to2(rtp_packet_data, rtp_len,
+ packet_size);
+ rtc::CopyOnWriteBuffer rtp_packet2to1(rtp_packet_data, rtp_len,
+ packet_size);
+
+ char original_rtp_data[sizeof(kPcmuFrameWithExtensions)];
+ memcpy(original_rtp_data, rtp_packet_data, rtp_len);
+
+ rtc::PacketOptions options;
+ // Send a packet from `srtp_transport1_` to `srtp_transport2_` and verify
+ // that the packet can be successfully received and decrypted.
+ ASSERT_TRUE(srtp_transport1_->SendRtpPacket(&rtp_packet1to2, options,
+ cricket::PF_SRTP_BYPASS));
+ ASSERT_TRUE(rtp_sink2_.last_recv_rtp_packet().data());
+ EXPECT_EQ(0, memcmp(rtp_sink2_.last_recv_rtp_packet().data(),
+ original_rtp_data, rtp_len));
+ // Get the encrypted packet from underneath packet transport and verify the
+ // data and header extension are actually encrypted.
+ auto fake_rtp_packet_transport = static_cast<rtc::FakePacketTransport*>(
+ srtp_transport1_->rtp_packet_transport());
+ EXPECT_NE(0, memcmp(fake_rtp_packet_transport->last_sent_packet()->data(),
+ original_rtp_data, rtp_len));
+ CompareHeaderExtensions(
+ reinterpret_cast<const char*>(
+ fake_rtp_packet_transport->last_sent_packet()->data()),
+ fake_rtp_packet_transport->last_sent_packet()->size(),
+ original_rtp_data, rtp_len, encrypted_header_ids, false);
+
+    // Do the same thing in the opposite direction.
+ ASSERT_TRUE(srtp_transport2_->SendRtpPacket(&rtp_packet2to1, options,
+ cricket::PF_SRTP_BYPASS));
+ ASSERT_TRUE(rtp_sink1_.last_recv_rtp_packet().data());
+ EXPECT_EQ(0, memcmp(rtp_sink1_.last_recv_rtp_packet().data(),
+ original_rtp_data, rtp_len));
+ fake_rtp_packet_transport = static_cast<rtc::FakePacketTransport*>(
+ srtp_transport2_->rtp_packet_transport());
+ EXPECT_NE(0, memcmp(fake_rtp_packet_transport->last_sent_packet()->data(),
+ original_rtp_data, rtp_len));
+ CompareHeaderExtensions(
+ reinterpret_cast<const char*>(
+ fake_rtp_packet_transport->last_sent_packet()->data()),
+ fake_rtp_packet_transport->last_sent_packet()->size(),
+ original_rtp_data, rtp_len, encrypted_header_ids, false);
+ }
+
+ void TestSendRecvEncryptedHeaderExtension(int cs,
+ const uint8_t* key1,
+ int key1_len,
+ const uint8_t* key2,
+ int key2_len,
+ const std::string& cs_name) {
+ std::vector<int> encrypted_headers;
+ encrypted_headers.push_back(kHeaderExtensionIDs[0]);
+ // Don't encrypt header ids 2 and 3.
+ encrypted_headers.push_back(kHeaderExtensionIDs[1]);
+ EXPECT_EQ(key1_len, key2_len);
+ EXPECT_EQ(cs_name, rtc::SrtpCryptoSuiteToName(cs));
+ EXPECT_TRUE(srtp_transport1_->SetRtpParams(cs, key1, key1_len,
+ encrypted_headers, cs, key2,
+ key2_len, encrypted_headers));
+ EXPECT_TRUE(srtp_transport2_->SetRtpParams(cs, key2, key2_len,
+ encrypted_headers, cs, key1,
+ key1_len, encrypted_headers));
+ EXPECT_TRUE(srtp_transport1_->IsSrtpActive());
+ EXPECT_TRUE(srtp_transport2_->IsSrtpActive());
+ EXPECT_FALSE(srtp_transport1_->IsExternalAuthActive());
+ EXPECT_FALSE(srtp_transport2_->IsExternalAuthActive());
+ TestSendRecvPacketWithEncryptedHeaderExtension(cs_name, encrypted_headers);
+ }
+
+ std::unique_ptr<SrtpTransport> srtp_transport1_;
+ std::unique_ptr<SrtpTransport> srtp_transport2_;
+
+ std::unique_ptr<rtc::FakePacketTransport> rtp_packet_transport1_;
+ std::unique_ptr<rtc::FakePacketTransport> rtp_packet_transport2_;
+
+ TransportObserver rtp_sink1_;
+ TransportObserver rtp_sink2_;
+
+ int sequence_number_ = 0;
+ webrtc::test::ScopedKeyValueConfig field_trials_;
+};
+
+class SrtpTransportTestWithExternalAuth
+ : public SrtpTransportTest,
+ public ::testing::WithParamInterface<bool> {};
+
+TEST_P(SrtpTransportTestWithExternalAuth,
+ SendAndRecvPacket_AES_CM_128_HMAC_SHA1_80) {
+ bool enable_external_auth = GetParam();
+ TestSendRecvPacket(enable_external_auth, rtc::kSrtpAes128CmSha1_80, kTestKey1,
+ kTestKeyLen, kTestKey2, kTestKeyLen,
+ rtc::kCsAesCm128HmacSha1_80);
+}
+
+TEST_F(SrtpTransportTest,
+ SendAndRecvPacketWithHeaderExtension_AES_CM_128_HMAC_SHA1_80) {
+ TestSendRecvEncryptedHeaderExtension(rtc::kSrtpAes128CmSha1_80, kTestKey1,
+ kTestKeyLen, kTestKey2, kTestKeyLen,
+ rtc::kCsAesCm128HmacSha1_80);
+}
+
+TEST_P(SrtpTransportTestWithExternalAuth,
+ SendAndRecvPacket_AES_CM_128_HMAC_SHA1_32) {
+ bool enable_external_auth = GetParam();
+ TestSendRecvPacket(enable_external_auth, rtc::kSrtpAes128CmSha1_32, kTestKey1,
+ kTestKeyLen, kTestKey2, kTestKeyLen,
+ rtc::kCsAesCm128HmacSha1_32);
+}
+
+TEST_F(SrtpTransportTest,
+ SendAndRecvPacketWithHeaderExtension_AES_CM_128_HMAC_SHA1_32) {
+ TestSendRecvEncryptedHeaderExtension(rtc::kSrtpAes128CmSha1_32, kTestKey1,
+ kTestKeyLen, kTestKey2, kTestKeyLen,
+ rtc::kCsAesCm128HmacSha1_32);
+}
+
+TEST_P(SrtpTransportTestWithExternalAuth,
+ SendAndRecvPacket_kSrtpAeadAes128Gcm) {
+ bool enable_external_auth = GetParam();
+ TestSendRecvPacket(enable_external_auth, rtc::kSrtpAeadAes128Gcm,
+ kTestKeyGcm128_1, kTestKeyGcm128Len, kTestKeyGcm128_2,
+ kTestKeyGcm128Len, rtc::kCsAeadAes128Gcm);
+}
+
+TEST_F(SrtpTransportTest,
+ SendAndRecvPacketWithHeaderExtension_kSrtpAeadAes128Gcm) {
+ TestSendRecvEncryptedHeaderExtension(
+ rtc::kSrtpAeadAes128Gcm, kTestKeyGcm128_1, kTestKeyGcm128Len,
+ kTestKeyGcm128_2, kTestKeyGcm128Len, rtc::kCsAeadAes128Gcm);
+}
+
+TEST_P(SrtpTransportTestWithExternalAuth,
+ SendAndRecvPacket_kSrtpAeadAes256Gcm) {
+ bool enable_external_auth = GetParam();
+ TestSendRecvPacket(enable_external_auth, rtc::kSrtpAeadAes256Gcm,
+ kTestKeyGcm256_1, kTestKeyGcm256Len, kTestKeyGcm256_2,
+ kTestKeyGcm256Len, rtc::kCsAeadAes256Gcm);
+}
+
+TEST_F(SrtpTransportTest,
+ SendAndRecvPacketWithHeaderExtension_kSrtpAeadAes256Gcm) {
+ TestSendRecvEncryptedHeaderExtension(
+ rtc::kSrtpAeadAes256Gcm, kTestKeyGcm256_1, kTestKeyGcm256Len,
+ kTestKeyGcm256_2, kTestKeyGcm256Len, rtc::kCsAeadAes256Gcm);
+}
+
+// Run all tests both with and without external auth enabled.
+INSTANTIATE_TEST_SUITE_P(ExternalAuth,
+ SrtpTransportTestWithExternalAuth,
+ ::testing::Values(true, false));
+
+// Test directly setting the params with bogus keys.
+TEST_F(SrtpTransportTest, TestSetParamsKeyTooShort) {
+ std::vector<int> extension_ids;
+ EXPECT_FALSE(srtp_transport1_->SetRtpParams(
+ rtc::kSrtpAes128CmSha1_80, kTestKey1, kTestKeyLen - 1, extension_ids,
+ rtc::kSrtpAes128CmSha1_80, kTestKey1, kTestKeyLen - 1, extension_ids));
+ EXPECT_FALSE(srtp_transport1_->SetRtcpParams(
+ rtc::kSrtpAes128CmSha1_80, kTestKey1, kTestKeyLen - 1, extension_ids,
+ rtc::kSrtpAes128CmSha1_80, kTestKey1, kTestKeyLen - 1, extension_ids));
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/stream_collection.h b/third_party/libwebrtc/pc/stream_collection.h
new file mode 100644
index 0000000000..f0f3f07b4b
--- /dev/null
+++ b/third_party/libwebrtc/pc/stream_collection.h
@@ -0,0 +1,101 @@
+/*
+ * Copyright 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_STREAM_COLLECTION_H_
+#define PC_STREAM_COLLECTION_H_
+
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "api/peer_connection_interface.h"
+
+namespace webrtc {
+
+// Implementation of StreamCollection.
+class StreamCollection : public StreamCollectionInterface {
+ public:
+ static rtc::scoped_refptr<StreamCollection> Create() {
+ return rtc::make_ref_counted<StreamCollection>();
+ }
+
+ static rtc::scoped_refptr<StreamCollection> Create(
+ StreamCollection* streams) {
+ return rtc::make_ref_counted<StreamCollection>(streams);
+ }
+
+ virtual size_t count() { return media_streams_.size(); }
+
+ virtual MediaStreamInterface* at(size_t index) {
+ return media_streams_.at(index).get();
+ }
+
+ virtual MediaStreamInterface* find(const std::string& id) {
+ for (StreamVector::iterator it = media_streams_.begin();
+ it != media_streams_.end(); ++it) {
+ if ((*it)->id().compare(id) == 0) {
+ return (*it).get();
+ }
+ }
+ return NULL;
+ }
+
+ virtual MediaStreamTrackInterface* FindAudioTrack(const std::string& id) {
+ for (size_t i = 0; i < media_streams_.size(); ++i) {
+ MediaStreamTrackInterface* track =
+ media_streams_[i]->FindAudioTrack(id).get();
+ if (track) {
+ return track;
+ }
+ }
+ return NULL;
+ }
+
+ virtual MediaStreamTrackInterface* FindVideoTrack(const std::string& id) {
+ for (size_t i = 0; i < media_streams_.size(); ++i) {
+ MediaStreamTrackInterface* track =
+ media_streams_[i]->FindVideoTrack(id).get();
+ if (track) {
+ return track;
+ }
+ }
+ return NULL;
+ }
+
+ void AddStream(rtc::scoped_refptr<MediaStreamInterface> stream) {
+ for (StreamVector::iterator it = media_streams_.begin();
+ it != media_streams_.end(); ++it) {
+ if ((*it)->id().compare(stream->id()) == 0)
+ return;
+ }
+ media_streams_.push_back(std::move(stream));
+ }
+
+ void RemoveStream(MediaStreamInterface* remove_stream) {
+ for (StreamVector::iterator it = media_streams_.begin();
+ it != media_streams_.end(); ++it) {
+ if ((*it)->id().compare(remove_stream->id()) == 0) {
+ media_streams_.erase(it);
+ break;
+ }
+ }
+ }
+
+ protected:
+ StreamCollection() {}
+ explicit StreamCollection(StreamCollection* original)
+ : media_streams_(original->media_streams_) {}
+ typedef std::vector<rtc::scoped_refptr<MediaStreamInterface> > StreamVector;
+ StreamVector media_streams_;
+};
+
+} // namespace webrtc
+
+#endif // PC_STREAM_COLLECTION_H_
diff --git a/third_party/libwebrtc/pc/test/DEPS b/third_party/libwebrtc/pc/test/DEPS
new file mode 100644
index 0000000000..33e6d94b25
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/DEPS
@@ -0,0 +1,5 @@
+include_rules = [
+ # Allow include of sdk/android to allow accessing the JVM and Env in tests.
+ "+sdk/android",
+ "+modules/utility/include/jvm_android.h",
+]
diff --git a/third_party/libwebrtc/pc/test/android_test_initializer.cc b/third_party/libwebrtc/pc/test/android_test_initializer.cc
new file mode 100644
index 0000000000..963544cb4b
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/android_test_initializer.cc
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/test/android_test_initializer.h"
+
+#include <jni.h>
+#include <pthread.h>
+#include <stddef.h>
+
+#include "modules/utility/include/jvm_android.h"
+#include "rtc_base/checks.h"
+#include "sdk/android/src/jni/jvm.h"
+// TODO(phoglund): This include is to a target we can't really depend on.
+// We need to either break it out into a smaller target or find some way to
+// not use it.
+#include "rtc_base/ssl_adapter.h"
+
+namespace webrtc {
+
+namespace {
+
+static pthread_once_t g_initialize_once = PTHREAD_ONCE_INIT;
+
+// There can only be one JNI_OnLoad in each binary. So since this is a GTEST
+// C++ runner binary, we want to initialize the same global objects we normally
+// do if this had been a Java binary.
+void EnsureInitializedOnce() {
+ RTC_CHECK(::webrtc::jni::GetJVM() != nullptr);
+ JNIEnv* jni = ::webrtc::jni::AttachCurrentThreadIfNeeded();
+ JavaVM* jvm = NULL;
+ RTC_CHECK_EQ(0, jni->GetJavaVM(&jvm));
+
+ RTC_CHECK(rtc::InitializeSSL()) << "Failed to InitializeSSL()";
+
+ webrtc::JVM::Initialize(jvm);
+}
+
+} // anonymous namespace
+
+void InitializeAndroidObjects() {
+ RTC_CHECK_EQ(0, pthread_once(&g_initialize_once, &EnsureInitializedOnce));
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/test/android_test_initializer.h b/third_party/libwebrtc/pc/test/android_test_initializer.h
new file mode 100644
index 0000000000..4181dd286c
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/android_test_initializer.h
@@ -0,0 +1,20 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_TEST_ANDROID_TEST_INITIALIZER_H_
+#define PC_TEST_ANDROID_TEST_INITIALIZER_H_
+
+namespace webrtc {
+
+void InitializeAndroidObjects();
+
+} // namespace webrtc
+
+#endif // PC_TEST_ANDROID_TEST_INITIALIZER_H_
diff --git a/third_party/libwebrtc/pc/test/fake_audio_capture_module.cc b/third_party/libwebrtc/pc/test/fake_audio_capture_module.cc
new file mode 100644
index 0000000000..6ffa18c886
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/fake_audio_capture_module.cc
@@ -0,0 +1,519 @@
+/*
+ * Copyright 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/test/fake_audio_capture_module.h"
+
+#include <string.h>
+
+#include "api/make_ref_counted.h"
+#include "api/units/time_delta.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/time_utils.h"
+
+using ::webrtc::TimeDelta;
+
+// Audio sample value that is high enough that it doesn't occur naturally when
+// frames are being faked. E.g. NetEq will not generate this large sample value
+// unless it has received an audio frame containing a sample of this value.
+// Even simpler buffers would likely just contain audio sample values of 0.
+static const int kHighSampleValue = 10000;
+
+// Constants here are derived by running VoE using a real ADM.
+// The constants correspond to 10ms of mono audio at 44kHz.
+static const int kTimePerFrameMs = 10;
+static const uint8_t kNumberOfChannels = 1;
+static const int kSamplesPerSecond = 44000;
+static const int kTotalDelayMs = 0;
+static const int kClockDriftMs = 0;
+static const uint32_t kMaxVolume = 14392;
+
+FakeAudioCaptureModule::FakeAudioCaptureModule()
+ : audio_callback_(nullptr),
+ recording_(false),
+ playing_(false),
+ play_is_initialized_(false),
+ rec_is_initialized_(false),
+ current_mic_level_(kMaxVolume),
+ started_(false),
+ next_frame_time_(0),
+ frames_received_(0) {}
+
+FakeAudioCaptureModule::~FakeAudioCaptureModule() {
+ if (process_thread_) {
+ process_thread_->Stop();
+ }
+}
+
+rtc::scoped_refptr<FakeAudioCaptureModule> FakeAudioCaptureModule::Create() {
+ auto capture_module = rtc::make_ref_counted<FakeAudioCaptureModule>();
+ if (!capture_module->Initialize()) {
+ return nullptr;
+ }
+ return capture_module;
+}
+
+int FakeAudioCaptureModule::frames_received() const {
+ webrtc::MutexLock lock(&mutex_);
+ return frames_received_;
+}
+
+int32_t FakeAudioCaptureModule::ActiveAudioLayer(
+ AudioLayer* /*audio_layer*/) const {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::RegisterAudioCallback(
+ webrtc::AudioTransport* audio_callback) {
+ webrtc::MutexLock lock(&mutex_);
+ audio_callback_ = audio_callback;
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::Init() {
+ // Initialize is called by the factory method. Safe to ignore this Init call.
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::Terminate() {
+ // Clean up in the destructor. No action here, just success.
+ return 0;
+}
+
+bool FakeAudioCaptureModule::Initialized() const {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+int16_t FakeAudioCaptureModule::PlayoutDevices() {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+int16_t FakeAudioCaptureModule::RecordingDevices() {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::PlayoutDeviceName(
+ uint16_t /*index*/,
+ char /*name*/[webrtc::kAdmMaxDeviceNameSize],
+ char /*guid*/[webrtc::kAdmMaxGuidSize]) {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::RecordingDeviceName(
+ uint16_t /*index*/,
+ char /*name*/[webrtc::kAdmMaxDeviceNameSize],
+ char /*guid*/[webrtc::kAdmMaxGuidSize]) {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetPlayoutDevice(uint16_t /*index*/) {
+ // No playout device, just playing from file. Return success.
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetPlayoutDevice(WindowsDeviceType /*device*/) {
+ if (play_is_initialized_) {
+ return -1;
+ }
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetRecordingDevice(uint16_t /*index*/) {
+ // No recording device, just dropping audio. Return success.
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetRecordingDevice(
+ WindowsDeviceType /*device*/) {
+ if (rec_is_initialized_) {
+ return -1;
+ }
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::PlayoutIsAvailable(bool* /*available*/) {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::InitPlayout() {
+ play_is_initialized_ = true;
+ return 0;
+}
+
+bool FakeAudioCaptureModule::PlayoutIsInitialized() const {
+ return play_is_initialized_;
+}
+
+int32_t FakeAudioCaptureModule::RecordingIsAvailable(bool* /*available*/) {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::InitRecording() {
+ rec_is_initialized_ = true;
+ return 0;
+}
+
+bool FakeAudioCaptureModule::RecordingIsInitialized() const {
+ return rec_is_initialized_;
+}
+
+int32_t FakeAudioCaptureModule::StartPlayout() {
+ if (!play_is_initialized_) {
+ return -1;
+ }
+ {
+ webrtc::MutexLock lock(&mutex_);
+ playing_ = true;
+ }
+ bool start = true;
+ UpdateProcessing(start);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::StopPlayout() {
+ bool start = false;
+ {
+ webrtc::MutexLock lock(&mutex_);
+ playing_ = false;
+ start = ShouldStartProcessing();
+ }
+ UpdateProcessing(start);
+ return 0;
+}
+
+bool FakeAudioCaptureModule::Playing() const {
+ webrtc::MutexLock lock(&mutex_);
+ return playing_;
+}
+
+int32_t FakeAudioCaptureModule::StartRecording() {
+ if (!rec_is_initialized_) {
+ return -1;
+ }
+ {
+ webrtc::MutexLock lock(&mutex_);
+ recording_ = true;
+ }
+ bool start = true;
+ UpdateProcessing(start);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::StopRecording() {
+ bool start = false;
+ {
+ webrtc::MutexLock lock(&mutex_);
+ recording_ = false;
+ start = ShouldStartProcessing();
+ }
+ UpdateProcessing(start);
+ return 0;
+}
+
+bool FakeAudioCaptureModule::Recording() const {
+ webrtc::MutexLock lock(&mutex_);
+ return recording_;
+}
+
+int32_t FakeAudioCaptureModule::InitSpeaker() {
+ // No speaker, just playing from file. Return success.
+ return 0;
+}
+
+bool FakeAudioCaptureModule::SpeakerIsInitialized() const {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::InitMicrophone() {
+ // No microphone, just playing from file. Return success.
+ return 0;
+}
+
+bool FakeAudioCaptureModule::MicrophoneIsInitialized() const {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::SpeakerVolumeIsAvailable(bool* /*available*/) {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetSpeakerVolume(uint32_t /*volume*/) {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::SpeakerVolume(uint32_t* /*volume*/) const {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::MaxSpeakerVolume(
+ uint32_t* /*max_volume*/) const {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::MinSpeakerVolume(
+ uint32_t* /*min_volume*/) const {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::MicrophoneVolumeIsAvailable(
+ bool* /*available*/) {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetMicrophoneVolume(uint32_t volume) {
+ webrtc::MutexLock lock(&mutex_);
+ current_mic_level_ = volume;
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::MicrophoneVolume(uint32_t* volume) const {
+ webrtc::MutexLock lock(&mutex_);
+ *volume = current_mic_level_;
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::MaxMicrophoneVolume(
+ uint32_t* max_volume) const {
+ *max_volume = kMaxVolume;
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::MinMicrophoneVolume(
+ uint32_t* /*min_volume*/) const {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::SpeakerMuteIsAvailable(bool* /*available*/) {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetSpeakerMute(bool /*enable*/) {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::SpeakerMute(bool* /*enabled*/) const {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::MicrophoneMuteIsAvailable(bool* /*available*/) {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetMicrophoneMute(bool /*enable*/) {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::MicrophoneMute(bool* /*enabled*/) const {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::StereoPlayoutIsAvailable(
+ bool* available) const {
+ // No recording device, just dropping audio. Stereo can be dropped just
+ // as easily as mono.
+ *available = true;
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetStereoPlayout(bool /*enable*/) {
+ // No recording device, just dropping audio. Stereo can be dropped just
+ // as easily as mono.
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::StereoPlayout(bool* /*enabled*/) const {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::StereoRecordingIsAvailable(
+ bool* available) const {
+  // Keep things simple. No stereo recording.
+ *available = false;
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetStereoRecording(bool enable) {
+ if (!enable) {
+ return 0;
+ }
+ return -1;
+}
+
+int32_t FakeAudioCaptureModule::StereoRecording(bool* /*enabled*/) const {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::PlayoutDelay(uint16_t* delay_ms) const {
+ // No delay since audio frames are dropped.
+ *delay_ms = 0;
+ return 0;
+}
+
+bool FakeAudioCaptureModule::Initialize() {
+ // Set the send buffer samples high enough that it would not occur on the
+ // remote side unless a packet containing a sample of that magnitude has been
+ // sent to it. Note that the audio processing pipeline will likely distort the
+ // original signal.
+ SetSendBuffer(kHighSampleValue);
+ return true;
+}
+
+void FakeAudioCaptureModule::SetSendBuffer(int value) {
+ Sample* buffer_ptr = reinterpret_cast<Sample*>(send_buffer_);
+ const size_t buffer_size_in_samples =
+ sizeof(send_buffer_) / kNumberBytesPerSample;
+ for (size_t i = 0; i < buffer_size_in_samples; ++i) {
+ buffer_ptr[i] = value;
+ }
+}
+
+void FakeAudioCaptureModule::ResetRecBuffer() {
+ memset(rec_buffer_, 0, sizeof(rec_buffer_));
+}
+
+bool FakeAudioCaptureModule::CheckRecBuffer(int value) {
+ const Sample* buffer_ptr = reinterpret_cast<const Sample*>(rec_buffer_);
+ const size_t buffer_size_in_samples =
+ sizeof(rec_buffer_) / kNumberBytesPerSample;
+ for (size_t i = 0; i < buffer_size_in_samples; ++i) {
+ if (buffer_ptr[i] >= value)
+ return true;
+ }
+ return false;
+}
+
+bool FakeAudioCaptureModule::ShouldStartProcessing() {
+ return recording_ || playing_;
+}
+
+void FakeAudioCaptureModule::UpdateProcessing(bool start) {
+ if (start) {
+ if (!process_thread_) {
+ process_thread_ = rtc::Thread::Create();
+ process_thread_->Start();
+ }
+ process_thread_->PostTask([this] { StartProcessP(); });
+ } else {
+ if (process_thread_) {
+ process_thread_->Stop();
+ process_thread_.reset(nullptr);
+ process_thread_checker_.Detach();
+ }
+ webrtc::MutexLock lock(&mutex_);
+ started_ = false;
+ }
+}
+
+void FakeAudioCaptureModule::StartProcessP() {
+ RTC_DCHECK_RUN_ON(&process_thread_checker_);
+ {
+ webrtc::MutexLock lock(&mutex_);
+ if (started_) {
+ // Already started.
+ return;
+ }
+ }
+ ProcessFrameP();
+}
+
+void FakeAudioCaptureModule::ProcessFrameP() {
+ RTC_DCHECK_RUN_ON(&process_thread_checker_);
+ {
+ webrtc::MutexLock lock(&mutex_);
+ if (!started_) {
+ next_frame_time_ = rtc::TimeMillis();
+ started_ = true;
+ }
+
+ // Receive and send frames every kTimePerFrameMs.
+ if (playing_) {
+ ReceiveFrameP();
+ }
+ if (recording_) {
+ SendFrameP();
+ }
+ }
+
+ next_frame_time_ += kTimePerFrameMs;
+ const int64_t current_time = rtc::TimeMillis();
+ const int64_t wait_time =
+ (next_frame_time_ > current_time) ? next_frame_time_ - current_time : 0;
+ process_thread_->PostDelayedTask([this] { ProcessFrameP(); },
+ TimeDelta::Millis(wait_time));
+}
+
+void FakeAudioCaptureModule::ReceiveFrameP() {
+ RTC_DCHECK_RUN_ON(&process_thread_checker_);
+ if (!audio_callback_) {
+ return;
+ }
+ ResetRecBuffer();
+ size_t nSamplesOut = 0;
+ int64_t elapsed_time_ms = 0;
+ int64_t ntp_time_ms = 0;
+ if (audio_callback_->NeedMorePlayData(kNumberSamples, kNumberBytesPerSample,
+ kNumberOfChannels, kSamplesPerSecond,
+ rec_buffer_, nSamplesOut,
+ &elapsed_time_ms, &ntp_time_ms) != 0) {
+ RTC_DCHECK_NOTREACHED();
+ }
+ RTC_CHECK(nSamplesOut == kNumberSamples);
+
+  // The SetSendBuffer() function ensures that after decoding, the audio buffer
+ // should contain samples of similar magnitude (there is likely to be some
+ // distortion due to the audio pipeline). If one sample is detected to
+ // have the same or greater magnitude somewhere in the frame, an actual frame
+ // has been received from the remote side (i.e. faked frames are not being
+ // pulled).
+ if (CheckRecBuffer(kHighSampleValue)) {
+ ++frames_received_;
+ }
+}
+
+void FakeAudioCaptureModule::SendFrameP() {
+ RTC_DCHECK_RUN_ON(&process_thread_checker_);
+ if (!audio_callback_) {
+ return;
+ }
+ bool key_pressed = false;
+ uint32_t current_mic_level = current_mic_level_;
+ if (audio_callback_->RecordedDataIsAvailable(
+ send_buffer_, kNumberSamples, kNumberBytesPerSample,
+ kNumberOfChannels, kSamplesPerSecond, kTotalDelayMs, kClockDriftMs,
+ current_mic_level, key_pressed, current_mic_level) != 0) {
+ RTC_DCHECK_NOTREACHED();
+ }
+ current_mic_level_ = current_mic_level;
+}
diff --git a/third_party/libwebrtc/pc/test/fake_audio_capture_module.h b/third_party/libwebrtc/pc/test/fake_audio_capture_module.h
new file mode 100644
index 0000000000..c04373cdfd
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/fake_audio_capture_module.h
@@ -0,0 +1,236 @@
+/*
+ * Copyright 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This class implements an AudioCaptureModule that can be used to detect if
+// audio is being received properly if it is fed by another AudioCaptureModule
+// in some arbitrary audio pipeline where they are connected. It does not play
+// out or record any audio so it does not need access to any hardware and can
+// therefore be used in the gtest testing framework.
+
+// Note: a "P" postfix on a function name indicates that it should only be
+// called by the processing thread.
+
+#ifndef PC_TEST_FAKE_AUDIO_CAPTURE_MODULE_H_
+#define PC_TEST_FAKE_AUDIO_CAPTURE_MODULE_H_
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <memory>
+
+#include "api/scoped_refptr.h"
+#include "api/sequence_checker.h"
+#include "modules/audio_device/include/audio_device.h"
+#include "modules/audio_device/include/audio_device_defines.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/thread_annotations.h"
+
+namespace rtc {
+class Thread;
+} // namespace rtc
+
+class FakeAudioCaptureModule : public webrtc::AudioDeviceModule {
+ public:
+ typedef uint16_t Sample;
+
+  // The values for the following constants have been derived by running VoE
+ // using a real ADM. The constants correspond to 10ms of mono audio at 44kHz.
+ static const size_t kNumberSamples = 440;
+ static const size_t kNumberBytesPerSample = sizeof(Sample);
+
+ // Creates a FakeAudioCaptureModule or returns NULL on failure.
+ static rtc::scoped_refptr<FakeAudioCaptureModule> Create();
+
+ // Returns the number of frames that have been successfully pulled by the
+ // instance. Note that correctly detecting success can only be done if the
+ // pulled frame was generated/pushed from a FakeAudioCaptureModule.
+ int frames_received() const RTC_LOCKS_EXCLUDED(mutex_);
+
+ int32_t ActiveAudioLayer(AudioLayer* audio_layer) const override;
+
+ // Note: Calling this method from a callback may result in deadlock.
+ int32_t RegisterAudioCallback(webrtc::AudioTransport* audio_callback) override
+ RTC_LOCKS_EXCLUDED(mutex_);
+
+ int32_t Init() override;
+ int32_t Terminate() override;
+ bool Initialized() const override;
+
+ int16_t PlayoutDevices() override;
+ int16_t RecordingDevices() override;
+ int32_t PlayoutDeviceName(uint16_t index,
+ char name[webrtc::kAdmMaxDeviceNameSize],
+ char guid[webrtc::kAdmMaxGuidSize]) override;
+ int32_t RecordingDeviceName(uint16_t index,
+ char name[webrtc::kAdmMaxDeviceNameSize],
+ char guid[webrtc::kAdmMaxGuidSize]) override;
+
+ int32_t SetPlayoutDevice(uint16_t index) override;
+ int32_t SetPlayoutDevice(WindowsDeviceType device) override;
+ int32_t SetRecordingDevice(uint16_t index) override;
+ int32_t SetRecordingDevice(WindowsDeviceType device) override;
+
+ int32_t PlayoutIsAvailable(bool* available) override;
+ int32_t InitPlayout() override;
+ bool PlayoutIsInitialized() const override;
+ int32_t RecordingIsAvailable(bool* available) override;
+ int32_t InitRecording() override;
+ bool RecordingIsInitialized() const override;
+
+ int32_t StartPlayout() RTC_LOCKS_EXCLUDED(mutex_) override;
+ int32_t StopPlayout() RTC_LOCKS_EXCLUDED(mutex_) override;
+ bool Playing() const RTC_LOCKS_EXCLUDED(mutex_) override;
+ int32_t StartRecording() RTC_LOCKS_EXCLUDED(mutex_) override;
+ int32_t StopRecording() RTC_LOCKS_EXCLUDED(mutex_) override;
+ bool Recording() const RTC_LOCKS_EXCLUDED(mutex_) override;
+
+ int32_t InitSpeaker() override;
+ bool SpeakerIsInitialized() const override;
+ int32_t InitMicrophone() override;
+ bool MicrophoneIsInitialized() const override;
+
+ int32_t SpeakerVolumeIsAvailable(bool* available) override;
+ int32_t SetSpeakerVolume(uint32_t volume) override;
+ int32_t SpeakerVolume(uint32_t* volume) const override;
+ int32_t MaxSpeakerVolume(uint32_t* max_volume) const override;
+ int32_t MinSpeakerVolume(uint32_t* min_volume) const override;
+
+ int32_t MicrophoneVolumeIsAvailable(bool* available) override;
+ int32_t SetMicrophoneVolume(uint32_t volume)
+ RTC_LOCKS_EXCLUDED(mutex_) override;
+ int32_t MicrophoneVolume(uint32_t* volume) const
+ RTC_LOCKS_EXCLUDED(mutex_) override;
+ int32_t MaxMicrophoneVolume(uint32_t* max_volume) const override;
+
+ int32_t MinMicrophoneVolume(uint32_t* min_volume) const override;
+
+ int32_t SpeakerMuteIsAvailable(bool* available) override;
+ int32_t SetSpeakerMute(bool enable) override;
+ int32_t SpeakerMute(bool* enabled) const override;
+
+ int32_t MicrophoneMuteIsAvailable(bool* available) override;
+ int32_t SetMicrophoneMute(bool enable) override;
+ int32_t MicrophoneMute(bool* enabled) const override;
+
+ int32_t StereoPlayoutIsAvailable(bool* available) const override;
+ int32_t SetStereoPlayout(bool enable) override;
+ int32_t StereoPlayout(bool* enabled) const override;
+ int32_t StereoRecordingIsAvailable(bool* available) const override;
+ int32_t SetStereoRecording(bool enable) override;
+ int32_t StereoRecording(bool* enabled) const override;
+
+ int32_t PlayoutDelay(uint16_t* delay_ms) const override;
+
+ bool BuiltInAECIsAvailable() const override { return false; }
+ int32_t EnableBuiltInAEC(bool enable) override { return -1; }
+ bool BuiltInAGCIsAvailable() const override { return false; }
+ int32_t EnableBuiltInAGC(bool enable) override { return -1; }
+ bool BuiltInNSIsAvailable() const override { return false; }
+ int32_t EnableBuiltInNS(bool enable) override { return -1; }
+
+ int32_t GetPlayoutUnderrunCount() const override { return -1; }
+
+ absl::optional<webrtc::AudioDeviceModule::Stats> GetStats() const override {
+ return webrtc::AudioDeviceModule::Stats();
+ }
+#if defined(WEBRTC_IOS)
+ int GetPlayoutAudioParameters(
+ webrtc::AudioParameters* params) const override {
+ return -1;
+ }
+ int GetRecordAudioParameters(webrtc::AudioParameters* params) const override {
+ return -1;
+ }
+#endif // WEBRTC_IOS
+
+ // End of functions inherited from webrtc::AudioDeviceModule.
+
+ protected:
+ // The constructor is protected because the class needs to be created as a
+  // reference counted object (for memory management reasons). It could be
+ // exposed in which case the burden of proper instantiation would be put on
+ // the creator of a FakeAudioCaptureModule instance. To create an instance of
+ // this class use the Create(..) API.
+ FakeAudioCaptureModule();
+ // The destructor is protected because it is reference counted and should not
+ // be deleted directly.
+ virtual ~FakeAudioCaptureModule();
+
+ private:
+ // Initializes the state of the FakeAudioCaptureModule. This API is called on
+ // creation by the Create() API.
+ bool Initialize();
+  // SetSendBuffer() sets all samples in send_buffer_ to `value`.
+ void SetSendBuffer(int value);
+ // Resets rec_buffer_. I.e., sets all rec_buffer_ samples to 0.
+ void ResetRecBuffer();
+  // Returns true if rec_buffer_ contains one or more samples greater than or
+ // equal to `value`.
+ bool CheckRecBuffer(int value);
+
+ // Returns true/false depending on if recording or playback has been
+ // enabled/started.
+ bool ShouldStartProcessing() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
+
+ // Starts or stops the pushing and pulling of audio frames.
+ void UpdateProcessing(bool start) RTC_LOCKS_EXCLUDED(mutex_);
+
+ // Starts the periodic calling of ProcessFrame() in a thread safe way.
+ void StartProcessP();
+  // Periodically called function that ensures that frames are pulled and pushed
+ // periodically if enabled/started.
+ void ProcessFrameP() RTC_LOCKS_EXCLUDED(mutex_);
+ // Pulls frames from the registered webrtc::AudioTransport.
+ void ReceiveFrameP() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
+ // Pushes frames to the registered webrtc::AudioTransport.
+ void SendFrameP() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
+
+ // Callback for playout and recording.
+ webrtc::AudioTransport* audio_callback_ RTC_GUARDED_BY(mutex_);
+
+ bool recording_ RTC_GUARDED_BY(
+ mutex_); // True when audio is being pushed from the instance.
+ bool playing_ RTC_GUARDED_BY(
+ mutex_); // True when audio is being pulled by the instance.
+
+ bool play_is_initialized_; // True when the instance is ready to pull audio.
+ bool rec_is_initialized_; // True when the instance is ready to push audio.
+
+ // Input to and output from RecordedDataIsAvailable(..) makes it possible to
+ // modify the current mic level. The implementation does not care about the
+ // mic level so it just feeds back what it receives.
+ uint32_t current_mic_level_ RTC_GUARDED_BY(mutex_);
+
+ // next_frame_time_ is updated in a non-drifting manner to indicate the next
+ // wall clock time the next frame should be generated and received. started_
+ // ensures that next_frame_time_ can be initialized properly on first call.
+ bool started_ RTC_GUARDED_BY(mutex_);
+ int64_t next_frame_time_ RTC_GUARDED_BY(process_thread_checker_);
+
+ std::unique_ptr<rtc::Thread> process_thread_;
+
+ // Buffer for storing samples received from the webrtc::AudioTransport.
+ char rec_buffer_[kNumberSamples * kNumberBytesPerSample];
+ // Buffer for samples to send to the webrtc::AudioTransport.
+ char send_buffer_[kNumberSamples * kNumberBytesPerSample];
+
+ // Counter of frames received that have samples of high enough amplitude to
+ // indicate that the frames are not faked somewhere in the audio pipeline
+ // (e.g. by a jitter buffer).
+ int frames_received_;
+
+ // Protects variables that are accessed from process_thread_ and
+ // the main thread.
+ mutable webrtc::Mutex mutex_;
+ webrtc::SequenceChecker process_thread_checker_{
+ webrtc::SequenceChecker::kDetached};
+};
+
+#endif // PC_TEST_FAKE_AUDIO_CAPTURE_MODULE_H_
diff --git a/third_party/libwebrtc/pc/test/fake_audio_capture_module_unittest.cc b/third_party/libwebrtc/pc/test/fake_audio_capture_module_unittest.cc
new file mode 100644
index 0000000000..64141b13a9
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/fake_audio_capture_module_unittest.cc
@@ -0,0 +1,198 @@
+/*
+ * Copyright 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/test/fake_audio_capture_module.h"
+
+#include <string.h>
+
+#include <algorithm>
+
+#include "api/scoped_refptr.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "test/gtest.h"
+
+class FakeAdmTest : public ::testing::Test, public webrtc::AudioTransport {
+ protected:
+ static const int kMsInSecond = 1000;
+
+ FakeAdmTest()
+ : push_iterations_(0), pull_iterations_(0), rec_buffer_bytes_(0) {
+ memset(rec_buffer_, 0, sizeof(rec_buffer_));
+ }
+
+ void SetUp() override {
+ fake_audio_capture_module_ = FakeAudioCaptureModule::Create();
+ EXPECT_TRUE(fake_audio_capture_module_.get() != NULL);
+ }
+
+ // Callbacks inherited from webrtc::AudioTransport.
+ // ADM is pushing data.
+ int32_t RecordedDataIsAvailable(const void* audioSamples,
+ const size_t nSamples,
+ const size_t nBytesPerSample,
+ const size_t nChannels,
+ const uint32_t samplesPerSec,
+ const uint32_t totalDelayMS,
+ const int32_t clockDrift,
+ const uint32_t currentMicLevel,
+ const bool keyPressed,
+ uint32_t& newMicLevel) override {
+ webrtc::MutexLock lock(&mutex_);
+ rec_buffer_bytes_ = nSamples * nBytesPerSample;
+ if ((rec_buffer_bytes_ == 0) ||
+ (rec_buffer_bytes_ >
+ FakeAudioCaptureModule::kNumberSamples *
+ FakeAudioCaptureModule::kNumberBytesPerSample)) {
+ ADD_FAILURE();
+ return -1;
+ }
+ memcpy(rec_buffer_, audioSamples, rec_buffer_bytes_);
+ ++push_iterations_;
+ newMicLevel = currentMicLevel;
+ return 0;
+ }
+
+ void PullRenderData(int bits_per_sample,
+ int sample_rate,
+ size_t number_of_channels,
+ size_t number_of_frames,
+ void* audio_data,
+ int64_t* elapsed_time_ms,
+ int64_t* ntp_time_ms) override {}
+
+ // ADM is pulling data.
+ int32_t NeedMorePlayData(const size_t nSamples,
+ const size_t nBytesPerSample,
+ const size_t nChannels,
+ const uint32_t samplesPerSec,
+ void* audioSamples,
+ size_t& nSamplesOut,
+ int64_t* elapsed_time_ms,
+ int64_t* ntp_time_ms) override {
+ webrtc::MutexLock lock(&mutex_);
+ ++pull_iterations_;
+ const size_t audio_buffer_size = nSamples * nBytesPerSample;
+ const size_t bytes_out =
+ RecordedDataReceived()
+ ? CopyFromRecBuffer(audioSamples, audio_buffer_size)
+ : GenerateZeroBuffer(audioSamples, audio_buffer_size);
+ nSamplesOut = bytes_out / nBytesPerSample;
+ *elapsed_time_ms = 0;
+ *ntp_time_ms = 0;
+ return 0;
+ }
+
+ int push_iterations() const {
+ webrtc::MutexLock lock(&mutex_);
+ return push_iterations_;
+ }
+ int pull_iterations() const {
+ webrtc::MutexLock lock(&mutex_);
+ return pull_iterations_;
+ }
+
+ rtc::scoped_refptr<FakeAudioCaptureModule> fake_audio_capture_module_;
+
+ private:
+ bool RecordedDataReceived() const { return rec_buffer_bytes_ != 0; }
+ size_t GenerateZeroBuffer(void* audio_buffer, size_t audio_buffer_size) {
+ memset(audio_buffer, 0, audio_buffer_size);
+ return audio_buffer_size;
+ }
+ size_t CopyFromRecBuffer(void* audio_buffer, size_t audio_buffer_size) {
+ EXPECT_EQ(audio_buffer_size, rec_buffer_bytes_);
+ const size_t min_buffer_size =
+ std::min(audio_buffer_size, rec_buffer_bytes_);
+ memcpy(audio_buffer, rec_buffer_, min_buffer_size);
+ return min_buffer_size;
+ }
+
+ rtc::AutoThread main_thread_;
+
+ mutable webrtc::Mutex mutex_;
+
+ int push_iterations_;
+ int pull_iterations_;
+
+ char rec_buffer_[FakeAudioCaptureModule::kNumberSamples *
+ FakeAudioCaptureModule::kNumberBytesPerSample];
+ size_t rec_buffer_bytes_;
+};
+
+TEST_F(FakeAdmTest, PlayoutTest) {
+ EXPECT_EQ(0, fake_audio_capture_module_->RegisterAudioCallback(this));
+
+ bool stereo_available = false;
+ EXPECT_EQ(0, fake_audio_capture_module_->StereoPlayoutIsAvailable(
+ &stereo_available));
+ EXPECT_TRUE(stereo_available);
+
+ EXPECT_NE(0, fake_audio_capture_module_->StartPlayout());
+ EXPECT_FALSE(fake_audio_capture_module_->PlayoutIsInitialized());
+ EXPECT_FALSE(fake_audio_capture_module_->Playing());
+ EXPECT_EQ(0, fake_audio_capture_module_->StopPlayout());
+
+ EXPECT_EQ(0, fake_audio_capture_module_->InitPlayout());
+ EXPECT_TRUE(fake_audio_capture_module_->PlayoutIsInitialized());
+ EXPECT_FALSE(fake_audio_capture_module_->Playing());
+
+ EXPECT_EQ(0, fake_audio_capture_module_->StartPlayout());
+ EXPECT_TRUE(fake_audio_capture_module_->Playing());
+
+ uint16_t delay_ms = 10;
+ EXPECT_EQ(0, fake_audio_capture_module_->PlayoutDelay(&delay_ms));
+ EXPECT_EQ(0, delay_ms);
+
+ EXPECT_TRUE_WAIT(pull_iterations() > 0, kMsInSecond);
+ EXPECT_GE(0, push_iterations());
+
+ EXPECT_EQ(0, fake_audio_capture_module_->StopPlayout());
+ EXPECT_FALSE(fake_audio_capture_module_->Playing());
+}
+
+TEST_F(FakeAdmTest, RecordTest) {
+ EXPECT_EQ(0, fake_audio_capture_module_->RegisterAudioCallback(this));
+
+ bool stereo_available = false;
+ EXPECT_EQ(0, fake_audio_capture_module_->StereoRecordingIsAvailable(
+ &stereo_available));
+ EXPECT_FALSE(stereo_available);
+
+ EXPECT_NE(0, fake_audio_capture_module_->StartRecording());
+ EXPECT_FALSE(fake_audio_capture_module_->Recording());
+ EXPECT_EQ(0, fake_audio_capture_module_->StopRecording());
+
+ EXPECT_EQ(0, fake_audio_capture_module_->InitRecording());
+ EXPECT_EQ(0, fake_audio_capture_module_->StartRecording());
+ EXPECT_TRUE(fake_audio_capture_module_->Recording());
+
+ EXPECT_TRUE_WAIT(push_iterations() > 0, kMsInSecond);
+ EXPECT_GE(0, pull_iterations());
+
+ EXPECT_EQ(0, fake_audio_capture_module_->StopRecording());
+ EXPECT_FALSE(fake_audio_capture_module_->Recording());
+}
+
+TEST_F(FakeAdmTest, DuplexTest) {
+ EXPECT_EQ(0, fake_audio_capture_module_->RegisterAudioCallback(this));
+
+ EXPECT_EQ(0, fake_audio_capture_module_->InitPlayout());
+ EXPECT_EQ(0, fake_audio_capture_module_->StartPlayout());
+
+ EXPECT_EQ(0, fake_audio_capture_module_->InitRecording());
+ EXPECT_EQ(0, fake_audio_capture_module_->StartRecording());
+
+ EXPECT_TRUE_WAIT(push_iterations() > 0, kMsInSecond);
+ EXPECT_TRUE_WAIT(pull_iterations() > 0, kMsInSecond);
+
+ EXPECT_EQ(0, fake_audio_capture_module_->StopPlayout());
+ EXPECT_EQ(0, fake_audio_capture_module_->StopRecording());
+}
diff --git a/third_party/libwebrtc/pc/test/fake_data_channel_controller.h b/third_party/libwebrtc/pc/test/fake_data_channel_controller.h
new file mode 100644
index 0000000000..c489a34324
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/fake_data_channel_controller.h
@@ -0,0 +1,238 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_TEST_FAKE_DATA_CHANNEL_CONTROLLER_H_
+#define PC_TEST_FAKE_DATA_CHANNEL_CONTROLLER_H_
+
+#include <set>
+#include <string>
+#include <utility>
+
+#include "pc/sctp_data_channel.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/weak_ptr.h"
+
+class FakeDataChannelController
+ : public webrtc::SctpDataChannelControllerInterface {
+ public:
+ explicit FakeDataChannelController(rtc::Thread* network_thread)
+ : signaling_thread_(rtc::Thread::Current()),
+ network_thread_(network_thread),
+ send_blocked_(false),
+ transport_available_(false),
+ ready_to_send_(false),
+ transport_error_(false) {}
+
+ ~FakeDataChannelController() override {
+ network_thread_->BlockingCall([&] {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ weak_factory_.InvalidateWeakPtrs();
+ });
+ }
+
+ rtc::WeakPtr<FakeDataChannelController> weak_ptr() {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ return weak_factory_.GetWeakPtr();
+ }
+
+ rtc::scoped_refptr<webrtc::SctpDataChannel> CreateDataChannel(
+ absl::string_view label,
+ webrtc::InternalDataChannelInit init) {
+ rtc::scoped_refptr<webrtc::SctpDataChannel> channel =
+ network_thread_->BlockingCall([&]() {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ rtc::WeakPtr<FakeDataChannelController> my_weak_ptr = weak_ptr();
+ // Explicitly associate the weak ptr instance with the current thread
+ // to catch early any inappropriate referencing of it on the network
+ // thread.
+ RTC_CHECK(my_weak_ptr);
+
+ rtc::scoped_refptr<webrtc::SctpDataChannel> channel =
+ webrtc::SctpDataChannel::Create(
+ std::move(my_weak_ptr), std::string(label),
+ transport_available_, init, signaling_thread_,
+ network_thread_);
+ if (transport_available_ && channel->sid_n().HasValue()) {
+ AddSctpDataStream(channel->sid_n());
+ }
+ if (ready_to_send_) {
+ network_thread_->PostTask([channel = channel] {
+ if (channel->state() !=
+ webrtc::DataChannelInterface::DataState::kClosed) {
+ channel->OnTransportReady();
+ }
+ });
+ }
+ connected_channels_.insert(channel.get());
+ return channel;
+ });
+ return channel;
+ }
+
+ webrtc::RTCError SendData(webrtc::StreamId sid,
+ const webrtc::SendDataParams& params,
+ const rtc::CopyOnWriteBuffer& payload) override {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ RTC_CHECK(ready_to_send_);
+ RTC_CHECK(transport_available_);
+ if (send_blocked_) {
+ return webrtc::RTCError(webrtc::RTCErrorType::RESOURCE_EXHAUSTED);
+ }
+
+ if (transport_error_) {
+ return webrtc::RTCError(webrtc::RTCErrorType::INTERNAL_ERROR);
+ }
+
+ last_sid_ = sid;
+ last_send_data_params_ = params;
+ return webrtc::RTCError::OK();
+ }
+
+ void AddSctpDataStream(webrtc::StreamId sid) override {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ RTC_CHECK(sid.HasValue());
+ if (!transport_available_) {
+ return;
+ }
+ known_stream_ids_.insert(sid);
+ }
+
+ void RemoveSctpDataStream(webrtc::StreamId sid) override {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ RTC_CHECK(sid.HasValue());
+ known_stream_ids_.erase(sid);
+ // Unlike the real SCTP transport, act like the closing procedure finished
+ // instantly.
+ auto it = absl::c_find_if(connected_channels_,
+ [&](const auto* c) { return c->sid_n() == sid; });
+ // This path mimics the DCC's OnChannelClosed handler since the FDCC
+ // (this class) doesn't have a transport that would do that.
+ if (it != connected_channels_.end())
+ (*it)->OnClosingProcedureComplete();
+ }
+
+ void OnChannelStateChanged(
+ webrtc::SctpDataChannel* data_channel,
+ webrtc::DataChannelInterface::DataState state) override {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ if (state == webrtc::DataChannelInterface::DataState::kOpen) {
+ ++channels_opened_;
+ } else if (state == webrtc::DataChannelInterface::DataState::kClosed) {
+ ++channels_closed_;
+ connected_channels_.erase(data_channel);
+ }
+ }
+
+ // Set true to emulate the SCTP stream being blocked by congestion control.
+ void set_send_blocked(bool blocked) {
+ network_thread_->BlockingCall([&]() {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ send_blocked_ = blocked;
+ if (!blocked) {
+ RTC_CHECK(transport_available_);
+ // Make a copy since `connected_channels_` may change while
+ // OnTransportReady is called.
+ auto copy = connected_channels_;
+ for (webrtc::SctpDataChannel* ch : copy) {
+ ch->OnTransportReady();
+ }
+ }
+ });
+ }
+
+ // Set true to emulate the transport channel creation, e.g. after
+ // setLocalDescription/setRemoteDescription called with data content.
+ void set_transport_available(bool available) {
+ network_thread_->BlockingCall([&]() {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ transport_available_ = available;
+ });
+ }
+
+ // Set true to emulate the transport OnTransportReady signal when the
+ // transport becomes writable for the first time.
+ void set_ready_to_send(bool ready) {
+ network_thread_->BlockingCall([&]() {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ RTC_CHECK(transport_available_);
+ ready_to_send_ = ready;
+ if (ready) {
+ std::set<webrtc::SctpDataChannel*>::iterator it;
+ for (it = connected_channels_.begin(); it != connected_channels_.end();
+ ++it) {
+ (*it)->OnTransportReady();
+ }
+ }
+ });
+ }
+
+ void set_transport_error() {
+ network_thread_->BlockingCall([&]() {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ transport_error_ = true;
+ });
+ }
+
+ int last_sid() const {
+ return network_thread_->BlockingCall([&]() {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ return last_sid_.stream_id_int();
+ });
+ }
+
+ webrtc::SendDataParams last_send_data_params() const {
+ return network_thread_->BlockingCall([&]() {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ return last_send_data_params_;
+ });
+ }
+
+ bool IsConnected(webrtc::SctpDataChannel* data_channel) const {
+ return network_thread_->BlockingCall([&]() {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ return connected_channels_.find(data_channel) !=
+ connected_channels_.end();
+ });
+ }
+
+ bool IsStreamAdded(webrtc::StreamId id) const {
+ return network_thread_->BlockingCall([&]() {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ return known_stream_ids_.find(id) != known_stream_ids_.end();
+ });
+ }
+
+ int channels_opened() const {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ return channels_opened_;
+ }
+ int channels_closed() const {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ return channels_closed_;
+ }
+
+ private:
+ rtc::Thread* const signaling_thread_;
+ rtc::Thread* const network_thread_;
+ webrtc::StreamId last_sid_ RTC_GUARDED_BY(network_thread_);
+ webrtc::SendDataParams last_send_data_params_ RTC_GUARDED_BY(network_thread_);
+ bool send_blocked_ RTC_GUARDED_BY(network_thread_);
+ bool transport_available_ RTC_GUARDED_BY(network_thread_);
+ bool ready_to_send_ RTC_GUARDED_BY(network_thread_);
+ bool transport_error_ RTC_GUARDED_BY(network_thread_);
+ int channels_closed_ RTC_GUARDED_BY(network_thread_) = 0;
+ int channels_opened_ RTC_GUARDED_BY(network_thread_) = 0;
+ std::set<webrtc::SctpDataChannel*> connected_channels_
+ RTC_GUARDED_BY(network_thread_);
+ std::set<webrtc::StreamId> known_stream_ids_ RTC_GUARDED_BY(network_thread_);
+ rtc::WeakPtrFactory<FakeDataChannelController> weak_factory_
+ RTC_GUARDED_BY(network_thread_){this};
+};
+#endif // PC_TEST_FAKE_DATA_CHANNEL_CONTROLLER_H_
diff --git a/third_party/libwebrtc/pc/test/fake_peer_connection_base.h b/third_party/libwebrtc/pc/test/fake_peer_connection_base.h
new file mode 100644
index 0000000000..a1c8dca12e
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/fake_peer_connection_base.h
@@ -0,0 +1,371 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_TEST_FAKE_PEER_CONNECTION_BASE_H_
+#define PC_TEST_FAKE_PEER_CONNECTION_BASE_H_
+
+#include <map>
+#include <memory>
+#include <set>
+#include <string>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/field_trials_view.h"
+#include "api/sctp_transport_interface.h"
+#include "pc/peer_connection_internal.h"
+#include "test/scoped_key_value_config.h"
+
+namespace webrtc {
+
+// Customized PeerConnection fakes can be created by subclassing
+// FakePeerConnectionBase then overriding the interesting methods. This class
+// takes care of providing default implementations for all the pure virtual
+// functions specified in the interfaces.
+class FakePeerConnectionBase : public PeerConnectionInternal {
+ public:
+  // PeerConnectionInterface implementation.
+
+  rtc::scoped_refptr<StreamCollectionInterface> local_streams() override {
+    return nullptr;
+  }
+
+  rtc::scoped_refptr<StreamCollectionInterface> remote_streams() override {
+    return nullptr;
+  }
+
+  bool AddStream(MediaStreamInterface* stream) override { return false; }
+
+  void RemoveStream(MediaStreamInterface* stream) override {}
+
+  RTCErrorOr<rtc::scoped_refptr<RtpSenderInterface>> AddTrack(
+      rtc::scoped_refptr<MediaStreamTrackInterface> track,
+      const std::vector<std::string>& stream_ids) override {
+    return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented");
+  }
+
+  RTCErrorOr<rtc::scoped_refptr<RtpSenderInterface>> AddTrack(
+      rtc::scoped_refptr<MediaStreamTrackInterface> track,
+      const std::vector<std::string>& stream_ids,
+      const std::vector<RtpEncodingParameters>& init_send_encodings) override {
+    return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented");
+  }
+
+  RTCError RemoveTrackOrError(
+      rtc::scoped_refptr<RtpSenderInterface> sender) override {
+    return RTCError(RTCErrorType::UNSUPPORTED_OPERATION);
+  }
+
+  RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>> AddTransceiver(
+      rtc::scoped_refptr<MediaStreamTrackInterface> track) override {
+    return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented");
+  }
+
+  RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>> AddTransceiver(
+      rtc::scoped_refptr<MediaStreamTrackInterface> track,
+      const RtpTransceiverInit& init) override {
+    return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented");
+  }
+
+  RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>> AddTransceiver(
+      cricket::MediaType media_type) override {
+    return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented");
+  }
+
+  RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>> AddTransceiver(
+      cricket::MediaType media_type,
+      const RtpTransceiverInit& init) override {
+    return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented");
+  }
+
+  rtc::scoped_refptr<RtpSenderInterface> CreateSender(
+      const std::string& kind,
+      const std::string& stream_id) override {
+    return nullptr;
+  }
+
+  std::vector<rtc::scoped_refptr<RtpSenderInterface>> GetSenders()
+      const override {
+    return {};
+  }
+
+  std::vector<rtc::scoped_refptr<RtpReceiverInterface>> GetReceivers()
+      const override {
+    return {};
+  }
+
+  std::vector<rtc::scoped_refptr<RtpTransceiverInterface>> GetTransceivers()
+      const override {
+    return {};
+  }
+
+  bool GetStats(StatsObserver* observer,
+                MediaStreamTrackInterface* track,
+                StatsOutputLevel level) override {
+    return false;
+  }
+
+  void GetStats(RTCStatsCollectorCallback* callback) override {}
+  void GetStats(
+      rtc::scoped_refptr<RtpSenderInterface> selector,
+      rtc::scoped_refptr<RTCStatsCollectorCallback> callback) override {}
+  void GetStats(
+      rtc::scoped_refptr<RtpReceiverInterface> selector,
+      rtc::scoped_refptr<RTCStatsCollectorCallback> callback) override {}
+
+  void ClearStatsCache() override {}
+
+  rtc::scoped_refptr<SctpTransportInterface> GetSctpTransport() const {  // NOTE(review): not marked 'override' — confirm it still matches the interface signature.
+    return nullptr;
+  }
+
+  RTCErrorOr<rtc::scoped_refptr<DataChannelInterface>> CreateDataChannelOrError(
+      const std::string& label,
+      const DataChannelInit* config) override {
+    return RTCError(RTCErrorType::UNSUPPORTED_OPERATION,
+                    "Fake function called");
+  }
+
+  const SessionDescriptionInterface* local_description() const override {
+    return nullptr;
+  }
+  const SessionDescriptionInterface* remote_description() const override {
+    return nullptr;
+  }
+
+  const SessionDescriptionInterface* current_local_description()
+      const override {
+    return nullptr;
+  }
+  const SessionDescriptionInterface* current_remote_description()
+      const override {
+    return nullptr;
+  }
+
+  const SessionDescriptionInterface* pending_local_description()
+      const override {
+    return nullptr;
+  }
+  const SessionDescriptionInterface* pending_remote_description()
+      const override {
+    return nullptr;
+  }
+
+  void RestartIce() override {}
+
+  void CreateOffer(CreateSessionDescriptionObserver* observer,
+                   const RTCOfferAnswerOptions& options) override {}
+
+  void CreateAnswer(CreateSessionDescriptionObserver* observer,
+                    const RTCOfferAnswerOptions& options) override {}
+
+  void SetLocalDescription(SetSessionDescriptionObserver* observer,
+                           SessionDescriptionInterface* desc) override {}
+
+  void SetRemoteDescription(SetSessionDescriptionObserver* observer,
+                            SessionDescriptionInterface* desc) override {}
+
+  void SetRemoteDescription(
+      std::unique_ptr<SessionDescriptionInterface> desc,
+      rtc::scoped_refptr<SetRemoteDescriptionObserverInterface> observer)
+      override {}
+
+  RTCConfiguration GetConfiguration() override { return RTCConfiguration(); }
+
+  RTCError SetConfiguration(
+      const PeerConnectionInterface::RTCConfiguration& config) override {
+    return RTCError();
+  }
+
+  bool AddIceCandidate(const IceCandidateInterface* candidate) override {
+    return false;
+  }
+
+  bool RemoveIceCandidates(
+      const std::vector<cricket::Candidate>& candidates) override {
+    return false;
+  }
+
+  RTCError SetBitrate(const BitrateSettings& bitrate) override {
+    return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented");
+  }
+
+  void SetAudioPlayout(bool playout) override {}
+
+  void SetAudioRecording(bool recording) override {}
+
+  rtc::scoped_refptr<DtlsTransportInterface> LookupDtlsTransportByMid(
+      const std::string& mid) {  // NOTE(review): not marked 'override' — confirm signature matches the interface.
+    return nullptr;
+  }
+
+  SignalingState signaling_state() override { return SignalingState::kStable; }
+
+  IceConnectionState ice_connection_state() override {
+    return IceConnectionState::kIceConnectionNew;
+  }
+
+  IceConnectionState standardized_ice_connection_state() override {
+    return IceConnectionState::kIceConnectionNew;
+  }
+
+  PeerConnectionState peer_connection_state() override {
+    return PeerConnectionState::kNew;
+  }
+
+  IceGatheringState ice_gathering_state() override {
+    return IceGatheringState::kIceGatheringNew;
+  }
+
+  absl::optional<bool> can_trickle_ice_candidates() { return absl::nullopt; }  // NOTE(review): not marked 'override'.
+
+  bool StartRtcEventLog(std::unique_ptr<RtcEventLogOutput> output,
+                        int64_t output_period_ms) override {
+    return false;
+  }
+
+  bool StartRtcEventLog(std::unique_ptr<RtcEventLogOutput> output) override {
+    return false;
+  }
+
+  void StopRtcEventLog() override {}
+
+  void Close() override {}
+
+  // PeerConnectionInternal implementation.
+
+  rtc::Thread* network_thread() const override { return nullptr; }
+  rtc::Thread* worker_thread() const override { return nullptr; }
+  rtc::Thread* signaling_thread() const override { return nullptr; }
+
+  std::string session_id() const override { return ""; }
+
+  bool initial_offerer() const override { return false; }
+
+  std::vector<
+      rtc::scoped_refptr<RtpTransceiverProxyWithInternal<RtpTransceiver>>>
+  GetTransceiversInternal() const override {
+    return {};
+  }
+
+  absl::optional<std::string> sctp_transport_name() const override {
+    return absl::nullopt;
+  }
+
+  absl::optional<std::string> sctp_mid() const override {
+    return absl::nullopt;
+  }
+
+  std::map<std::string, cricket::TransportStats> GetTransportStatsByNames(
+      const std::set<std::string>& transport_names) override {
+    return {};
+  }
+
+  Call::Stats GetCallStats() override { return Call::Stats(); }
+
+  absl::optional<AudioDeviceModule::Stats> GetAudioDeviceStats() override {
+    return absl::nullopt;
+  }
+
+  bool GetLocalCertificate(
+      const std::string& transport_name,
+      rtc::scoped_refptr<rtc::RTCCertificate>* certificate) override {
+    return false;
+  }
+
+  std::unique_ptr<rtc::SSLCertChain> GetRemoteSSLCertChain(
+      const std::string& transport_name) override {
+    return nullptr;
+  }
+
+  bool IceRestartPending(const std::string& content_name) const override {
+    return false;
+  }
+
+  bool NeedsIceRestart(const std::string& content_name) const override {
+    return false;
+  }
+
+  bool GetSslRole(const std::string& content_name,
+                  rtc::SSLRole* role) override {
+    return false;
+  }
+  const PeerConnectionInterface::RTCConfiguration* configuration()
+      const override {
+    return nullptr;
+  }
+
+  void ReportSdpBundleUsage(
+      const SessionDescriptionInterface& remote_description) override {}
+
+  PeerConnectionMessageHandler* message_handler() override { return nullptr; }
+  RtpTransmissionManager* rtp_manager() override { return nullptr; }
+  const RtpTransmissionManager* rtp_manager() const override { return nullptr; }
+  bool dtls_enabled() const override { return false; }
+  const PeerConnectionFactoryInterface::Options* options() const override {
+    return nullptr;
+  }
+
+  CryptoOptions GetCryptoOptions() override { return CryptoOptions(); }
+  JsepTransportController* transport_controller_s() override { return nullptr; }
+  JsepTransportController* transport_controller_n() override { return nullptr; }
+  DataChannelController* data_channel_controller() override { return nullptr; }
+  cricket::PortAllocator* port_allocator() override { return nullptr; }
+  LegacyStatsCollector* legacy_stats() override { return nullptr; }
+  PeerConnectionObserver* Observer() const override { return nullptr; }
+  absl::optional<rtc::SSLRole> GetSctpSslRole_n() override {
+    return absl::nullopt;
+  }
+  PeerConnectionInterface::IceConnectionState ice_connection_state_internal()
+      override {
+    return PeerConnectionInterface::IceConnectionState::kIceConnectionNew;
+  }
+  void SetIceConnectionState(
+      PeerConnectionInterface::IceConnectionState new_state) override {}
+  void NoteUsageEvent(UsageEvent event) override {}
+  bool IsClosed() const override { return false; }
+  bool IsUnifiedPlan() const override { return true; }  // Tests run under Unified Plan semantics by default.
+  bool ValidateBundleSettings(
+      const cricket::SessionDescription* desc,
+      const std::map<std::string, const cricket::ContentGroup*>&
+          bundle_groups_by_mid) override {
+    return false;
+  }
+
+  RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>> AddTransceiver(
+      cricket::MediaType media_type,
+      rtc::scoped_refptr<MediaStreamTrackInterface> track,
+      const RtpTransceiverInit& init,
+      bool fire_callback = true) override {
+    return RTCError(RTCErrorType::INTERNAL_ERROR, "");
+  }
+  void StartSctpTransport(int local_port,
+                          int remote_port,
+                          int max_message_size) override {}
+
+  void AddRemoteCandidate(const std::string& mid,
+                          const cricket::Candidate& candidate) override {}
+
+  Call* call_ptr() override { return nullptr; }
+  bool SrtpRequired() const override { return false; }
+  bool CreateDataChannelTransport(absl::string_view mid) override {
+    return false;
+  }
+  void DestroyDataChannelTransport(RTCError error) override {}
+
+  const FieldTrialsView& trials() const override { return field_trials_; }
+
+ protected:
+  webrtc::test::ScopedKeyValueConfig field_trials_;  // Backing store for trials().
+};
+
+} // namespace webrtc
+
+#endif // PC_TEST_FAKE_PEER_CONNECTION_BASE_H_
diff --git a/third_party/libwebrtc/pc/test/fake_peer_connection_for_stats.h b/third_party/libwebrtc/pc/test/fake_peer_connection_for_stats.h
new file mode 100644
index 0000000000..7302182912
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/fake_peer_connection_for_stats.h
@@ -0,0 +1,568 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_TEST_FAKE_PEER_CONNECTION_FOR_STATS_H_
+#define PC_TEST_FAKE_PEER_CONNECTION_FOR_STATS_H_
+
+#include <map>
+#include <memory>
+#include <set>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "media/base/fake_media_engine.h"
+#include "media/base/media_channel.h"
+#include "pc/channel.h"
+#include "pc/stream_collection.h"
+#include "pc/test/fake_data_channel_controller.h"
+#include "pc/test/fake_peer_connection_base.h"
+
+namespace webrtc {
+
+// Fake VoiceMediaChannel where the result of GetStats can be configured.
+class FakeVoiceMediaSendChannelForStats
+    : public cricket::FakeVoiceMediaSendChannel {
+ public:
+  explicit FakeVoiceMediaSendChannelForStats(TaskQueueBase* network_thread)
+      : cricket::FakeVoiceMediaSendChannel(cricket::AudioOptions(),
+                                           network_thread) {}
+
+  void SetStats(const cricket::VoiceMediaInfo& voice_info) {  // Keeps only the send-side slice of voice_info.
+    send_stats_ = cricket::VoiceMediaSendInfo();
+    send_stats_->senders = voice_info.senders;
+    send_stats_->send_codecs = voice_info.send_codecs;
+  }
+
+  // VoiceMediaChannel overrides.
+  bool GetStats(cricket::VoiceMediaSendInfo* info) override {  // Returns false until SetStats() has been called.
+    if (send_stats_) {
+      *info = *send_stats_;
+      return true;
+    }
+    return false;
+  }
+
+ private:
+  absl::optional<cricket::VoiceMediaSendInfo> send_stats_;  // Unset until SetStats().
+};
+
+class FakeVoiceMediaReceiveChannelForStats
+    : public cricket::FakeVoiceMediaReceiveChannel {
+ public:
+  explicit FakeVoiceMediaReceiveChannelForStats(TaskQueueBase* network_thread)
+      : cricket::FakeVoiceMediaReceiveChannel(cricket::AudioOptions(),
+                                              network_thread) {}
+
+  void SetStats(const cricket::VoiceMediaInfo& voice_info) {  // Keeps only the receive-side slice of voice_info.
+    receive_stats_ = cricket::VoiceMediaReceiveInfo();
+    receive_stats_->receivers = voice_info.receivers;
+    receive_stats_->receive_codecs = voice_info.receive_codecs;
+    receive_stats_->device_underrun_count = voice_info.device_underrun_count;
+  }
+
+  // VoiceMediaChannel overrides.
+  bool GetStats(cricket::VoiceMediaReceiveInfo* info,
+                bool get_and_clear_legacy_stats) override {  // get_and_clear_legacy_stats is ignored by this fake.
+    if (receive_stats_) {
+      *info = *receive_stats_;
+      return true;
+    }
+    return false;
+  }
+
+ private:
+  absl::optional<cricket::VoiceMediaReceiveInfo> receive_stats_;  // Unset until SetStats().
+};
+
+// Fake VideoMediaChannel where the result of GetStats can be configured.
+class FakeVideoMediaSendChannelForStats
+    : public cricket::FakeVideoMediaSendChannel {
+ public:
+  explicit FakeVideoMediaSendChannelForStats(TaskQueueBase* network_thread)
+      : cricket::FakeVideoMediaSendChannel(cricket::VideoOptions(),
+                                           network_thread) {}
+
+  void SetStats(const cricket::VideoMediaInfo& video_info) {  // Keeps only the send-side slice of video_info.
+    send_stats_ = cricket::VideoMediaSendInfo();
+    send_stats_->senders = video_info.senders;
+    send_stats_->aggregated_senders = video_info.aggregated_senders;
+    send_stats_->send_codecs = video_info.send_codecs;
+  }
+
+  // VideoMediaChannel overrides.
+  bool GetStats(cricket::VideoMediaSendInfo* info) override {  // Returns false until SetStats() has been called.
+    if (send_stats_) {
+      *info = *send_stats_;
+      return true;
+    }
+    return false;
+  }
+
+ private:
+  absl::optional<cricket::VideoMediaSendInfo> send_stats_;  // Unset until SetStats().
+};
+
+class FakeVideoMediaReceiveChannelForStats
+    : public cricket::FakeVideoMediaReceiveChannel {
+ public:
+  explicit FakeVideoMediaReceiveChannelForStats(TaskQueueBase* network_thread)
+      : cricket::FakeVideoMediaReceiveChannel(cricket::VideoOptions(),
+                                              network_thread) {}
+
+  void SetStats(const cricket::VideoMediaInfo& video_info) {  // Keeps only the receive-side slice of video_info.
+    receive_stats_ = cricket::VideoMediaReceiveInfo();
+    receive_stats_->receivers = video_info.receivers;
+    receive_stats_->receive_codecs = video_info.receive_codecs;
+  }
+
+  // VideoMediaChannel overrides.
+  bool GetStats(cricket::VideoMediaReceiveInfo* info) override {  // Returns false until SetStats() has been called.
+    if (receive_stats_) {
+      *info = *receive_stats_;
+      return true;
+    }
+    return false;
+  }
+
+ private:
+  absl::optional<cricket::VideoMediaReceiveInfo> receive_stats_;  // Unset until SetStats().
+};
+
+constexpr bool kDefaultRtcpMuxRequired = true;  // NOTE(review): appears unused in this header — confirm before removing.
+constexpr bool kDefaultSrtpRequired = true;  // Passed to the test channels created below.
+
+class VoiceChannelForTesting : public cricket::VoiceChannel {  // VoiceChannel reporting a fixed, injected transport name.
+ public:
+  VoiceChannelForTesting(
+      rtc::Thread* worker_thread,
+      rtc::Thread* network_thread,
+      rtc::Thread* signaling_thread,
+      std::unique_ptr<cricket::VoiceMediaSendChannelInterface> send_channel,
+      std::unique_ptr<cricket::VoiceMediaReceiveChannelInterface>
+          receive_channel,
+      const std::string& content_name,
+      bool srtp_required,
+      webrtc::CryptoOptions crypto_options,
+      rtc::UniqueRandomIdGenerator* ssrc_generator,
+      std::string transport_name)
+      : VoiceChannel(worker_thread,
+                     network_thread,
+                     signaling_thread,
+                     std::move(send_channel),
+                     std::move(receive_channel),
+                     content_name,
+                     srtp_required,
+                     std::move(crypto_options),
+                     ssrc_generator),
+        test_transport_name_(std::move(transport_name)) {}
+
+ private:
+  absl::string_view transport_name() const override {  // Replaces the real transport lookup with the injected name.
+    return test_transport_name_;
+  }
+
+  const std::string test_transport_name_;
+};
+
+class VideoChannelForTesting : public cricket::VideoChannel {  // VideoChannel reporting a fixed, injected transport name.
+ public:
+  VideoChannelForTesting(
+      rtc::Thread* worker_thread,
+      rtc::Thread* network_thread,
+      rtc::Thread* signaling_thread,
+      std::unique_ptr<cricket::VideoMediaSendChannelInterface> send_channel,
+      std::unique_ptr<cricket::VideoMediaReceiveChannelInterface>
+          receive_channel,
+      const std::string& content_name,
+      bool srtp_required,
+      webrtc::CryptoOptions crypto_options,
+      rtc::UniqueRandomIdGenerator* ssrc_generator,
+      std::string transport_name)
+      : VideoChannel(worker_thread,
+                     network_thread,
+                     signaling_thread,
+                     std::move(send_channel),
+                     std::move(receive_channel),
+                     content_name,
+                     srtp_required,
+                     std::move(crypto_options),
+                     ssrc_generator),
+        test_transport_name_(std::move(transport_name)) {}
+
+ private:
+  absl::string_view transport_name() const override {  // Replaces the real transport lookup with the injected name.
+    return test_transport_name_;
+  }
+
+  const std::string test_transport_name_;
+};
+
+// This class is intended to be fed into the StatsCollector and
+// RTCStatsCollector so that the stats functionality can be unit tested.
+// Individual tests can configure this fake as needed to simulate scenarios
+// under which to test the stats collectors.
+class FakePeerConnectionForStats : public FakePeerConnectionBase {
+ public:
+  // TODO(steveanton): Add support for specifying separate threads to test
+  // multi-threading correctness.
+  FakePeerConnectionForStats()  // All three threads alias rtc::Thread::Current().
+      : network_thread_(rtc::Thread::Current()),
+        worker_thread_(rtc::Thread::Current()),
+        signaling_thread_(rtc::Thread::Current()),
+        // TODO(hta): remove separate thread variables and use context.
+        dependencies_(MakeDependencies()),
+        context_(ConnectionContext::Create(&dependencies_)),
+        local_streams_(StreamCollection::Create()),
+        remote_streams_(StreamCollection::Create()),
+        data_channel_controller_(network_thread_) {}
+
+  ~FakePeerConnectionForStats() {  // Detach channels from every transceiver before members are destroyed.
+    for (auto transceiver : transceivers_) {
+      transceiver->internal()->ClearChannel();
+    }
+  }
+
+  static PeerConnectionFactoryDependencies MakeDependencies() {
+    PeerConnectionFactoryDependencies dependencies;
+    dependencies.network_thread = rtc::Thread::Current();
+    dependencies.worker_thread = rtc::Thread::Current();
+    dependencies.signaling_thread = rtc::Thread::Current();
+    dependencies.media_engine = std::make_unique<cricket::FakeMediaEngine>();
+    return dependencies;
+  }
+
+  rtc::scoped_refptr<StreamCollection> mutable_local_streams() {
+    return local_streams_;
+  }
+
+  rtc::scoped_refptr<StreamCollection> mutable_remote_streams() {
+    return remote_streams_;
+  }
+
+  rtc::scoped_refptr<RtpSenderInterface> AddSender(
+      rtc::scoped_refptr<RtpSenderInternal> sender) {
+    // TODO(steveanton): Switch tests to use RtpTransceivers directly.
+    auto sender_proxy = RtpSenderProxyWithInternal<RtpSenderInternal>::Create(
+        signaling_thread_, sender);
+    GetOrCreateFirstTransceiverOfType(sender->media_type())
+        ->internal()
+        ->AddSender(sender_proxy);
+    return sender_proxy;
+  }
+
+  void RemoveSender(rtc::scoped_refptr<RtpSenderInterface> sender) {
+    GetOrCreateFirstTransceiverOfType(sender->media_type())
+        ->internal()
+        ->RemoveSender(sender.get());
+  }
+
+  rtc::scoped_refptr<RtpReceiverInterface> AddReceiver(
+      rtc::scoped_refptr<RtpReceiverInternal> receiver) {
+    // TODO(steveanton): Switch tests to use RtpTransceivers directly.
+    auto receiver_proxy =
+        RtpReceiverProxyWithInternal<RtpReceiverInternal>::Create(
+            signaling_thread_, worker_thread_, receiver);
+    GetOrCreateFirstTransceiverOfType(receiver->media_type())
+        ->internal()
+        ->AddReceiver(receiver_proxy);
+    return receiver_proxy;
+  }
+
+  void RemoveReceiver(rtc::scoped_refptr<RtpReceiverInterface> receiver) {
+    GetOrCreateFirstTransceiverOfType(receiver->media_type())
+        ->internal()
+        ->RemoveReceiver(receiver.get());
+  }
+
+  std::pair<FakeVoiceMediaSendChannelForStats*,
+            FakeVoiceMediaReceiveChannelForStats*>
+  AddVoiceChannel(
+      const std::string& mid,
+      const std::string& transport_name,
+      cricket::VoiceMediaInfo initial_stats = cricket::VoiceMediaInfo()) {
+    auto voice_media_send_channel =
+        std::make_unique<FakeVoiceMediaSendChannelForStats>(network_thread_);
+    auto voice_media_receive_channel =
+        std::make_unique<FakeVoiceMediaReceiveChannelForStats>(network_thread_);
+    auto* voice_media_send_channel_ptr = voice_media_send_channel.get();
+    auto* voice_media_receive_channel_ptr = voice_media_receive_channel.get();
+    auto voice_channel = std::make_unique<VoiceChannelForTesting>(
+        worker_thread_, network_thread_, signaling_thread_,
+        std::move(voice_media_send_channel),
+        std::move(voice_media_receive_channel), mid, kDefaultSrtpRequired,
+        webrtc::CryptoOptions(), context_->ssrc_generator(), transport_name);
+    auto transceiver =
+        GetOrCreateFirstTransceiverOfType(cricket::MEDIA_TYPE_AUDIO)
+            ->internal();
+    if (transceiver->channel()) {
+      // This transceiver already has a channel, create a new one.
+      transceiver =
+          CreateTransceiverOfType(cricket::MEDIA_TYPE_AUDIO)->internal();
+    }
+    RTC_DCHECK(!transceiver->channel());
+    transceiver->SetChannel(std::move(voice_channel),
+                            [](const std::string&) { return nullptr; });  // Stubbed transport lookup.
+    voice_media_send_channel_ptr->SetStats(initial_stats);
+    voice_media_receive_channel_ptr->SetStats(initial_stats);
+    return std::make_pair(voice_media_send_channel_ptr,
+                          voice_media_receive_channel_ptr);
+  }
+
+  std::pair<FakeVideoMediaSendChannelForStats*,
+            FakeVideoMediaReceiveChannelForStats*>
+  AddVideoChannel(
+      const std::string& mid,
+      const std::string& transport_name,
+      cricket::VideoMediaInfo initial_stats = cricket::VideoMediaInfo()) {
+    auto video_media_send_channel =
+        std::make_unique<FakeVideoMediaSendChannelForStats>(network_thread_);
+    auto video_media_receive_channel =
+        std::make_unique<FakeVideoMediaReceiveChannelForStats>(network_thread_);
+    auto video_media_send_channel_ptr = video_media_send_channel.get();
+    auto video_media_receive_channel_ptr = video_media_receive_channel.get();
+    auto video_channel = std::make_unique<VideoChannelForTesting>(
+        worker_thread_, network_thread_, signaling_thread_,
+        std::move(video_media_send_channel),
+        std::move(video_media_receive_channel), mid, kDefaultSrtpRequired,
+        webrtc::CryptoOptions(), context_->ssrc_generator(), transport_name);
+    auto transceiver =
+        GetOrCreateFirstTransceiverOfType(cricket::MEDIA_TYPE_VIDEO)
+            ->internal();
+    if (transceiver->channel()) {
+      // This transceiver already has a channel, create a new one.
+      transceiver =
+          CreateTransceiverOfType(cricket::MEDIA_TYPE_VIDEO)->internal();
+    }
+    RTC_DCHECK(!transceiver->channel());
+    transceiver->SetChannel(std::move(video_channel),
+                            [](const std::string&) { return nullptr; });  // Stubbed transport lookup.
+    video_media_send_channel_ptr->SetStats(initial_stats);
+    video_media_receive_channel_ptr->SetStats(initial_stats);
+    return std::make_pair(video_media_send_channel_ptr,
+                          video_media_receive_channel_ptr);
+  }
+
+  void AddSctpDataChannel(const std::string& label) {
+    AddSctpDataChannel(label, InternalDataChannelInit());
+  }
+
+  void AddSctpDataChannel(const std::string& label,
+                          const InternalDataChannelInit& init) {
+    // TODO(bugs.webrtc.org/11547): Supply a separate network thread.
+    AddSctpDataChannel(SctpDataChannel::Create(
+        data_channel_controller_.weak_ptr(), label, false, init,
+        rtc::Thread::Current(), rtc::Thread::Current()));
+  }
+
+  void AddSctpDataChannel(rtc::scoped_refptr<SctpDataChannel> data_channel) {
+    sctp_data_channels_.push_back(data_channel);
+  }
+
+  void SetTransportStats(const std::string& transport_name,
+                         const cricket::TransportChannelStats& channel_stats) {
+    SetTransportStats(
+        transport_name,
+        std::vector<cricket::TransportChannelStats>{channel_stats});
+  }
+
+  void SetTransportStats(
+      const std::string& transport_name,
+      const std::vector<cricket::TransportChannelStats>& channel_stats_list) {
+    cricket::TransportStats transport_stats;
+    transport_stats.transport_name = transport_name;
+    transport_stats.channel_stats = channel_stats_list;
+    transport_stats_by_name_[transport_name] = transport_stats;
+  }
+
+  void SetCallStats(const Call::Stats& call_stats) { call_stats_ = call_stats; }
+
+  void SetAudioDeviceStats(
+      absl::optional<AudioDeviceModule::Stats> audio_device_stats) {
+    audio_device_stats_ = audio_device_stats;
+  }
+
+  void SetLocalCertificate(
+      const std::string& transport_name,
+      rtc::scoped_refptr<rtc::RTCCertificate> certificate) {
+    local_certificates_by_transport_[transport_name] = certificate;
+  }
+
+  void SetRemoteCertChain(const std::string& transport_name,
+                          std::unique_ptr<rtc::SSLCertChain> chain) {
+    remote_cert_chains_by_transport_[transport_name] = std::move(chain);
+  }
+
+  // PeerConnectionInterface overrides.
+
+  rtc::scoped_refptr<StreamCollectionInterface> local_streams() override {
+    return local_streams_;
+  }
+
+  rtc::scoped_refptr<StreamCollectionInterface> remote_streams() override {
+    return remote_streams_;
+  }
+
+  std::vector<rtc::scoped_refptr<RtpSenderInterface>> GetSenders()
+      const override {
+    std::vector<rtc::scoped_refptr<RtpSenderInterface>> senders;
+    for (auto transceiver : transceivers_) {
+      for (auto sender : transceiver->internal()->senders()) {
+        senders.push_back(sender);
+      }
+    }
+    return senders;
+  }
+
+  std::vector<rtc::scoped_refptr<RtpReceiverInterface>> GetReceivers()
+      const override {
+    std::vector<rtc::scoped_refptr<RtpReceiverInterface>> receivers;
+    for (auto transceiver : transceivers_) {
+      for (auto receiver : transceiver->internal()->receivers()) {
+        receivers.push_back(receiver);
+      }
+    }
+    return receivers;
+  }
+
+  // PeerConnectionInternal overrides.
+
+  rtc::Thread* network_thread() const override { return network_thread_; }
+
+  rtc::Thread* worker_thread() const override { return worker_thread_; }
+
+  rtc::Thread* signaling_thread() const override { return signaling_thread_; }
+
+  std::vector<
+      rtc::scoped_refptr<RtpTransceiverProxyWithInternal<RtpTransceiver>>>
+  GetTransceiversInternal() const override {
+    return transceivers_;
+  }
+
+  std::vector<DataChannelStats> GetDataChannelStats() const override {
+    RTC_DCHECK_RUN_ON(signaling_thread());
+    std::vector<DataChannelStats> stats;
+    for (const auto& channel : sctp_data_channels_)
+      stats.push_back(channel->GetStats());
+    return stats;
+  }
+
+  cricket::CandidateStatsList GetPooledCandidateStats() const override {
+    return {};
+  }
+
+  std::map<std::string, cricket::TransportStats> GetTransportStatsByNames(
+      const std::set<std::string>& transport_names) override {
+    RTC_DCHECK_RUN_ON(network_thread_);
+    std::map<std::string, cricket::TransportStats> transport_stats_by_name;
+    for (const std::string& transport_name : transport_names) {
+      transport_stats_by_name[transport_name] =
+          GetTransportStatsByName(transport_name);
+    }
+    return transport_stats_by_name;
+  }
+
+  Call::Stats GetCallStats() override { return call_stats_; }
+
+  absl::optional<AudioDeviceModule::Stats> GetAudioDeviceStats() override {
+    return audio_device_stats_;
+  }
+
+  bool GetLocalCertificate(
+      const std::string& transport_name,
+      rtc::scoped_refptr<rtc::RTCCertificate>* certificate) override {
+    auto it = local_certificates_by_transport_.find(transport_name);
+    if (it != local_certificates_by_transport_.end()) {
+      *certificate = it->second;
+      return true;
+    } else {
+      return false;
+    }
+  }
+
+  std::unique_ptr<rtc::SSLCertChain> GetRemoteSSLCertChain(
+      const std::string& transport_name) override {
+    auto it = remote_cert_chains_by_transport_.find(transport_name);
+    if (it != remote_cert_chains_by_transport_.end()) {
+      return it->second->Clone();
+    } else {
+      return nullptr;
+    }
+  }
+
+ private:
+  cricket::TransportStats GetTransportStatsByName(
+      const std::string& transport_name) {
+    auto it = transport_stats_by_name_.find(transport_name);
+    if (it != transport_stats_by_name_.end()) {
+      // If specific transport stats have been specified, return those.
+      return it->second;
+    }
+    // Otherwise, generate some dummy stats.
+    cricket::TransportChannelStats channel_stats;
+    channel_stats.component = cricket::ICE_CANDIDATE_COMPONENT_RTP;
+    cricket::TransportStats transport_stats;
+    transport_stats.transport_name = transport_name;
+    transport_stats.channel_stats.push_back(channel_stats);
+    return transport_stats;
+  }
+
+  rtc::scoped_refptr<RtpTransceiverProxyWithInternal<RtpTransceiver>>
+  GetOrCreateFirstTransceiverOfType(cricket::MediaType media_type) {
+    for (auto transceiver : transceivers_) {
+      if (transceiver->internal()->media_type() == media_type) {
+        return transceiver;
+      }
+    }
+    return CreateTransceiverOfType(media_type);
+  }
+
+  rtc::scoped_refptr<RtpTransceiverProxyWithInternal<RtpTransceiver>>
+  CreateTransceiverOfType(cricket::MediaType media_type) {
+    auto transceiver = RtpTransceiverProxyWithInternal<RtpTransceiver>::Create(
+        signaling_thread_,
+        rtc::make_ref_counted<RtpTransceiver>(media_type, context_.get()));
+    transceivers_.push_back(transceiver);
+    return transceiver;
+  }
+
+  rtc::Thread* const network_thread_;  // All three alias rtc::Thread::Current() (see ctor).
+  rtc::Thread* const worker_thread_;
+  rtc::Thread* const signaling_thread_;
+
+  PeerConnectionFactoryDependencies dependencies_;
+  rtc::scoped_refptr<ConnectionContext> context_;
+
+  rtc::scoped_refptr<StreamCollection> local_streams_;
+  rtc::scoped_refptr<StreamCollection> remote_streams_;
+
+  std::vector<
+      rtc::scoped_refptr<RtpTransceiverProxyWithInternal<RtpTransceiver>>>
+      transceivers_;
+
+  FakeDataChannelController data_channel_controller_;
+
+  std::vector<rtc::scoped_refptr<SctpDataChannel>> sctp_data_channels_;
+
+  std::map<std::string, cricket::TransportStats> transport_stats_by_name_;
+
+  Call::Stats call_stats_;
+
+  absl::optional<AudioDeviceModule::Stats> audio_device_stats_;
+
+  std::map<std::string, rtc::scoped_refptr<rtc::RTCCertificate>>
+      local_certificates_by_transport_;
+  std::map<std::string, std::unique_ptr<rtc::SSLCertChain>>
+      remote_cert_chains_by_transport_;
+};
+
+} // namespace webrtc
+
+#endif // PC_TEST_FAKE_PEER_CONNECTION_FOR_STATS_H_
diff --git a/third_party/libwebrtc/pc/test/fake_periodic_video_source.h b/third_party/libwebrtc/pc/test/fake_periodic_video_source.h
new file mode 100644
index 0000000000..452a8f6c30
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/fake_periodic_video_source.h
@@ -0,0 +1,103 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_TEST_FAKE_PERIODIC_VIDEO_SOURCE_H_
+#define PC_TEST_FAKE_PERIODIC_VIDEO_SOURCE_H_
+
+#include <memory>
+
+#include "api/video/video_source_interface.h"
+#include "media/base/fake_frame_source.h"
+#include "media/base/video_broadcaster.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/task_queue_for_test.h"
+#include "rtc_base/task_utils/repeating_task.h"
+
+namespace webrtc {
+
+class FakePeriodicVideoSource final
+ : public rtc::VideoSourceInterface<VideoFrame> {
+ public:
+ static constexpr int kDefaultFrameIntervalMs = 33;
+ static constexpr int kDefaultWidth = 640;
+ static constexpr int kDefaultHeight = 480;
+
+ struct Config {
+ int width = kDefaultWidth;
+ int height = kDefaultHeight;
+ int frame_interval_ms = kDefaultFrameIntervalMs;
+ VideoRotation rotation = kVideoRotation_0;
+ int64_t timestamp_offset_ms = 0;
+ };
+
+ FakePeriodicVideoSource() : FakePeriodicVideoSource(Config()) {}
+ explicit FakePeriodicVideoSource(Config config)
+ : frame_source_(
+ config.width,
+ config.height,
+ config.frame_interval_ms * rtc::kNumMicrosecsPerMillisec,
+ config.timestamp_offset_ms * rtc::kNumMicrosecsPerMillisec),
+ task_queue_(std::make_unique<TaskQueueForTest>(
+ "FakePeriodicVideoTrackSource")) {
+ frame_source_.SetRotation(config.rotation);
+
+ TimeDelta frame_interval = TimeDelta::Millis(config.frame_interval_ms);
+ repeating_task_handle_ =
+ RepeatingTaskHandle::Start(task_queue_->Get(), [this, frame_interval] {
+ if (broadcaster_.wants().rotation_applied) {
+ broadcaster_.OnFrame(frame_source_.GetFrameRotationApplied());
+ } else {
+ broadcaster_.OnFrame(frame_source_.GetFrame());
+ }
+ return frame_interval;
+ });
+ }
+
+ rtc::VideoSinkWants wants() const {
+ MutexLock lock(&mutex_);
+ return wants_;
+ }
+
+ void RemoveSink(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) override {
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ broadcaster_.RemoveSink(sink);
+ }
+
+ void AddOrUpdateSink(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
+ const rtc::VideoSinkWants& wants) override {
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ {
+ MutexLock lock(&mutex_);
+ wants_ = wants;
+ }
+ broadcaster_.AddOrUpdateSink(sink, wants);
+ }
+
+ void Stop() {
+ RTC_DCHECK(task_queue_);
+ task_queue_->SendTask([&]() { repeating_task_handle_.Stop(); });
+ task_queue_.reset();
+ }
+
+ private:
+ SequenceChecker thread_checker_{SequenceChecker::kDetached};
+
+ rtc::VideoBroadcaster broadcaster_;
+ cricket::FakeFrameSource frame_source_;
+ mutable Mutex mutex_;
+ rtc::VideoSinkWants wants_ RTC_GUARDED_BY(&mutex_);
+
+ std::unique_ptr<TaskQueueForTest> task_queue_;
+ RepeatingTaskHandle repeating_task_handle_;
+};
+
+} // namespace webrtc
+
+#endif // PC_TEST_FAKE_PERIODIC_VIDEO_SOURCE_H_
diff --git a/third_party/libwebrtc/pc/test/fake_periodic_video_track_source.h b/third_party/libwebrtc/pc/test/fake_periodic_video_track_source.h
new file mode 100644
index 0000000000..f91144d1cc
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/fake_periodic_video_track_source.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_TEST_FAKE_PERIODIC_VIDEO_TRACK_SOURCE_H_
+#define PC_TEST_FAKE_PERIODIC_VIDEO_TRACK_SOURCE_H_
+
+#include "pc/test/fake_periodic_video_source.h"
+#include "pc/video_track_source.h"
+
+namespace webrtc {
+
+// A VideoTrackSource generating frames with configured size and frame interval.
+class FakePeriodicVideoTrackSource : public VideoTrackSource {
+ public:
+ explicit FakePeriodicVideoTrackSource(bool remote)
+ : FakePeriodicVideoTrackSource(FakePeriodicVideoSource::Config(),
+ remote) {}
+
+ FakePeriodicVideoTrackSource(FakePeriodicVideoSource::Config config,
+ bool remote)
+ : VideoTrackSource(remote), source_(config) {}
+
+ ~FakePeriodicVideoTrackSource() = default;
+
+ FakePeriodicVideoSource& fake_periodic_source() { return source_; }
+ const FakePeriodicVideoSource& fake_periodic_source() const {
+ return source_;
+ }
+
+ protected:
+ rtc::VideoSourceInterface<VideoFrame>* source() override { return &source_; }
+
+ private:
+ FakePeriodicVideoSource source_;
+};
+
+} // namespace webrtc
+
+#endif // PC_TEST_FAKE_PERIODIC_VIDEO_TRACK_SOURCE_H_
diff --git a/third_party/libwebrtc/pc/test/fake_rtc_certificate_generator.h b/third_party/libwebrtc/pc/test/fake_rtc_certificate_generator.h
new file mode 100644
index 0000000000..61da26a12f
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/fake_rtc_certificate_generator.h
@@ -0,0 +1,222 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_TEST_FAKE_RTC_CERTIFICATE_GENERATOR_H_
+#define PC_TEST_FAKE_RTC_CERTIFICATE_GENERATOR_H_
+
+#include <string>
+#include <utility>
+
+#include "absl/types/optional.h"
+#include "api/peer_connection_interface.h"
+#include "api/task_queue/task_queue_base.h"
+#include "api/units/time_delta.h"
+#include "rtc_base/rtc_certificate.h"
+#include "rtc_base/rtc_certificate_generator.h"
+
+// RSA with mod size 1024, pub exp 0x10001.
+static const rtc::RTCCertificatePEM kRsaPems[] = {
+ rtc::RTCCertificatePEM(
+ "-----BEGIN RSA PRIVATE KEY-----\n"
+ "MIICdwIBADANBgkqhkiG9w0BAQEFAASCAmEwggJdAgEAAoGBAMYRkbhmI7kVA/rM\n"
+ "czsZ+6JDhDvnkF+vn6yCAGuRPV03zuRqZtDy4N4to7PZu9PjqrRl7nDMXrG3YG9y\n"
+ "rlIAZ72KjcKKFAJxQyAKLCIdawKRyp8RdK3LEySWEZb0AV58IadqPZDTNHHRX8dz\n"
+ "5aTSMsbbkZ+C/OzTnbiMqLL/vg6jAgMBAAECgYAvgOs4FJcgvp+TuREx7YtiYVsH\n"
+ "mwQPTum2z/8VzWGwR8BBHBvIpVe1MbD/Y4seyI2aco/7UaisatSgJhsU46/9Y4fq\n"
+ "2TwXH9QANf4at4d9n/R6rzwpAJOpgwZgKvdQjkfrKTtgLV+/dawvpxUYkRH4JZM1\n"
+ "CVGukMfKNrSVH4Ap4QJBAOJmGV1ASPnB4r4nc99at7JuIJmd7fmuVUwUgYi4XgaR\n"
+ "WhScBsgYwZ/JoywdyZJgnbcrTDuVcWG56B3vXbhdpMsCQQDf9zeJrjnPZ3Cqm79y\n"
+ "kdqANep0uwZciiNiWxsQrCHztywOvbFhdp8iYVFG9EK8DMY41Y5TxUwsHD+67zao\n"
+ "ZNqJAkEA1suLUP/GvL8IwuRneQd2tWDqqRQ/Td3qq03hP7e77XtF/buya3Ghclo5\n"
+ "54czUR89QyVfJEC6278nzA7n2h1uVQJAcG6mztNL6ja/dKZjYZye2CY44QjSlLo0\n"
+ "MTgTSjdfg/28fFn2Jjtqf9Pi/X+50LWI/RcYMC2no606wRk9kyOuIQJBAK6VSAim\n"
+ "1pOEjsYQn0X5KEIrz1G3bfCbB848Ime3U2/FWlCHMr6ch8kCZ5d1WUeJD3LbwMNG\n"
+ "UCXiYxSsu20QNVw=\n"
+ "-----END RSA PRIVATE KEY-----\n",
+ "-----BEGIN CERTIFICATE-----\n"
+ "MIIBmTCCAQKgAwIBAgIEbzBSAjANBgkqhkiG9w0BAQsFADARMQ8wDQYDVQQDEwZX\n"
+ "ZWJSVEMwHhcNMTQwMTAyMTgyNDQ3WhcNMTQwMjAxMTgyNDQ3WjARMQ8wDQYDVQQD\n"
+ "EwZXZWJSVEMwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAMYRkbhmI7kVA/rM\n"
+ "czsZ+6JDhDvnkF+vn6yCAGuRPV03zuRqZtDy4N4to7PZu9PjqrRl7nDMXrG3YG9y\n"
+ "rlIAZ72KjcKKFAJxQyAKLCIdawKRyp8RdK3LEySWEZb0AV58IadqPZDTNHHRX8dz\n"
+ "5aTSMsbbkZ+C/OzTnbiMqLL/vg6jAgMBAAEwDQYJKoZIhvcNAQELBQADgYEAUflI\n"
+ "VUe5Krqf5RVa5C3u/UTAOAUJBiDS3VANTCLBxjuMsvqOG0WvaYWP3HYPgrz0jXK2\n"
+ "LJE/mGw3MyFHEqi81jh95J+ypl6xKW6Rm8jKLR87gUvCaVYn/Z4/P3AqcQTB7wOv\n"
+ "UD0A8qfhfDM+LK6rPAnCsVN0NRDY3jvd6rzix9M=\n"
+ "-----END CERTIFICATE-----\n"),
+ rtc::RTCCertificatePEM(
+ "-----BEGIN RSA PRIVATE KEY-----\n"
+ "MIICXQIBAAKBgQDeYqlyJ1wuiMsi905e3X81/WA/G3ym50PIDZBVtSwZi7JVQPgj\n"
+ "Bl8CPZMvDh9EwB4Ji9ytA8dZZbQ4WbJWPr73zPpJSCvQqz6sOXSlenBRi72acNaQ\n"
+ "sOR/qPvviJx5I6Hqo4qemfnjZhAW85a5BpgrAwKgMLIQTHCTLWwVSyrDrwIDAQAB\n"
+ "AoGARni9eY8/hv+SX+I+05EdXt6MQXNUbQ+cSykBNCfVccLzIFEWUQMT2IHqwl6X\n"
+ "ShIXcq7/n1QzOAEiuzixauM3YHg4xZ1Um2Ha9a7ig5Xg4v6b43bmMkNE6LkoAtYs\n"
+ "qnQdfMh442b1liDud6IMb1Qk0amt3fSrgRMc547TZQVx4QECQQDxUeDm94r3p4ng\n"
+ "5rCLLC1K5/6HSTZsh7jatKPlz7GfP/IZlYV7iE5784/n0wRiCjZOS7hQRy/8m2Gp\n"
+ "pf4aZq+DAkEA6+np4d36FYikydvUrupLT3FkdRHGn/v83qOll/VmeNh+L1xMZlIP\n"
+ "tM26hAXCcQb7O5+J9y3cx2CAQsBS11ZXZQJAfGgTo76WG9p5UEJdXUInD2jOZPwv\n"
+ "XIATolxh6kXKcijLLLlSmT7KB0inNYIpzkkpee+7U1d/u6B3FriGaSHq9QJBAM/J\n"
+ "ICnDdLCgwNvWVraVQC3BpwSB2pswvCFwq7py94V60XFvbw80Ogc6qIv98qvQxVlX\n"
+ "hJIEgA/PjEi+0ng94Q0CQQDm8XSDby35gmjO+6eRmJtAjtB7nguLvrPXM6CPXRmD\n"
+ "sRoBocpHw6j9UdzZ6qYG0FkdXZghezXFY58ro2BYYRR3\n"
+ "-----END RSA PRIVATE KEY-----\n",
+ "-----BEGIN CERTIFICATE-----\n"
+ "MIICWDCCAcGgAwIBAgIJALgDjxMbBOhbMA0GCSqGSIb3DQEBCwUAMEUxCzAJBgNV\n"
+ "BAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRlcm5ldCBX\n"
+ "aWRnaXRzIFB0eSBMdGQwHhcNMTUxMTEzMjIzMjEzWhcNMTYxMTEyMjIzMjEzWjBF\n"
+ "MQswCQYDVQQGEwJBVTETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UECgwYSW50\n"
+ "ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKB\n"
+ "gQDeYqlyJ1wuiMsi905e3X81/WA/G3ym50PIDZBVtSwZi7JVQPgjBl8CPZMvDh9E\n"
+ "wB4Ji9ytA8dZZbQ4WbJWPr73zPpJSCvQqz6sOXSlenBRi72acNaQsOR/qPvviJx5\n"
+ "I6Hqo4qemfnjZhAW85a5BpgrAwKgMLIQTHCTLWwVSyrDrwIDAQABo1AwTjAdBgNV\n"
+ "HQ4EFgQUx2tbJdlcSTCepn09UdYORXKuSTAwHwYDVR0jBBgwFoAUx2tbJdlcSTCe\n"
+ "pn09UdYORXKuSTAwDAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQsFAAOBgQAmp9Id\n"
+ "E716gHMqeBG4S2FCgVFCr0a0ugkaneQAN/c2L9CbMemEN9W6jvucUIVOtYd90dDW\n"
+ "lXuowWmT/JctPe3D2qt4yvYW3puECHk2tVQmrJOZiZiTRtWm6HxkmoUYHYp/DtaS\n"
+ "1Xe29gSTnZtI5sQCrGMzk3SGRSSs7ejLKiVDBQ==\n"
+ "-----END CERTIFICATE-----\n")};
+
+// ECDSA with EC_NIST_P256.
+// These PEM strings were created by generating an identity with
+// `SSLIdentity::Create` and invoking `identity->PrivateKeyToPEMString()`,
+// `identity->PublicKeyToPEMString()` and
+// `identity->certificate().ToPEMString()`.
+static const rtc::RTCCertificatePEM kEcdsaPems[] = {
+ rtc::RTCCertificatePEM(
+ "-----BEGIN PRIVATE KEY-----\n"
+ "MIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQg+qaRsR5uHtqG689M\n"
+ "A3PHSJNeVpyi5wUKCft62h0UWy+hRANCAAS5Mjc85q9fVq4ln+zOPlaEC/Rzj5Pb\n"
+ "MVZtf1x/8k2KsbmyZoAMDX2yer/atEuXmItMe3yd6/DXnvboU//D3Lyt\n"
+ "-----END PRIVATE KEY-----\n",
+ "-----BEGIN CERTIFICATE-----\n"
+ "MIIBFTCBu6ADAgECAgkA30tGY5XG7oowCgYIKoZIzj0EAwIwEDEOMAwGA1UEAwwF\n"
+ "dGVzdDMwHhcNMTYwNTA5MDkxODA4WhcNMTYwNjA5MDkxODA4WjAQMQ4wDAYDVQQD\n"
+ "DAV0ZXN0MzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABLkyNzzmr19WriWf7M4+\n"
+ "VoQL9HOPk9sxVm1/XH/yTYqxubJmgAwNfbJ6v9q0S5eYi0x7fJ3r8Nee9uhT/8Pc\n"
+ "vK0wCgYIKoZIzj0EAwIDSQAwRgIhAIIc3+CqfkZ9lLwTj1PvUtt3KhnqF2kD0War\n"
+ "cCoTBbCxAiEAyp9Cn4vo2ZBhRIVDKyoxmwak8Z0PAVhJAQaWCgoY2D4=\n"
+ "-----END CERTIFICATE-----\n"),
+ rtc::RTCCertificatePEM(
+ "-----BEGIN PRIVATE KEY-----\n"
+ "MIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQghL/G4JRYnuDNbQuh\n"
+ "LqkytcE39Alsq6FItDVFgOesfCmhRANCAATd53FjPLyVUcwYguEPbSJM03fP6Rx5\n"
+ "GY1dEZ00+ZykjJI83VfDAyvmpRuGahNtBH0hc+7xkDCbeo6TM0tN35xr\n"
+ "-----END PRIVATE KEY-----\n",
+ "-----BEGIN CERTIFICATE-----\n"
+ "MIIBFDCBu6ADAgECAgkArZYdXMyJ5rswCgYIKoZIzj0EAwIwEDEOMAwGA1UEAwwF\n"
+ "dGVzdDQwHhcNMTYwNTA5MDkxODA4WhcNMTYwNjA5MDkxODA4WjAQMQ4wDAYDVQQD\n"
+ "DAV0ZXN0NDBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABN3ncWM8vJVRzBiC4Q9t\n"
+ "IkzTd8/pHHkZjV0RnTT5nKSMkjzdV8MDK+alG4ZqE20EfSFz7vGQMJt6jpMzS03f\n"
+ "nGswCgYIKoZIzj0EAwIDSAAwRQIgb/LBc8OtsC5lEDyjCP6M9xt5mwzUNrQBOFWZ\n"
+ "1fE/g68CIQD7uoFfbiq6dTp8ZwzbwQ8jJf08KjriamqA9OW/4268Dw==\n"
+ "-----END CERTIFICATE-----\n")};
+
+class FakeRTCCertificateGenerator
+ : public rtc::RTCCertificateGeneratorInterface {
+ public:
+ FakeRTCCertificateGenerator() : should_fail_(false), should_wait_(false) {}
+
+ void set_should_fail(bool should_fail) { should_fail_ = should_fail; }
+
+ // If set to true, stalls the generation of the fake certificate until it is
+ // set to false.
+ void set_should_wait(bool should_wait) { should_wait_ = should_wait; }
+
+ void use_original_key() { key_index_ = 0; }
+ void use_alternate_key() { key_index_ = 1; }
+
+ int generated_certificates() { return generated_certificates_; }
+ int generated_failures() { return generated_failures_; }
+
+ void GenerateCertificateAsync(const rtc::KeyParams& key_params,
+ const absl::optional<uint64_t>& expires_ms,
+ Callback callback) override {
+ // The certificates are created from constant PEM strings and use its coded
+ // expiration time, we do not support modifying it.
+ RTC_DCHECK(!expires_ms);
+
+ // Only supports RSA-1024-0x10001 and ECDSA-P256.
+ if (key_params.type() == rtc::KT_RSA) {
+ RTC_DCHECK_EQ(key_params.rsa_params().mod_size, 1024);
+ RTC_DCHECK_EQ(key_params.rsa_params().pub_exp, 0x10001);
+ } else {
+ RTC_DCHECK_EQ(key_params.type(), rtc::KT_ECDSA);
+ RTC_DCHECK_EQ(key_params.ec_curve(), rtc::EC_NIST_P256);
+ }
+ rtc::KeyType key_type = key_params.type();
+ webrtc::TaskQueueBase::Current()->PostTask(
+ [this, key_type, callback = std::move(callback)]() mutable {
+ GenerateCertificate(key_type, std::move(callback));
+ });
+ }
+
+ static rtc::scoped_refptr<rtc::RTCCertificate> GenerateCertificate() {
+ switch (rtc::KT_DEFAULT) {
+ case rtc::KT_RSA:
+ return rtc::RTCCertificate::FromPEM(kRsaPems[0]);
+ case rtc::KT_ECDSA:
+ return rtc::RTCCertificate::FromPEM(kEcdsaPems[0]);
+ default:
+ RTC_DCHECK_NOTREACHED();
+ return nullptr;
+ }
+ }
+
+ private:
+ const rtc::RTCCertificatePEM& get_pem(const rtc::KeyType& key_type) const {
+ switch (key_type) {
+ case rtc::KT_RSA:
+ return kRsaPems[key_index_];
+ case rtc::KT_ECDSA:
+ return kEcdsaPems[key_index_];
+ default:
+ RTC_DCHECK_NOTREACHED();
+ return kEcdsaPems[key_index_];
+ }
+ }
+ const std::string& get_key(const rtc::KeyType& key_type) const {
+ return get_pem(key_type).private_key();
+ }
+ const std::string& get_cert(const rtc::KeyType& key_type) const {
+ return get_pem(key_type).certificate();
+ }
+
+ void GenerateCertificate(rtc::KeyType key_type, Callback callback) {
+ // If the certificate generation should be stalled, re-post this same
+ // message to the queue with a small delay so as to wait in a loop until
+ // set_should_wait(false) is called.
+ if (should_wait_) {
+ webrtc::TaskQueueBase::Current()->PostDelayedTask(
+ [this, key_type, callback = std::move(callback)]() mutable {
+ GenerateCertificate(key_type, std::move(callback));
+ },
+ webrtc::TimeDelta::Millis(1));
+ return;
+ }
+ if (should_fail_) {
+ ++generated_failures_;
+ std::move(callback)(nullptr);
+ } else {
+ rtc::scoped_refptr<rtc::RTCCertificate> certificate =
+ rtc::RTCCertificate::FromPEM(get_pem(key_type));
+ RTC_DCHECK(certificate);
+ ++generated_certificates_;
+ std::move(callback)(std::move(certificate));
+ }
+ }
+
+ bool should_fail_;
+ bool should_wait_;
+ int key_index_ = 0;
+ int generated_certificates_ = 0;
+ int generated_failures_ = 0;
+};
+
+#endif // PC_TEST_FAKE_RTC_CERTIFICATE_GENERATOR_H_
diff --git a/third_party/libwebrtc/pc/test/fake_video_track_renderer.h b/third_party/libwebrtc/pc/test/fake_video_track_renderer.h
new file mode 100644
index 0000000000..f6e341b4b2
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/fake_video_track_renderer.h
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_TEST_FAKE_VIDEO_TRACK_RENDERER_H_
+#define PC_TEST_FAKE_VIDEO_TRACK_RENDERER_H_
+
+#include "api/media_stream_interface.h"
+#include "media/base/fake_video_renderer.h"
+
+namespace webrtc {
+
+class FakeVideoTrackRenderer : public cricket::FakeVideoRenderer {
+ public:
+ explicit FakeVideoTrackRenderer(VideoTrackInterface* video_track)
+ : video_track_(video_track) {
+ video_track_->AddOrUpdateSink(this, rtc::VideoSinkWants());
+ }
+ ~FakeVideoTrackRenderer() { video_track_->RemoveSink(this); }
+
+ private:
+ rtc::scoped_refptr<VideoTrackInterface> video_track_;
+};
+
+} // namespace webrtc
+
+#endif // PC_TEST_FAKE_VIDEO_TRACK_RENDERER_H_
diff --git a/third_party/libwebrtc/pc/test/fake_video_track_source.h b/third_party/libwebrtc/pc/test/fake_video_track_source.h
new file mode 100644
index 0000000000..2042c39175
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/fake_video_track_source.h
@@ -0,0 +1,54 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_TEST_FAKE_VIDEO_TRACK_SOURCE_H_
+#define PC_TEST_FAKE_VIDEO_TRACK_SOURCE_H_
+
+#include "api/media_stream_interface.h"
+#include "media/base/video_broadcaster.h"
+#include "pc/video_track_source.h"
+
+namespace webrtc {
+
+// A minimal implementation of VideoTrackSource. Includes a VideoBroadcaster for
+// injection of frames.
+class FakeVideoTrackSource : public VideoTrackSource {
+ public:
+ static rtc::scoped_refptr<FakeVideoTrackSource> Create(bool is_screencast) {
+ return rtc::make_ref_counted<FakeVideoTrackSource>(is_screencast);
+ }
+
+ static rtc::scoped_refptr<FakeVideoTrackSource> Create() {
+ return Create(false);
+ }
+
+ bool is_screencast() const override { return is_screencast_; }
+
+ void InjectFrame(const VideoFrame& frame) {
+ video_broadcaster_.OnFrame(frame);
+ }
+
+ protected:
+ explicit FakeVideoTrackSource(bool is_screencast)
+ : VideoTrackSource(false /* remote */), is_screencast_(is_screencast) {}
+ ~FakeVideoTrackSource() override = default;
+
+ rtc::VideoSourceInterface<VideoFrame>* source() override {
+ return &video_broadcaster_;
+ }
+
+ private:
+ const bool is_screencast_;
+ rtc::VideoBroadcaster video_broadcaster_;
+};
+
+} // namespace webrtc
+
+#endif // PC_TEST_FAKE_VIDEO_TRACK_SOURCE_H_
diff --git a/third_party/libwebrtc/pc/test/frame_generator_capturer_video_track_source.h b/third_party/libwebrtc/pc/test/frame_generator_capturer_video_track_source.h
new file mode 100644
index 0000000000..79a5b3474a
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/frame_generator_capturer_video_track_source.h
@@ -0,0 +1,92 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_TEST_FRAME_GENERATOR_CAPTURER_VIDEO_TRACK_SOURCE_H_
+#define PC_TEST_FRAME_GENERATOR_CAPTURER_VIDEO_TRACK_SOURCE_H_
+
+#include <memory>
+#include <utility>
+
+#include "api/task_queue/default_task_queue_factory.h"
+#include "api/task_queue/task_queue_factory.h"
+#include "api/test/create_frame_generator.h"
+#include "pc/video_track_source.h"
+#include "test/frame_generator_capturer.h"
+
+namespace webrtc {
+
+// Implements a VideoTrackSourceInterface to be used for creating VideoTracks.
+// The video source is generated using a FrameGeneratorCapturer, specifically
+// a SquareGenerator that generates frames with randomly sized and colored
+// squares.
+class FrameGeneratorCapturerVideoTrackSource : public VideoTrackSource {
+ public:
+ static const int kDefaultFramesPerSecond = 30;
+ static const int kDefaultWidth = 640;
+ static const int kDefaultHeight = 480;
+ static const int kNumSquaresGenerated = 50;
+
+ struct Config {
+ int frames_per_second = kDefaultFramesPerSecond;
+ int width = kDefaultWidth;
+ int height = kDefaultHeight;
+ int num_squares_generated = 50;
+ };
+
+ FrameGeneratorCapturerVideoTrackSource(Config config,
+ Clock* clock,
+ bool is_screencast)
+ : VideoTrackSource(false /* remote */),
+ task_queue_factory_(CreateDefaultTaskQueueFactory()),
+ is_screencast_(is_screencast) {
+ video_capturer_ = std::make_unique<test::FrameGeneratorCapturer>(
+ clock,
+ test::CreateSquareFrameGenerator(config.width, config.height,
+ absl::nullopt,
+ config.num_squares_generated),
+ config.frames_per_second, *task_queue_factory_);
+ video_capturer_->Init();
+ }
+
+ FrameGeneratorCapturerVideoTrackSource(
+ std::unique_ptr<test::FrameGeneratorCapturer> video_capturer,
+ bool is_screencast)
+ : VideoTrackSource(false /* remote */),
+ video_capturer_(std::move(video_capturer)),
+ is_screencast_(is_screencast) {}
+
+ ~FrameGeneratorCapturerVideoTrackSource() = default;
+
+ void Start() {
+ SetState(kLive);
+ video_capturer_->Start();
+ }
+
+ void Stop() {
+ SetState(kMuted);
+ video_capturer_->Stop();
+ }
+
+ bool is_screencast() const override { return is_screencast_; }
+
+ protected:
+ rtc::VideoSourceInterface<VideoFrame>* source() override {
+ return video_capturer_.get();
+ }
+
+ private:
+ const std::unique_ptr<TaskQueueFactory> task_queue_factory_;
+ std::unique_ptr<test::FrameGeneratorCapturer> video_capturer_;
+ const bool is_screencast_;
+};
+
+} // namespace webrtc
+
+#endif // PC_TEST_FRAME_GENERATOR_CAPTURER_VIDEO_TRACK_SOURCE_H_
diff --git a/third_party/libwebrtc/pc/test/integration_test_helpers.cc b/third_party/libwebrtc/pc/test/integration_test_helpers.cc
new file mode 100644
index 0000000000..ede159d744
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/integration_test_helpers.cc
@@ -0,0 +1,97 @@
+/*
+ * Copyright 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/test/integration_test_helpers.h"
+
+namespace webrtc {
+
+PeerConnectionInterface::RTCOfferAnswerOptions IceRestartOfferAnswerOptions() {
+ PeerConnectionInterface::RTCOfferAnswerOptions options;
+ options.ice_restart = true;
+ return options;
+}
+
+void RemoveSsrcsAndMsids(cricket::SessionDescription* desc) {
+ for (ContentInfo& content : desc->contents()) {
+ content.media_description()->mutable_streams().clear();
+ }
+ desc->set_msid_supported(false);
+ desc->set_msid_signaling(0);
+}
+
+void RemoveSsrcsAndKeepMsids(cricket::SessionDescription* desc) {
+ for (ContentInfo& content : desc->contents()) {
+ std::string track_id;
+ std::vector<std::string> stream_ids;
+ if (!content.media_description()->streams().empty()) {
+ const StreamParams& first_stream =
+ content.media_description()->streams()[0];
+ track_id = first_stream.id;
+ stream_ids = first_stream.stream_ids();
+ }
+ content.media_description()->mutable_streams().clear();
+ StreamParams new_stream;
+ new_stream.id = track_id;
+ new_stream.set_stream_ids(stream_ids);
+ content.media_description()->AddStream(new_stream);
+ }
+}
+
+int FindFirstMediaStatsIndexByKind(
+ const std::string& kind,
+ const std::vector<const webrtc::RTCInboundRtpStreamStats*>& inbound_rtps) {
+ for (size_t i = 0; i < inbound_rtps.size(); i++) {
+ if (*inbound_rtps[i]->kind == kind) {
+ return i;
+ }
+ }
+ return -1;
+}
+
+void ReplaceFirstSsrc(StreamParams& stream, uint32_t ssrc) {
+ stream.ssrcs[0] = ssrc;
+ for (auto& group : stream.ssrc_groups) {
+ group.ssrcs[0] = ssrc;
+ }
+}
+
+TaskQueueMetronome::TaskQueueMetronome(TimeDelta tick_period)
+ : tick_period_(tick_period) {}
+
+TaskQueueMetronome::~TaskQueueMetronome() {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+}
+void TaskQueueMetronome::RequestCallOnNextTick(
+ absl::AnyInvocable<void() &&> callback) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ callbacks_.push_back(std::move(callback));
+ // Only schedule a tick callback for the first `callback` addition.
+ // Schedule on the current task queue to comply with RequestCallOnNextTick
+ // requirements.
+ if (callbacks_.size() == 1) {
+ TaskQueueBase::Current()->PostDelayedTask(
+ SafeTask(safety_.flag(),
+ [this] {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ std::vector<absl::AnyInvocable<void() &&>> callbacks;
+ callbacks_.swap(callbacks);
+ for (auto& callback : callbacks)
+ std::move(callback)();
+ }),
+ tick_period_);
+ }
+}
+
+TimeDelta TaskQueueMetronome::TickPeriod() const {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ return tick_period_;
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/test/integration_test_helpers.h b/third_party/libwebrtc/pc/test/integration_test_helpers.h
new file mode 100644
index 0000000000..36b2111324
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/integration_test_helpers.h
@@ -0,0 +1,1945 @@
+/*
+ * Copyright 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_TEST_INTEGRATION_TEST_HELPERS_H_
+#define PC_TEST_INTEGRATION_TEST_HELPERS_H_
+
+#include <limits.h>
+#include <stdint.h>
+#include <stdio.h>
+
+#include <algorithm>
+#include <functional>
+#include <limits>
+#include <list>
+#include <map>
+#include <memory>
+#include <set>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "absl/algorithm/container.h"
+#include "absl/memory/memory.h"
+#include "absl/strings/string_view.h"
+#include "absl/types/optional.h"
+#include "api/audio_options.h"
+#include "api/call/call_factory_interface.h"
+#include "api/candidate.h"
+#include "api/crypto/crypto_options.h"
+#include "api/data_channel_interface.h"
+#include "api/field_trials_view.h"
+#include "api/ice_transport_interface.h"
+#include "api/jsep.h"
+#include "api/media_stream_interface.h"
+#include "api/media_types.h"
+#include "api/peer_connection_interface.h"
+#include "api/rtc_error.h"
+#include "api/rtc_event_log/rtc_event_log_factory.h"
+#include "api/rtc_event_log/rtc_event_log_factory_interface.h"
+#include "api/rtc_event_log_output.h"
+#include "api/rtp_receiver_interface.h"
+#include "api/rtp_sender_interface.h"
+#include "api/rtp_transceiver_interface.h"
+#include "api/scoped_refptr.h"
+#include "api/stats/rtc_stats.h"
+#include "api/stats/rtc_stats_report.h"
+#include "api/stats/rtcstats_objects.h"
+#include "api/task_queue/default_task_queue_factory.h"
+#include "api/task_queue/pending_task_safety_flag.h"
+#include "api/task_queue/task_queue_factory.h"
+#include "api/test/mock_async_dns_resolver.h"
+#include "api/transport/field_trial_based_config.h"
+#include "api/uma_metrics.h"
+#include "api/units/time_delta.h"
+#include "api/video/video_rotation.h"
+#include "api/video_codecs/sdp_video_format.h"
+#include "api/video_codecs/video_decoder_factory.h"
+#include "api/video_codecs/video_encoder_factory.h"
+#include "call/call.h"
+#include "logging/rtc_event_log/fake_rtc_event_log_factory.h"
+#include "media/base/media_engine.h"
+#include "media/base/stream_params.h"
+#include "media/engine/fake_webrtc_video_engine.h"
+#include "media/engine/webrtc_media_engine.h"
+#include "media/engine/webrtc_media_engine_defaults.h"
+#include "modules/audio_device/include/audio_device.h"
+#include "modules/audio_processing/include/audio_processing.h"
+#include "modules/audio_processing/test/audio_processing_builder_for_testing.h"
+#include "p2p/base/fake_ice_transport.h"
+#include "p2p/base/ice_transport_internal.h"
+#include "p2p/base/mock_async_resolver.h"
+#include "p2p/base/p2p_constants.h"
+#include "p2p/base/port.h"
+#include "p2p/base/port_allocator.h"
+#include "p2p/base/port_interface.h"
+#include "p2p/base/test_stun_server.h"
+#include "p2p/base/test_turn_customizer.h"
+#include "p2p/base/test_turn_server.h"
+#include "p2p/client/basic_port_allocator.h"
+#include "pc/dtmf_sender.h"
+#include "pc/local_audio_source.h"
+#include "pc/media_session.h"
+#include "pc/peer_connection.h"
+#include "pc/peer_connection_factory.h"
+#include "pc/peer_connection_proxy.h"
+#include "pc/rtp_media_utils.h"
+#include "pc/session_description.h"
+#include "pc/test/fake_audio_capture_module.h"
+#include "pc/test/fake_periodic_video_source.h"
+#include "pc/test/fake_periodic_video_track_source.h"
+#include "pc/test/fake_rtc_certificate_generator.h"
+#include "pc/test/fake_video_track_renderer.h"
+#include "pc/test/mock_peer_connection_observers.h"
+#include "pc/video_track_source.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/event.h"
+#include "rtc_base/fake_clock.h"
+#include "rtc_base/fake_mdns_responder.h"
+#include "rtc_base/fake_network.h"
+#include "rtc_base/firewall_socket_server.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/helpers.h"
+#include "rtc_base/ip_address.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/mdns_responder_interface.h"
+#include "rtc_base/numerics/safe_conversions.h"
+#include "rtc_base/rtc_certificate_generator.h"
+#include "rtc_base/socket_address.h"
+#include "rtc_base/ssl_stream_adapter.h"
+#include "rtc_base/task_queue_for_test.h"
+#include "rtc_base/task_utils/repeating_task.h"
+#include "rtc_base/test_certificate_verifier.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/thread_annotations.h"
+#include "rtc_base/time_utils.h"
+#include "rtc_base/virtual_socket_server.h"
+#include "system_wrappers/include/metrics.h"
+#include "test/gmock.h"
+#include "test/scoped_key_value_config.h"
+
+namespace webrtc {
+
+using ::cricket::ContentInfo;
+using ::cricket::StreamParams;
+using ::rtc::SocketAddress;
+using ::testing::_;
+using ::testing::Combine;
+using ::testing::Contains;
+using ::testing::DoAll;
+using ::testing::ElementsAre;
+using ::testing::InvokeArgument;
+using ::testing::NiceMock;
+using ::testing::Return;
+using ::testing::SetArgPointee;
+using ::testing::UnorderedElementsAreArray;
+using ::testing::Values;
+using RTCConfiguration = PeerConnectionInterface::RTCConfiguration;
+
+// All timeout/wait constants below are in milliseconds.
+static const int kDefaultTimeout = 10000;
+static const int kMaxWaitForStatsMs = 3000;
+static const int kMaxWaitForActivationMs = 5000;
+static const int kMaxWaitForFramesMs = 10000;
+// Default number of audio/video frames to wait for before considering a test
+// successful.
+static const int kDefaultExpectedAudioFrameCount = 3;
+static const int kDefaultExpectedVideoFrameCount = 3;
+
+// Label used for data channels created by the wrapper's CreateDataChannel().
+static const char kDataChannelLabel[] = "data_channel";
+
+// SRTP cipher name negotiated by the tests. This must be updated if the
+// default changes.
+static const int kDefaultSrtpCryptoSuite = rtc::kSrtpAes128CmSha1_80;
+static const int kDefaultSrtpCryptoSuiteGcm = rtc::kSrtpAeadAes256Gcm;
+
+// Local interface address registered with the fake network manager in Init().
+static const SocketAddress kDefaultLocalAddress("192.168.1.1", 0);
+
+// Helper function for constructing offer/answer options to initiate an ICE
+// restart.
+PeerConnectionInterface::RTCOfferAnswerOptions IceRestartOfferAnswerOptions();
+
+// Remove all stream information (SSRCs, track IDs, etc.) and "msid-semantic"
+// attribute from received SDP, simulating a legacy endpoint.
+void RemoveSsrcsAndMsids(cricket::SessionDescription* desc);
+
+// Removes all stream information besides the stream ids, simulating an
+// endpoint that only signals a=msid lines to convey stream_ids.
+void RemoveSsrcsAndKeepMsids(cricket::SessionDescription* desc);
+
+// Replaces the stream's primary SSRC and updates the first SSRC of all
+// ssrc-groups.
+void ReplaceFirstSsrc(StreamParams& stream, uint32_t ssrc);
+
+// Returns the index of the first entry in `inbound_rtps` whose media kind
+// matches `kind` ("audio"/"video"). Definition not visible in this chunk.
+int FindFirstMediaStatsIndexByKind(
+    const std::string& kind,
+    const std::vector<const webrtc::RTCInboundRtpStreamStats*>& inbound_rtps);
+
+// Metronome used by the test PeerConnectionFactory; ticks are delivered via
+// posted tasks. Member definitions are not in this chunk.
+class TaskQueueMetronome : public webrtc::Metronome {
+ public:
+  explicit TaskQueueMetronome(TimeDelta tick_period);
+  ~TaskQueueMetronome() override;
+
+  // webrtc::Metronome implementation.
+  void RequestCallOnNextTick(absl::AnyInvocable<void() &&> callback) override;
+  TimeDelta TickPeriod() const override;
+
+ private:
+  const TimeDelta tick_period_;
+  // Starts detached so the metronome may be constructed on any sequence.
+  SequenceChecker sequence_checker_{SequenceChecker::kDetached};
+  // Callbacks waiting for the next tick.
+  std::vector<absl::AnyInvocable<void() &&>> callbacks_;
+  ScopedTaskSafetyDetached safety_;
+};
+
+// Interface through which one peer wrapper delivers SDP and ICE messages to
+// the other over the fake (in-process) signaling channel.
+class SignalingMessageReceiver {
+ public:
+  virtual void ReceiveSdpMessage(SdpType type, const std::string& msg) = 0;
+  virtual void ReceiveIceMessage(const std::string& sdp_mid,
+                                 int sdp_mline_index,
+                                 const std::string& msg) = 0;
+
+ protected:
+  // Instances are only created/destroyed via derived classes.
+  SignalingMessageReceiver() {}
+  virtual ~SignalingMessageReceiver() {}
+};
+
+// Receiver observer that records whether OnFirstPacketReceived has fired, and
+// asserts it fired for the expected media type.
+class MockRtpReceiverObserver : public webrtc::RtpReceiverObserverInterface {
+ public:
+  explicit MockRtpReceiverObserver(cricket::MediaType media_type)
+      : expected_media_type_(media_type) {}
+
+  void OnFirstPacketReceived(cricket::MediaType media_type) override {
+    ASSERT_EQ(expected_media_type_, media_type);
+    first_packet_received_ = true;
+  }
+
+  bool first_packet_received() const { return first_packet_received_; }
+
+  virtual ~MockRtpReceiverObserver() {}
+
+ private:
+  bool first_packet_received_ = false;
+  cricket::MediaType expected_media_type_;
+};
+
+// Helper class that wraps a peer connection, observes it, and can accept
+// signaling messages from another wrapper.
+//
+// Uses a fake network, fake A/V capture, and optionally fake
+// encoders/decoders, though they aren't used by default since they don't
+// advertise support of any codecs.
+// TODO(steveanton): See how this could become a subclass of
+// PeerConnectionWrapper defined in peerconnectionwrapper.h.
+class PeerConnectionIntegrationWrapper : public webrtc::PeerConnectionObserver,
+ public SignalingMessageReceiver {
+ public:
+  // Non-owning accessor for the wrapped factory.
+  webrtc::PeerConnectionFactoryInterface* pc_factory() const {
+    return peer_connection_factory_.get();
+  }
+
+  // Non-owning accessor for the wrapped PeerConnection.
+  webrtc::PeerConnectionInterface* pc() const { return peer_connection_.get(); }
+
+  // If a signaling message receiver is set (via ConnectFakeSignaling), this
+  // will set the whole offer/answer exchange in motion. Just need to wait for
+  // the signaling state to reach "stable".
+  void CreateAndSetAndSignalOffer() {
+    auto offer = CreateOfferAndWait();
+    ASSERT_NE(nullptr, offer);
+    EXPECT_TRUE(SetLocalDescriptionAndSendSdpMessage(std::move(offer)));
+  }
+
+  // Sets the options to be used when CreateAndSetAndSignalOffer is called, or
+  // when a remote offer is received (via fake signaling) and an answer is
+  // generated. By default, uses default options.
+  void SetOfferAnswerOptions(
+      const PeerConnectionInterface::RTCOfferAnswerOptions& options) {
+    offer_answer_options_ = options;
+  }
+
+  // Set a callback to be invoked when SDP is received via the fake signaling
+  // channel, which provides an opportunity to munge (modify) the SDP. This is
+  // used to test SDP being applied that a PeerConnection would normally not
+  // generate, but a non-JSEP endpoint might.
+  void SetReceivedSdpMunger(
+      std::function<void(cricket::SessionDescription*)> munger) {
+    received_sdp_munger_ = std::move(munger);
+  }
+
+  // Similar to the above, but this is run on SDP immediately after it's
+  // generated.
+  void SetGeneratedSdpMunger(
+      std::function<void(cricket::SessionDescription*)> munger) {
+    generated_sdp_munger_ = std::move(munger);
+  }
+
+  // Set a callback to be invoked when a remote offer is received via the fake
+  // signaling channel. This provides an opportunity to change the
+  // PeerConnection state before an answer is created and sent to the caller.
+  void SetRemoteOfferHandler(std::function<void()> handler) {
+    remote_offer_handler_ = std::move(handler);
+  }
+
+  // `resolver` is not owned; it must outlive this wrapper.
+  void SetRemoteAsyncResolver(MockAsyncDnsResolver* resolver) {
+    remote_async_dns_resolver_ = resolver;
+  }
+
+  // Every ICE connection state in order that has been seen by the observer.
+  // Note: returned by value (a copy of the history).
+  std::vector<PeerConnectionInterface::IceConnectionState>
+  ice_connection_state_history() const {
+    return ice_connection_state_history_;
+  }
+  void clear_ice_connection_state_history() {
+    ice_connection_state_history_.clear();
+  }
+
+  // Every standardized ICE connection state in order that has been seen by the
+  // observer.
+  std::vector<PeerConnectionInterface::IceConnectionState>
+  standardized_ice_connection_state_history() const {
+    return standardized_ice_connection_state_history_;
+  }
+
+  // Every PeerConnection state in order that has been seen by the observer.
+  std::vector<PeerConnectionInterface::PeerConnectionState>
+  peer_connection_state_history() const {
+    return peer_connection_state_history_;
+  }
+
+  // Every ICE gathering state in order that has been seen by the observer.
+  std::vector<PeerConnectionInterface::IceGatheringState>
+  ice_gathering_state_history() const {
+    return ice_gathering_state_history_;
+  }
+  // Every selected-candidate-pair change event seen by the observer.
+  std::vector<cricket::CandidatePairChangeEvent>
+  ice_candidate_pair_change_history() const {
+    return ice_candidate_pair_change_history_;
+  }
+
+  // Every PeerConnection signaling state in order that has been seen by the
+  // observer.
+  std::vector<PeerConnectionInterface::SignalingState>
+  peer_connection_signaling_state_history() const {
+    return peer_connection_signaling_state_history_;
+  }
+
+  // Convenience: add one default audio track and one default video track.
+  void AddAudioVideoTracks() {
+    AddAudioTrack();
+    AddVideoTrack();
+  }
+
+  rtc::scoped_refptr<RtpSenderInterface> AddAudioTrack() {
+    return AddTrack(CreateLocalAudioTrack());
+  }
+
+  rtc::scoped_refptr<RtpSenderInterface> AddVideoTrack() {
+    return AddTrack(CreateLocalVideoTrack());
+  }
+
+  // Creates an audio track backed by the factory's (fake) audio source, with
+  // a random UUID as the track id.
+  rtc::scoped_refptr<webrtc::AudioTrackInterface> CreateLocalAudioTrack() {
+    cricket::AudioOptions options;
+    // Disable highpass filter so that we can get all the test audio frames.
+    options.highpass_filter = false;
+    rtc::scoped_refptr<webrtc::AudioSourceInterface> source =
+        peer_connection_factory_->CreateAudioSource(options);
+    // TODO(perkj): Test audio source when it is implemented. Currently audio
+    // always use the default input.
+    return peer_connection_factory_->CreateAudioTrack(rtc::CreateRandomUuid(),
+                                                      source.get());
+  }
+
+  // Creates a video track with a fake periodic source using default config.
+  rtc::scoped_refptr<webrtc::VideoTrackInterface> CreateLocalVideoTrack() {
+    webrtc::FakePeriodicVideoSource::Config config;
+    config.timestamp_offset_ms = rtc::TimeMillis();
+    return CreateLocalVideoTrackInternal(config);
+  }
+
+  rtc::scoped_refptr<webrtc::VideoTrackInterface>
+  CreateLocalVideoTrackWithConfig(
+      webrtc::FakePeriodicVideoSource::Config config) {
+    return CreateLocalVideoTrackInternal(config);
+  }
+
+  // Like CreateLocalVideoTrack(), but frames carry the given rotation.
+  rtc::scoped_refptr<webrtc::VideoTrackInterface>
+  CreateLocalVideoTrackWithRotation(webrtc::VideoRotation rotation) {
+    webrtc::FakePeriodicVideoSource::Config config;
+    config.rotation = rotation;
+    config.timestamp_offset_ms = rtc::TimeMillis();
+    return CreateLocalVideoTrackInternal(config);
+  }
+
+  // Adds `track` to the PeerConnection; EXPECTs success and returns the new
+  // sender, or null on failure.
+  rtc::scoped_refptr<RtpSenderInterface> AddTrack(
+      rtc::scoped_refptr<MediaStreamTrackInterface> track,
+      const std::vector<std::string>& stream_ids = {}) {
+    EXPECT_TRUE(track);
+    if (!track) {
+      return nullptr;
+    }
+    auto result = pc()->AddTrack(track, stream_ids);
+    EXPECT_EQ(RTCErrorType::NONE, result.error().type());
+    if (result.ok()) {
+      return result.MoveValue();
+    } else {
+      return nullptr;
+    }
+  }
+
+  // Returns all receivers whose media type matches `media_type`.
+  std::vector<rtc::scoped_refptr<RtpReceiverInterface>> GetReceiversOfType(
+      cricket::MediaType media_type) {
+    std::vector<rtc::scoped_refptr<RtpReceiverInterface>> receivers;
+    for (const auto& receiver : pc()->GetReceivers()) {
+      if (receiver->media_type() == media_type) {
+        receivers.push_back(receiver);
+      }
+    }
+    return receivers;
+  }
+
+  // Returns the first transceiver whose receiver has the given media type, or
+  // null if there is none.
+  rtc::scoped_refptr<RtpTransceiverInterface> GetFirstTransceiverOfType(
+      cricket::MediaType media_type) {
+    for (auto transceiver : pc()->GetTransceivers()) {
+      if (transceiver->receiver()->media_type() == media_type) {
+        return transceiver;
+      }
+    }
+    return nullptr;
+  }
+
+  bool SignalingStateStable() {
+    return pc()->signaling_state() == webrtc::PeerConnectionInterface::kStable;
+  }
+
+  bool IceGatheringStateComplete() {
+    return pc()->ice_gathering_state() ==
+           webrtc::PeerConnectionInterface::kIceGatheringComplete;
+  }
+
+  // Creates a data channel with the default label and default init.
+  void CreateDataChannel() { CreateDataChannel(nullptr); }
+
+  void CreateDataChannel(const webrtc::DataChannelInit* init) {
+    CreateDataChannel(kDataChannelLabel, init);
+  }
+
+  // Creates a data channel and installs an observer on it; both are appended
+  // to the lists returned by data_channels()/data_observers().
+  void CreateDataChannel(const std::string& label,
+                         const webrtc::DataChannelInit* init) {
+    auto data_channel_or_error = pc()->CreateDataChannelOrError(label, init);
+    ASSERT_TRUE(data_channel_or_error.ok());
+    data_channels_.push_back(data_channel_or_error.MoveValue());
+    ASSERT_TRUE(data_channels_.back().get() != nullptr);
+    data_observers_.push_back(
+        std::make_unique<MockDataChannelObserver>(data_channels_.back().get()));
+  }
+
+  // Return the last observed data channel.
+  DataChannelInterface* data_channel() {
+    if (data_channels_.size() == 0) {
+      return nullptr;
+    }
+    return data_channels_.back().get();
+  }
+  // Return all data channels.
+  std::vector<rtc::scoped_refptr<DataChannelInterface>>& data_channels() {
+    return data_channels_;
+  }
+
+  // Observer for the most recently created data channel, or null if none.
+  const MockDataChannelObserver* data_observer() const {
+    if (data_observers_.size() == 0) {
+      return nullptr;
+    }
+    return data_observers_.back().get();
+  }
+
+  std::vector<std::unique_ptr<MockDataChannelObserver>>& data_observers() {
+    return data_observers_;
+  }
+
+  // Number of audio frames delivered to the fake capture module.
+  int audio_frames_received() const {
+    return fake_audio_capture_module_->frames_received();
+  }
+
+  // Takes minimum of video frames received for each track.
+  //
+  // Can be used like:
+  // EXPECT_GE(expected_frames, min_video_frames_received_per_track());
+  //
+  // To ensure that all video tracks received at least a certain number of
+  // frames.
+  int min_video_frames_received_per_track() const {
+    int min_frames = INT_MAX;
+    if (fake_video_renderers_.empty()) {
+      return 0;
+    }
+
+    for (const auto& pair : fake_video_renderers_) {
+      min_frames = std::min(min_frames, pair.second->num_rendered_frames());
+    }
+    return min_frames;
+  }
+
+  // Returns a MockStatsObserver in a state after stats gathering finished,
+  // which can be used to access the gathered stats.
+  rtc::scoped_refptr<MockStatsObserver> OldGetStatsForTrack(
+      webrtc::MediaStreamTrackInterface* track) {
+    auto observer = rtc::make_ref_counted<MockStatsObserver>();
+    EXPECT_TRUE(peer_connection_->GetStats(
+        observer.get(), nullptr,
+        PeerConnectionInterface::kStatsOutputLevelStandard));
+    EXPECT_TRUE_WAIT(observer->called(), kDefaultTimeout);
+    return observer;
+  }
+
+  // Version that doesn't take a track "filter", and gathers all stats.
+  rtc::scoped_refptr<MockStatsObserver> OldGetStats() {
+    return OldGetStatsForTrack(nullptr);
+  }
+
+  // Synchronously gets stats and returns them. If it times out, fails the test
+  // and returns null.
+  rtc::scoped_refptr<const webrtc::RTCStatsReport> NewGetStats() {
+    auto callback =
+        rtc::make_ref_counted<webrtc::MockRTCStatsCollectorCallback>();
+    peer_connection_->GetStats(callback.get());
+    EXPECT_TRUE_WAIT(callback->called(), kDefaultTimeout);
+    return callback->report();
+  }
+
+  // Width of the first remote renderer's latest frame; fails the test (but
+  // returns 0) if no renderer exists yet.
+  int rendered_width() {
+    EXPECT_FALSE(fake_video_renderers_.empty());
+    return fake_video_renderers_.empty()
+               ? 0
+               : fake_video_renderers_.begin()->second->width();
+  }
+
+  int rendered_height() {
+    EXPECT_FALSE(fake_video_renderers_.empty());
+    return fake_video_renderers_.empty()
+               ? 0
+               : fake_video_renderers_.begin()->second->height();
+  }
+
+  // width/height of the first remote renderer; 0.0 when height is 0.
+  double rendered_aspect_ratio() {
+    if (rendered_height() == 0) {
+      return 0.0;
+    }
+    return static_cast<double>(rendered_width()) / rendered_height();
+  }
+
+  webrtc::VideoRotation rendered_rotation() {
+    EXPECT_FALSE(fake_video_renderers_.empty());
+    return fake_video_renderers_.empty()
+               ? webrtc::kVideoRotation_0
+               : fake_video_renderers_.begin()->second->rotation();
+  }
+
+  // Dimensions of the locally-rendered (loopback) video, or 0 if no local
+  // renderer has been created.
+  int local_rendered_width() {
+    return local_video_renderer_ ? local_video_renderer_->width() : 0;
+  }
+
+  int local_rendered_height() {
+    return local_video_renderer_ ? local_video_renderer_->height() : 0;
+  }
+
+  double local_rendered_aspect_ratio() {
+    if (local_rendered_height() == 0) {
+      return 0.0;
+    }
+    return static_cast<double>(local_rendered_width()) /
+           local_rendered_height();
+  }
+
+  size_t number_of_remote_streams() {
+    if (!pc()) {
+      return 0;
+    }
+    return pc()->remote_streams()->count();
+  }
+
+  // Fails the test and returns null if the PeerConnection is gone.
+  StreamCollectionInterface* remote_streams() const {
+    if (!pc()) {
+      ADD_FAILURE();
+      return nullptr;
+    }
+    return pc()->remote_streams().get();
+  }
+
+  StreamCollectionInterface* local_streams() {
+    if (!pc()) {
+      ADD_FAILURE();
+      return nullptr;
+    }
+    return pc()->local_streams().get();
+  }
+
+  webrtc::PeerConnectionInterface::SignalingState signaling_state() {
+    return pc()->signaling_state();
+  }
+
+  webrtc::PeerConnectionInterface::IceConnectionState ice_connection_state() {
+    return pc()->ice_connection_state();
+  }
+
+  webrtc::PeerConnectionInterface::IceConnectionState
+  standardized_ice_connection_state() {
+    return pc()->standardized_ice_connection_state();
+  }
+
+  webrtc::PeerConnectionInterface::IceGatheringState ice_gathering_state() {
+    return pc()->ice_gathering_state();
+  }
+
+  // Returns a MockRtpReceiverObserver for each RtpReceiver returned by
+  // GetReceivers. They're updated automatically when a remote offer/answer
+  // from the fake signaling channel is applied, or when
+  // ResetRtpReceiverObservers below is called.
+  const std::vector<std::unique_ptr<MockRtpReceiverObserver>>&
+  rtp_receiver_observers() {
+    return rtp_receiver_observers_;
+  }
+
+  // Discards existing observers and installs a fresh one on every current
+  // receiver. Note: SetObserver takes a raw pointer; this wrapper keeps
+  // ownership in rtp_receiver_observers_.
+  void ResetRtpReceiverObservers() {
+    rtp_receiver_observers_.clear();
+    for (const rtc::scoped_refptr<RtpReceiverInterface>& receiver :
+         pc()->GetReceivers()) {
+      std::unique_ptr<MockRtpReceiverObserver> observer(
+          new MockRtpReceiverObserver(receiver->media_type()));
+      receiver->SetObserver(observer.get());
+      rtp_receiver_observers_.push_back(std::move(observer));
+    }
+  }
+
+  rtc::FakeNetworkManager* network_manager() const {
+    return fake_network_manager_.get();
+  }
+  cricket::PortAllocator* port_allocator() const { return port_allocator_; }
+
+  webrtc::FakeRtcEventLogFactory* event_log_factory() const {
+    return event_log_factory_;
+  }
+
+  const cricket::Candidate& last_candidate_gathered() const {
+    return last_candidate_gathered_;
+  }
+  const cricket::IceCandidateErrorEvent& error_event() const {
+    return error_event_;
+  }
+
+  // Sets the mDNS responder for the owned fake network manager and keeps a
+  // reference to the responder.
+  void SetMdnsResponder(
+      std::unique_ptr<webrtc::FakeMdnsResponder> mdns_responder) {
+    RTC_DCHECK(mdns_responder != nullptr);
+    mdns_responder_ = mdns_responder.get();
+    network_manager()->set_mdns_responder(std::move(mdns_responder));
+  }
+
+  // Returns null on failure.
+  std::unique_ptr<SessionDescriptionInterface> CreateOfferAndWait() {
+    auto observer =
+        rtc::make_ref_counted<MockCreateSessionDescriptionObserver>();
+    pc()->CreateOffer(observer.get(), offer_answer_options_);
+    return WaitForDescriptionFromObserver(observer.get());
+  }
+  // Applies an SDP rollback; returns whether SetRemoteDescription succeeded.
+  bool Rollback() {
+    return SetRemoteDescription(
+        webrtc::CreateSessionDescription(SdpType::kRollback, ""));
+  }
+
+  // Functions for querying stats.
+  // Snapshots baseline audio packet/delay/sample counters from the first
+  // inbound-rtp stats object; UpdateDelayStats() later computes deltas against
+  // these. Assumes at least one inbound-rtp stream with defined counters.
+  void StartWatchingDelayStats() {
+    // Get the baseline numbers for audio_packets and audio_delay.
+    auto received_stats = NewGetStats();
+    auto rtp_stats =
+        received_stats->GetStatsOfType<webrtc::RTCInboundRtpStreamStats>()[0];
+    ASSERT_TRUE(rtp_stats->relative_packet_arrival_delay.is_defined());
+    ASSERT_TRUE(rtp_stats->packets_received.is_defined());
+    rtp_stats_id_ = rtp_stats->id();
+    audio_packets_stat_ = *rtp_stats->packets_received;
+    audio_delay_stat_ = *rtp_stats->relative_packet_arrival_delay;
+    audio_samples_stat_ = *rtp_stats->total_samples_received;
+    audio_concealed_stat_ = *rtp_stats->concealed_samples;
+  }
+
+  // Re-queries the stream watched by StartWatchingDelayStats() and EXPECTs
+  // that per-packet arrival delay and concealment stayed under (deliberately
+  // loose, debug-vs-release dependent) regression thresholds. Also advances
+  // the trailing baseline counters.
+  void UpdateDelayStats(std::string tag, int desc_size) {
+    auto report = NewGetStats();
+    auto rtp_stats =
+        report->GetAs<webrtc::RTCInboundRtpStreamStats>(rtp_stats_id_);
+    ASSERT_TRUE(rtp_stats);
+    auto delta_packets = *rtp_stats->packets_received - audio_packets_stat_;
+    auto delta_rpad =
+        *rtp_stats->relative_packet_arrival_delay - audio_delay_stat_;
+    auto recent_delay = delta_packets > 0 ? delta_rpad / delta_packets : -1;
+    // The purpose of these checks is to sound the alarm early if we introduce
+    // serious regressions. The numbers are not acceptable for production, but
+    // occur on slow bots.
+    //
+    // An average relative packet arrival delay over the renegotiation of
+    // > 100 ms indicates that something is dramatically wrong, and will impact
+    // quality for sure.
+    // Worst bots:
+    // linux_x86_dbg at 0.206
+#if !defined(NDEBUG)
+    EXPECT_GT(0.25, recent_delay) << tag << " size " << desc_size;
+#else
+    EXPECT_GT(0.1, recent_delay) << tag << " size " << desc_size;
+#endif
+    auto delta_samples =
+        *rtp_stats->total_samples_received - audio_samples_stat_;
+    auto delta_concealed =
+        *rtp_stats->concealed_samples - audio_concealed_stat_;
+    // These limits should be adjusted down as we improve:
+    //
+    // Concealing more than 4000 samples during a renegotiation is unacceptable.
+    // But some bots are slow.
+
+    // Worst bots:
+    // linux_more_configs bot at conceal count 5184
+    // android_arm_rel at conceal count 9241
+    // linux_x86_dbg at 15174
+#if !defined(NDEBUG)
+    EXPECT_GT(18000U, delta_concealed) << "Concealed " << delta_concealed
+                                       << " of " << delta_samples << " samples";
+#else
+    EXPECT_GT(15000U, delta_concealed) << "Concealed " << delta_concealed
+                                       << " of " << delta_samples << " samples";
+#endif
+    // Concealing more than 20% of samples during a renegotiation is
+    // unacceptable.
+    // Worst bots:
+    // Nondebug: Linux32 Release at conceal rate 0.606597 (CI run)
+    // Debug: linux_x86_dbg bot at conceal rate 0.854
+    // internal bot at conceal rate 0.967 (b/294020344)
+    // TODO(https://crbug.com/webrtc/15393): Improve audio quality during
+    // renegotiation so that we can reduce these thresholds, 99% is not even
+    // close to the 20% deemed unacceptable above or the 0% that would be ideal.
+    if (delta_samples > 0) {
+#if !defined(NDEBUG)
+      EXPECT_LT(1.0 * delta_concealed / delta_samples, 0.99)
+          << "Concealed " << delta_concealed << " of " << delta_samples
+          << " samples";
+#else
+      EXPECT_LT(1.0 * delta_concealed / delta_samples, 0.7)
+          << "Concealed " << delta_concealed << " of " << delta_samples
+          << " samples";
+#endif
+    }
+    // Increment trailing counters
+    audio_packets_stat_ = *rtp_stats->packets_received;
+    audio_delay_stat_ = *rtp_stats->relative_packet_arrival_delay;
+    audio_samples_stat_ = *rtp_stats->total_samples_received;
+    audio_concealed_stat_ = *rtp_stats->concealed_samples;
+  }
+
+  // Sets number of candidates expected
+  void ExpectCandidates(int candidate_count) {
+    candidates_expected_ = candidate_count;
+  }
+
+ private:
+  // Constructor used by friend class PeerConnectionIntegrationBaseTest.
+  explicit PeerConnectionIntegrationWrapper(const std::string& debug_name)
+      : debug_name_(debug_name) {}
+
+  // Builds the full stack under test: fake network + port allocator, fake
+  // ADM, media engine (optional), factory, and finally the PeerConnection.
+  // Returns false on any failure. Must only be called once per wrapper.
+  bool Init(const PeerConnectionFactory::Options* options,
+            const PeerConnectionInterface::RTCConfiguration* config,
+            webrtc::PeerConnectionDependencies dependencies,
+            rtc::SocketServer* socket_server,
+            rtc::Thread* network_thread,
+            rtc::Thread* worker_thread,
+            std::unique_ptr<webrtc::FakeRtcEventLogFactory> event_log_factory,
+            bool reset_encoder_factory,
+            bool reset_decoder_factory,
+            bool create_media_engine) {
+    // There's an error in this test code if Init ends up being called twice.
+    RTC_DCHECK(!peer_connection_);
+    RTC_DCHECK(!peer_connection_factory_);
+
+    fake_network_manager_.reset(new rtc::FakeNetworkManager());
+    fake_network_manager_->AddInterface(kDefaultLocalAddress);
+
+    socket_factory_.reset(new rtc::BasicPacketSocketFactory(socket_server));
+
+    std::unique_ptr<cricket::PortAllocator> port_allocator(
+        new cricket::BasicPortAllocator(fake_network_manager_.get(),
+                                        socket_factory_.get()));
+    // Raw pointer kept for test accessors; ownership moves into dependencies
+    // below.
+    port_allocator_ = port_allocator.get();
+    fake_audio_capture_module_ = FakeAudioCaptureModule::Create();
+    if (!fake_audio_capture_module_) {
+      return false;
+    }
+    rtc::Thread* const signaling_thread = rtc::Thread::Current();
+
+    webrtc::PeerConnectionFactoryDependencies pc_factory_dependencies;
+    pc_factory_dependencies.network_thread = network_thread;
+    pc_factory_dependencies.worker_thread = worker_thread;
+    pc_factory_dependencies.signaling_thread = signaling_thread;
+    pc_factory_dependencies.task_queue_factory =
+        webrtc::CreateDefaultTaskQueueFactory();
+    pc_factory_dependencies.trials = std::make_unique<FieldTrialBasedConfig>();
+    pc_factory_dependencies.metronome =
+        std::make_unique<TaskQueueMetronome>(TimeDelta::Millis(8));
+    cricket::MediaEngineDependencies media_deps;
+    media_deps.task_queue_factory =
+        pc_factory_dependencies.task_queue_factory.get();
+    media_deps.adm = fake_audio_capture_module_;
+    webrtc::SetMediaEngineDefaults(&media_deps);
+
+    // Tests may drop the codec factories to simulate endpoints that
+    // advertise no codecs.
+    if (reset_encoder_factory) {
+      media_deps.video_encoder_factory.reset();
+    }
+    if (reset_decoder_factory) {
+      media_deps.video_decoder_factory.reset();
+    }
+
+    if (!media_deps.audio_processing) {
+      // If the standard Creation method for APM returns a null pointer, instead
+      // use the builder for testing to create an APM object.
+      media_deps.audio_processing = AudioProcessingBuilderForTesting().Create();
+    }
+
+    media_deps.trials = pc_factory_dependencies.trials.get();
+
+    if (create_media_engine) {
+      pc_factory_dependencies.media_engine =
+          cricket::CreateMediaEngine(std::move(media_deps));
+    }
+    pc_factory_dependencies.call_factory = webrtc::CreateCallFactory();
+    // Use the supplied fake event log factory when given (and expose it via
+    // event_log_factory()); otherwise fall back to the real one.
+    if (event_log_factory) {
+      event_log_factory_ = event_log_factory.get();
+      pc_factory_dependencies.event_log_factory = std::move(event_log_factory);
+    } else {
+      pc_factory_dependencies.event_log_factory =
+          std::make_unique<webrtc::RtcEventLogFactory>(
+              pc_factory_dependencies.task_queue_factory.get());
+    }
+    peer_connection_factory_ = webrtc::CreateModularPeerConnectionFactory(
+        std::move(pc_factory_dependencies));
+
+    if (!peer_connection_factory_) {
+      return false;
+    }
+    if (options) {
+      peer_connection_factory_->SetOptions(*options);
+    }
+    if (config) {
+      sdp_semantics_ = config->sdp_semantics;
+    }
+
+    dependencies.allocator = std::move(port_allocator);
+    peer_connection_ = CreatePeerConnection(config, std::move(dependencies));
+    return peer_connection_.get() != nullptr;
+  }
+
+  // Creates the PeerConnection with this wrapper installed as observer.
+  // Returns null if creation fails.
+  rtc::scoped_refptr<webrtc::PeerConnectionInterface> CreatePeerConnection(
+      const PeerConnectionInterface::RTCConfiguration* config,
+      webrtc::PeerConnectionDependencies dependencies) {
+    PeerConnectionInterface::RTCConfiguration modified_config;
+    modified_config.sdp_semantics = sdp_semantics_;
+    // If `config` is null, this will result in a default configuration being
+    // used.
+    if (config) {
+      modified_config = *config;
+    }
+    // Disable resolution adaptation; we don't want it interfering with the
+    // test results.
+    // TODO(deadbeef): Do something more robust. Since we're testing for aspect
+    // ratios and not specific resolutions, is this even necessary?
+    modified_config.set_cpu_adaptation(false);
+
+    dependencies.observer = this;
+    auto peer_connection_or_error =
+        peer_connection_factory_->CreatePeerConnectionOrError(
+            modified_config, std::move(dependencies));
+    return peer_connection_or_error.ok() ? peer_connection_or_error.MoveValue()
+                                         : nullptr;
+  }
+
+  // Wires up the fake signaling channel to the remote peer's receiver.
+  void set_signaling_message_receiver(
+      SignalingMessageReceiver* signaling_message_receiver) {
+    signaling_message_receiver_ = signaling_message_receiver;
+  }
+
+  void set_signaling_delay_ms(int delay_ms) { signaling_delay_ms_ = delay_ms; }
+
+  void set_signal_ice_candidates(bool signal) {
+    signal_ice_candidates_ = signal;
+  }
+
+  // Creates a video track from a FakePeriodicVideoTrackSource and lazily
+  // creates a local renderer for the first track.
+  rtc::scoped_refptr<webrtc::VideoTrackInterface> CreateLocalVideoTrackInternal(
+      webrtc::FakePeriodicVideoSource::Config config) {
+    // Set max frame rate to 10fps to reduce the risk of test flakiness.
+    // TODO(deadbeef): Do something more robust.
+    config.frame_interval_ms = 100;
+
+    video_track_sources_.emplace_back(
+        rtc::make_ref_counted<webrtc::FakePeriodicVideoTrackSource>(
+            config, false /* remote */));
+    rtc::scoped_refptr<webrtc::VideoTrackInterface> track =
+        peer_connection_factory_->CreateVideoTrack(video_track_sources_.back(),
+                                                   rtc::CreateRandomUuid());
+    if (!local_video_renderer_) {
+      local_video_renderer_.reset(
+          new webrtc::FakeVideoTrackRenderer(track.get()));
+    }
+    return track;
+  }
+
+  // Applies a remote offer (after optional munging), then creates, applies
+  // and signals an answer back over the fake channel.
+  void HandleIncomingOffer(const std::string& msg) {
+    RTC_LOG(LS_INFO) << debug_name_ << ": HandleIncomingOffer";
+    std::unique_ptr<SessionDescriptionInterface> desc =
+        webrtc::CreateSessionDescription(SdpType::kOffer, msg);
+    if (received_sdp_munger_) {
+      received_sdp_munger_(desc->description());
+    }
+
+    EXPECT_TRUE(SetRemoteDescription(std::move(desc)));
+    // Setting a remote description may have changed the number of receivers,
+    // so reset the receiver observers.
+    ResetRtpReceiverObservers();
+    if (remote_offer_handler_) {
+      remote_offer_handler_();
+    }
+    auto answer = CreateAnswer();
+    ASSERT_NE(nullptr, answer);
+    EXPECT_TRUE(SetLocalDescriptionAndSendSdpMessage(std::move(answer)));
+  }
+
+  // Applies a remote answer (after optional munging); ends the exchange.
+  void HandleIncomingAnswer(const std::string& msg) {
+    RTC_LOG(LS_INFO) << debug_name_ << ": HandleIncomingAnswer";
+    std::unique_ptr<SessionDescriptionInterface> desc =
+        webrtc::CreateSessionDescription(SdpType::kAnswer, msg);
+    if (received_sdp_munger_) {
+      received_sdp_munger_(desc->description());
+    }
+
+    EXPECT_TRUE(SetRemoteDescription(std::move(desc)));
+    // Set the RtpReceiverObserver after receivers are created.
+    ResetRtpReceiverObservers();
+  }
+
+  // Returns null on failure.
+  std::unique_ptr<SessionDescriptionInterface> CreateAnswer() {
+    auto observer =
+        rtc::make_ref_counted<MockCreateSessionDescriptionObserver>();
+    pc()->CreateAnswer(observer.get(), offer_answer_options_);
+    return WaitForDescriptionFromObserver(observer.get());
+  }
+
+  // Blocks until the create-session-description observer fires; applies the
+  // generated-SDP munger (if any) before returning. Null on failure.
+  std::unique_ptr<SessionDescriptionInterface> WaitForDescriptionFromObserver(
+      MockCreateSessionDescriptionObserver* observer) {
+    EXPECT_EQ_WAIT(true, observer->called(), kDefaultTimeout);
+    if (!observer->result()) {
+      return nullptr;
+    }
+    auto description = observer->MoveDescription();
+    if (generated_sdp_munger_) {
+      generated_sdp_munger_(description->description());
+    }
+    return description;
+  }
+
+  // Setting the local description and sending the SDP message over the fake
+  // signaling channel are combined into the same method because the SDP
+  // message needs to be sent as soon as SetLocalDescription finishes, without
+  // waiting for the observer to be called. This ensures that ICE candidates
+  // don't outrace the description.
+  // NOTE: returns true unconditionally after the wait; the SDP has been sent
+  // regardless of the observer's result (unlike SetRemoteDescription below).
+  bool SetLocalDescriptionAndSendSdpMessage(
+      std::unique_ptr<SessionDescriptionInterface> desc) {
+    auto observer = rtc::make_ref_counted<MockSetSessionDescriptionObserver>();
+    RTC_LOG(LS_INFO) << debug_name_ << ": SetLocalDescriptionAndSendSdpMessage";
+    SdpType type = desc->GetType();
+    std::string sdp;
+    EXPECT_TRUE(desc->ToString(&sdp));
+    RTC_LOG(LS_INFO) << debug_name_ << ": local SDP contents=\n" << sdp;
+    pc()->SetLocalDescription(observer.get(), desc.release());
+    RemoveUnusedVideoRenderers();
+    // As mentioned above, we need to send the message immediately after
+    // SetLocalDescription.
+    SendSdpMessage(type, sdp);
+    EXPECT_TRUE_WAIT(observer->called(), kDefaultTimeout);
+    return true;
+  }
+
+  // Applies a remote description and returns the observer's result.
+  bool SetRemoteDescription(std::unique_ptr<SessionDescriptionInterface> desc) {
+    auto observer = rtc::make_ref_counted<MockSetSessionDescriptionObserver>();
+    RTC_LOG(LS_INFO) << debug_name_ << ": SetRemoteDescription";
+    pc()->SetRemoteDescription(observer.get(), desc.release());
+    RemoveUnusedVideoRenderers();
+    EXPECT_TRUE_WAIT(observer->called(), kDefaultTimeout);
+    return observer->result();
+  }
+
+  // This is a work around to remove unused fake_video_renderers from
+  // transceivers that have either stopped or are no longer receiving.
+  // Only meaningful under Unified Plan (keyed by receiver track id).
+  void RemoveUnusedVideoRenderers() {
+    if (sdp_semantics_ != SdpSemantics::kUnifiedPlan) {
+      return;
+    }
+    auto transceivers = pc()->GetTransceivers();
+    std::set<std::string> active_renderers;
+    for (auto& transceiver : transceivers) {
+      // Note - we don't check for direction here. This function is called
+      // before direction is set, and in that case, we should not remove
+      // the renderer.
+      if (transceiver->receiver()->media_type() == cricket::MEDIA_TYPE_VIDEO) {
+        active_renderers.insert(transceiver->receiver()->track()->id());
+      }
+    }
+    for (auto it = fake_video_renderers_.begin();
+         it != fake_video_renderers_.end();) {
+      // Remove fake video renderers belonging to any non-active transceivers.
+      if (!active_renderers.count(it->first)) {
+        it = fake_video_renderers_.erase(it);
+      } else {
+        it++;
+      }
+    }
+  }
+
+  // Simulate sending a blob of SDP with delay `signaling_delay_ms_` (0 by
+  // default).
+  void SendSdpMessage(SdpType type, const std::string& msg) {
+    if (signaling_delay_ms_ == 0) {
+      RelaySdpMessageIfReceiverExists(type, msg);
+    } else {
+      // SafeTask + task_safety_ ensures the delayed task is dropped if this
+      // wrapper is destroyed before it runs.
+      rtc::Thread::Current()->PostDelayedTask(
+          SafeTask(task_safety_.flag(),
+                   [this, type, msg] {
+                     RelaySdpMessageIfReceiverExists(type, msg);
+                   }),
+          TimeDelta::Millis(signaling_delay_ms_));
+    }
+  }
+
+  // Silently drops the message if no receiver is connected.
+  void RelaySdpMessageIfReceiverExists(SdpType type, const std::string& msg) {
+    if (signaling_message_receiver_) {
+      signaling_message_receiver_->ReceiveSdpMessage(type, msg);
+    }
+  }
+
+  // Simulate trickling an ICE candidate with delay `signaling_delay_ms_` (0 by
+  // default).
+  void SendIceMessage(const std::string& sdp_mid,
+                      int sdp_mline_index,
+                      const std::string& msg) {
+    if (signaling_delay_ms_ == 0) {
+      RelayIceMessageIfReceiverExists(sdp_mid, sdp_mline_index, msg);
+    } else {
+      rtc::Thread::Current()->PostDelayedTask(
+          SafeTask(task_safety_.flag(),
+                   [this, sdp_mid, sdp_mline_index, msg] {
+                     RelayIceMessageIfReceiverExists(sdp_mid, sdp_mline_index,
+                                                     msg);
+                   }),
+          TimeDelta::Millis(signaling_delay_ms_));
+    }
+  }
+
+  // Silently drops the candidate if no receiver is connected.
+  void RelayIceMessageIfReceiverExists(const std::string& sdp_mid,
+                                       int sdp_mline_index,
+                                       const std::string& msg) {
+    if (signaling_message_receiver_) {
+      signaling_message_receiver_->ReceiveIceMessage(sdp_mid, sdp_mline_index,
+                                                     msg);
+    }
+  }
+
+  // SignalingMessageReceiver callbacks.
+  // Dispatches an incoming SDP blob: offers start a full answer round-trip,
+  // everything else is treated as an answer.
+  void ReceiveSdpMessage(SdpType type, const std::string& msg) override {
+    if (type == SdpType::kOffer) {
+      HandleIncomingOffer(msg);
+    } else {
+      HandleIncomingAnswer(msg);
+    }
+  }
+
+  // Parses and adds a trickled remote candidate, waiting for (and EXPECTing)
+  // a successful result.
+  void ReceiveIceMessage(const std::string& sdp_mid,
+                         int sdp_mline_index,
+                         const std::string& msg) override {
+    RTC_LOG(LS_INFO) << debug_name_ << ": ReceiveIceMessage";
+    absl::optional<RTCError> result;
+    pc()->AddIceCandidate(absl::WrapUnique(webrtc::CreateIceCandidate(
+                              sdp_mid, sdp_mline_index, msg, nullptr)),
+                          [&result](RTCError r) { result = r; });
+    EXPECT_TRUE_WAIT(result.has_value(), kDefaultTimeout);
+    EXPECT_TRUE(result.value().ok());
+  }
+
+ // PeerConnectionObserver callbacks.
+ void OnSignalingChange(
+ webrtc::PeerConnectionInterface::SignalingState new_state) override {
+ EXPECT_EQ(pc()->signaling_state(), new_state);
+ peer_connection_signaling_state_history_.push_back(new_state);
+ }
+ void OnAddTrack(rtc::scoped_refptr<RtpReceiverInterface> receiver,
+ const std::vector<rtc::scoped_refptr<MediaStreamInterface>>&
+ streams) override {
+ if (receiver->media_type() == cricket::MEDIA_TYPE_VIDEO) {
+ rtc::scoped_refptr<VideoTrackInterface> video_track(
+ static_cast<VideoTrackInterface*>(receiver->track().get()));
+ ASSERT_TRUE(fake_video_renderers_.find(video_track->id()) ==
+ fake_video_renderers_.end());
+ fake_video_renderers_[video_track->id()] =
+ std::make_unique<FakeVideoTrackRenderer>(video_track.get());
+ }
+ }
+ void OnRemoveTrack(
+ rtc::scoped_refptr<RtpReceiverInterface> receiver) override {
+ if (receiver->media_type() == cricket::MEDIA_TYPE_VIDEO) {
+ auto it = fake_video_renderers_.find(receiver->track()->id());
+ if (it != fake_video_renderers_.end()) {
+ fake_video_renderers_.erase(it);
+ } else {
+ RTC_LOG(LS_ERROR) << "OnRemoveTrack called for non-active renderer";
+ }
+ }
+ }
  // Intentionally a no-op: tests drive (re)negotiation explicitly through the
  // fixture, so automatic renegotiation signals are ignored.
  void OnRenegotiationNeeded() override {}
  // Verifies the PeerConnection already reflects the new (legacy) ICE
  // connection state, then records the transition for later inspection.
  void OnIceConnectionChange(
      webrtc::PeerConnectionInterface::IceConnectionState new_state) override {
    EXPECT_EQ(pc()->ice_connection_state(), new_state);
    ice_connection_state_history_.push_back(new_state);
  }
  // Records transitions of the spec-compliant (standardized) ICE connection
  // state, as opposed to the legacy state tracked above.
  void OnStandardizedIceConnectionChange(
      webrtc::PeerConnectionInterface::IceConnectionState new_state) override {
    standardized_ice_connection_state_history_.push_back(new_state);
  }
  // Records transitions of the aggregate PeerConnection state.
  void OnConnectionChange(
      webrtc::PeerConnectionInterface::PeerConnectionState new_state) override {
    peer_connection_state_history_.push_back(new_state);
  }
+
  // Verifies the PeerConnection already reflects the new ICE gathering state,
  // then records the transition.
  void OnIceGatheringChange(
      webrtc::PeerConnectionInterface::IceGatheringState new_state) override {
    EXPECT_EQ(pc()->ice_gathering_state(), new_state);
    ice_gathering_state_history_.push_back(new_state);
  }
+
+ void OnIceSelectedCandidatePairChanged(
+ const cricket::CandidatePairChangeEvent& event) {
+ ice_candidate_pair_change_history_.push_back(event);
+ }
+
  // PeerConnectionObserver callback: counts the gathered candidate against
  // the expected budget, serializes it, and (when fake signaling is active)
  // relays it to the remote peer. For mDNS-style unresolved host candidates
  // it also primes the remote side's mock DNS resolver.
  void OnIceCandidate(const webrtc::IceCandidateInterface* candidate) override {
    RTC_LOG(LS_INFO) << debug_name_ << ": OnIceCandidate";

    if (remote_async_dns_resolver_) {
      const auto& local_candidate = candidate->candidate();
      if (local_candidate.address().IsUnresolvedIP()) {
        // Unresolved addresses only occur for local host candidates whose
        // hostname was obfuscated by the fake mDNS responder. Set up the
        // remote peer's mock resolver so that resolving the hostname yields
        // the address the responder mapped it to.
        RTC_DCHECK(local_candidate.type() == cricket::LOCAL_PORT_TYPE);
        const auto resolved_ip = mdns_responder_->GetMappedAddressForName(
            local_candidate.address().hostname());
        RTC_DCHECK(!resolved_ip.IsNil());
        remote_async_dns_resolved_addr_ = local_candidate.address();
        remote_async_dns_resolved_addr_.SetResolvedIP(resolved_ip);
        // Start() completes synchronously by invoking the callback at once.
        EXPECT_CALL(*remote_async_dns_resolver_, Start(_, _))
            .WillOnce([](const rtc::SocketAddress& addr,
                         absl::AnyInvocable<void()> callback) { callback(); });
        EXPECT_CALL(*remote_async_dns_resolver_, result())
            .WillOnce(ReturnRef(remote_async_dns_resolver_result_));
        EXPECT_CALL(remote_async_dns_resolver_result_, GetResolvedAddress(_, _))
            .WillOnce(DoAll(SetArgPointee<1>(remote_async_dns_resolved_addr_),
                            Return(true)));
      }
    }

    // Check if we expected to have a candidate.
    EXPECT_GT(candidates_expected_, 1);
    candidates_expected_--;
    std::string ice_sdp;
    EXPECT_TRUE(candidate->ToString(&ice_sdp));
    if (signaling_message_receiver_ == nullptr || !signal_ice_candidates_) {
      // Remote party may be deleted.
      return;
    }
    SendIceMessage(candidate->sdp_mid(), candidate->sdp_mline_index(), ice_sdp);
    last_candidate_gathered_ = candidate->candidate();
  }
  // Stores the most recent ICE candidate-gathering error so tests can assert
  // on its address, port, URL and error code.
  void OnIceCandidateError(const std::string& address,
                           int port,
                           const std::string& url,
                           int error_code,
                           const std::string& error_text) override {
    error_event_ = cricket::IceCandidateErrorEvent(address, port, url,
                                                   error_code, error_text);
  }
+ void OnDataChannel(
+ rtc::scoped_refptr<DataChannelInterface> data_channel) override {
+ RTC_LOG(LS_INFO) << debug_name_ << ": OnDataChannel";
+ data_channels_.push_back(data_channel);
+ data_observers_.push_back(
+ std::make_unique<MockDataChannelObserver>(data_channel.get()));
+ }
+
+ std::string debug_name_;
+
+ std::unique_ptr<rtc::FakeNetworkManager> fake_network_manager_;
+ std::unique_ptr<rtc::BasicPacketSocketFactory> socket_factory_;
+ // Reference to the mDNS responder owned by `fake_network_manager_` after set.
+ webrtc::FakeMdnsResponder* mdns_responder_ = nullptr;
+
+ rtc::scoped_refptr<webrtc::PeerConnectionInterface> peer_connection_;
+ rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface>
+ peer_connection_factory_;
+
+ cricket::PortAllocator* port_allocator_;
+ // Needed to keep track of number of frames sent.
+ rtc::scoped_refptr<FakeAudioCaptureModule> fake_audio_capture_module_;
+ // Needed to keep track of number of frames received.
+ std::map<std::string, std::unique_ptr<webrtc::FakeVideoTrackRenderer>>
+ fake_video_renderers_;
+ // Needed to ensure frames aren't received for removed tracks.
+ std::vector<std::unique_ptr<webrtc::FakeVideoTrackRenderer>>
+ removed_fake_video_renderers_;
+
+ // For remote peer communication.
+ SignalingMessageReceiver* signaling_message_receiver_ = nullptr;
+ int signaling_delay_ms_ = 0;
+ bool signal_ice_candidates_ = true;
+ cricket::Candidate last_candidate_gathered_;
+ cricket::IceCandidateErrorEvent error_event_;
+
+ // Store references to the video sources we've created, so that we can stop
+ // them, if required.
+ std::vector<rtc::scoped_refptr<webrtc::VideoTrackSource>>
+ video_track_sources_;
+ // `local_video_renderer_` attached to the first created local video track.
+ std::unique_ptr<webrtc::FakeVideoTrackRenderer> local_video_renderer_;
+
+ SdpSemantics sdp_semantics_;
+ PeerConnectionInterface::RTCOfferAnswerOptions offer_answer_options_;
+ std::function<void(cricket::SessionDescription*)> received_sdp_munger_;
+ std::function<void(cricket::SessionDescription*)> generated_sdp_munger_;
+ std::function<void()> remote_offer_handler_;
+ MockAsyncDnsResolver* remote_async_dns_resolver_ = nullptr;
+ // Result variables for the mock DNS resolver
+ NiceMock<MockAsyncDnsResolverResult> remote_async_dns_resolver_result_;
+ rtc::SocketAddress remote_async_dns_resolved_addr_;
+
+ // All data channels either created or observed on this peerconnection
+ std::vector<rtc::scoped_refptr<DataChannelInterface>> data_channels_;
+ std::vector<std::unique_ptr<MockDataChannelObserver>> data_observers_;
+
+ std::vector<std::unique_ptr<MockRtpReceiverObserver>> rtp_receiver_observers_;
+
+ std::vector<PeerConnectionInterface::IceConnectionState>
+ ice_connection_state_history_;
+ std::vector<PeerConnectionInterface::IceConnectionState>
+ standardized_ice_connection_state_history_;
+ std::vector<PeerConnectionInterface::PeerConnectionState>
+ peer_connection_state_history_;
+ std::vector<PeerConnectionInterface::IceGatheringState>
+ ice_gathering_state_history_;
+ std::vector<cricket::CandidatePairChangeEvent>
+ ice_candidate_pair_change_history_;
+ std::vector<PeerConnectionInterface::SignalingState>
+ peer_connection_signaling_state_history_;
+ webrtc::FakeRtcEventLogFactory* event_log_factory_;
+
+ // Number of ICE candidates expected. The default is no limit.
+ int candidates_expected_ = std::numeric_limits<int>::max();
+
+ // Variables for tracking delay stats on an audio track
+ int audio_packets_stat_ = 0;
+ double audio_delay_stat_ = 0.0;
+ uint64_t audio_samples_stat_ = 0;
+ uint64_t audio_concealed_stat_ = 0;
+ std::string rtp_stats_id_;
+
+ ScopedTaskSafety task_safety_;
+
+ friend class PeerConnectionIntegrationBaseTest;
+};
+
+class MockRtcEventLogOutput : public webrtc::RtcEventLogOutput {
+ public:
+ virtual ~MockRtcEventLogOutput() = default;
+ MOCK_METHOD(bool, IsActive, (), (const, override));
+ MOCK_METHOD(bool, Write, (absl::string_view), (override));
+};
+
+// This helper object is used for both specifying how many audio/video frames
+// are expected to be received for a caller/callee. It provides helper functions
+// to specify these expectations. The object initially starts in a state of no
+// expectations.
+class MediaExpectations {
+ public:
+ enum ExpectFrames {
+ kExpectSomeFrames,
+ kExpectNoFrames,
+ kNoExpectation,
+ };
+
+ void ExpectBidirectionalAudioAndVideo() {
+ ExpectBidirectionalAudio();
+ ExpectBidirectionalVideo();
+ }
+
+ void ExpectBidirectionalAudio() {
+ CallerExpectsSomeAudio();
+ CalleeExpectsSomeAudio();
+ }
+
+ void ExpectNoAudio() {
+ CallerExpectsNoAudio();
+ CalleeExpectsNoAudio();
+ }
+
+ void ExpectBidirectionalVideo() {
+ CallerExpectsSomeVideo();
+ CalleeExpectsSomeVideo();
+ }
+
+ void ExpectNoVideo() {
+ CallerExpectsNoVideo();
+ CalleeExpectsNoVideo();
+ }
+
+ void CallerExpectsSomeAudioAndVideo() {
+ CallerExpectsSomeAudio();
+ CallerExpectsSomeVideo();
+ }
+
+ void CalleeExpectsSomeAudioAndVideo() {
+ CalleeExpectsSomeAudio();
+ CalleeExpectsSomeVideo();
+ }
+
+ // Caller's audio functions.
+ void CallerExpectsSomeAudio(
+ int expected_audio_frames = kDefaultExpectedAudioFrameCount) {
+ caller_audio_expectation_ = kExpectSomeFrames;
+ caller_audio_frames_expected_ = expected_audio_frames;
+ }
+
+ void CallerExpectsNoAudio() {
+ caller_audio_expectation_ = kExpectNoFrames;
+ caller_audio_frames_expected_ = 0;
+ }
+
+ // Caller's video functions.
+ void CallerExpectsSomeVideo(
+ int expected_video_frames = kDefaultExpectedVideoFrameCount) {
+ caller_video_expectation_ = kExpectSomeFrames;
+ caller_video_frames_expected_ = expected_video_frames;
+ }
+
+ void CallerExpectsNoVideo() {
+ caller_video_expectation_ = kExpectNoFrames;
+ caller_video_frames_expected_ = 0;
+ }
+
+ // Callee's audio functions.
+ void CalleeExpectsSomeAudio(
+ int expected_audio_frames = kDefaultExpectedAudioFrameCount) {
+ callee_audio_expectation_ = kExpectSomeFrames;
+ callee_audio_frames_expected_ = expected_audio_frames;
+ }
+
+ void CalleeExpectsNoAudio() {
+ callee_audio_expectation_ = kExpectNoFrames;
+ callee_audio_frames_expected_ = 0;
+ }
+
+ // Callee's video functions.
+ void CalleeExpectsSomeVideo(
+ int expected_video_frames = kDefaultExpectedVideoFrameCount) {
+ callee_video_expectation_ = kExpectSomeFrames;
+ callee_video_frames_expected_ = expected_video_frames;
+ }
+
+ void CalleeExpectsNoVideo() {
+ callee_video_expectation_ = kExpectNoFrames;
+ callee_video_frames_expected_ = 0;
+ }
+
+ ExpectFrames caller_audio_expectation_ = kNoExpectation;
+ ExpectFrames caller_video_expectation_ = kNoExpectation;
+ ExpectFrames callee_audio_expectation_ = kNoExpectation;
+ ExpectFrames callee_video_expectation_ = kNoExpectation;
+ int caller_audio_frames_expected_ = 0;
+ int caller_video_frames_expected_ = 0;
+ int callee_audio_frames_expected_ = 0;
+ int callee_video_frames_expected_ = 0;
+};
+
+class MockIceTransport : public webrtc::IceTransportInterface {
+ public:
+ MockIceTransport(const std::string& name, int component)
+ : internal_(std::make_unique<cricket::FakeIceTransport>(
+ name,
+ component,
+ nullptr /* network_thread */)) {}
+ ~MockIceTransport() = default;
+ cricket::IceTransportInternal* internal() { return internal_.get(); }
+
+ private:
+ std::unique_ptr<cricket::FakeIceTransport> internal_;
+};
+
+class MockIceTransportFactory : public IceTransportFactory {
+ public:
+ ~MockIceTransportFactory() override = default;
+ rtc::scoped_refptr<IceTransportInterface> CreateIceTransport(
+ const std::string& transport_name,
+ int component,
+ IceTransportInit init) {
+ RecordIceTransportCreated();
+ return rtc::make_ref_counted<MockIceTransport>(transport_name, component);
+ }
+ MOCK_METHOD(void, RecordIceTransportCreated, ());
+};
+
+// Tests two PeerConnections connecting to each other end-to-end, using a
+// virtual network, fake A/V capture and fake encoder/decoders. The
+// PeerConnections share the threads/socket servers, but use separate versions
+// of everything else (including "PeerConnectionFactory"s).
class PeerConnectionIntegrationBaseTest : public ::testing::Test {
 public:
  // `field_trials`, when provided, supplies the field-trial string used by
  // everything created through this fixture.
  PeerConnectionIntegrationBaseTest(
      SdpSemantics sdp_semantics,
      absl::optional<std::string> field_trials = absl::nullopt)
      : sdp_semantics_(sdp_semantics),
        ss_(new rtc::VirtualSocketServer()),
        fss_(new rtc::FirewallSocketServer(ss_.get())),
        network_thread_(new rtc::Thread(fss_.get())),
        worker_thread_(rtc::Thread::Create()),
        // TODO(bugs.webrtc.org/10335): Pass optional ScopedKeyValueConfig.
        field_trials_(new test::ScopedKeyValueConfig(
            field_trials.has_value() ? *field_trials : "")) {
    network_thread_->SetName("PCNetworkThread", this);
    worker_thread_->SetName("PCWorkerThread", this);
    RTC_CHECK(network_thread_->Start());
    RTC_CHECK(worker_thread_->Start());
    // Start each test from a clean histogram/metrics state.
    webrtc::metrics::Reset();
  }

  ~PeerConnectionIntegrationBaseTest() {
    // The PeerConnections should be deleted before the TurnCustomizers.
    // A TurnPort is created with a raw pointer to a TurnCustomizer. The
    // TurnPort has the same lifetime as the PeerConnection, so it's expected
    // that the TurnCustomizer outlives the life of the PeerConnection or else
    // when Send() is called it will hit a seg fault.
    if (caller_) {
      caller_->set_signaling_message_receiver(nullptr);
      caller_->pc()->Close();
      delete SetCallerPcWrapperAndReturnCurrent(nullptr);
    }
    if (callee_) {
      callee_->set_signaling_message_receiver(nullptr);
      callee_->pc()->Close();
      delete SetCalleePcWrapperAndReturnCurrent(nullptr);
    }

    // If turn servers were created for the test they need to be destroyed on
    // the network thread.
    SendTask(network_thread(), [this] {
      turn_servers_.clear();
      turn_customizers_.clear();
    });
  }

  // True once neither side has a pending local or remote description.
  bool SignalingStateStable() {
    return caller_->SignalingStateStable() && callee_->SignalingStateStable();
  }

  bool DtlsConnected() {
    // TODO(deadbeef): kIceConnectionConnected currently means both ICE and DTLS
    // are connected. This is an important distinction. Once we have separate
    // ICE and DTLS state, this check needs to use the DTLS state.
    return (callee()->ice_connection_state() ==
                webrtc::PeerConnectionInterface::kIceConnectionConnected ||
            callee()->ice_connection_state() ==
                webrtc::PeerConnectionInterface::kIceConnectionCompleted) &&
           (caller()->ice_connection_state() ==
                webrtc::PeerConnectionInterface::kIceConnectionConnected ||
            caller()->ice_connection_state() ==
                webrtc::PeerConnectionInterface::kIceConnectionCompleted);
  }

  // When `event_log_factory` is null, the default implementation of the event
  // log factory will be used.
  std::unique_ptr<PeerConnectionIntegrationWrapper> CreatePeerConnectionWrapper(
      const std::string& debug_name,
      const PeerConnectionFactory::Options* options,
      const RTCConfiguration* config,
      webrtc::PeerConnectionDependencies dependencies,
      std::unique_ptr<webrtc::FakeRtcEventLogFactory> event_log_factory,
      bool reset_encoder_factory,
      bool reset_decoder_factory,
      bool create_media_engine = true) {
    RTCConfiguration modified_config;
    if (config) {
      modified_config = *config;
    }
    // The fixture's SDP semantics always wins over whatever the caller put in
    // `config`.
    modified_config.sdp_semantics = sdp_semantics_;
    if (!dependencies.cert_generator) {
      dependencies.cert_generator =
          std::make_unique<FakeRTCCertificateGenerator>();
    }
    std::unique_ptr<PeerConnectionIntegrationWrapper> client(
        new PeerConnectionIntegrationWrapper(debug_name));

    if (!client->Init(options, &modified_config, std::move(dependencies),
                      fss_.get(), network_thread_.get(), worker_thread_.get(),
                      std::move(event_log_factory), reset_encoder_factory,
                      reset_decoder_factory, create_media_engine)) {
      return nullptr;
    }
    return client;
  }

  // Like CreatePeerConnectionWrapper, but injects a fake RTC event log
  // factory so tests can inspect logged events.
  std::unique_ptr<PeerConnectionIntegrationWrapper>
  CreatePeerConnectionWrapperWithFakeRtcEventLog(
      const std::string& debug_name,
      const PeerConnectionFactory::Options* options,
      const RTCConfiguration* config,
      webrtc::PeerConnectionDependencies dependencies) {
    return CreatePeerConnectionWrapper(
        debug_name, options, config, std::move(dependencies),
        std::make_unique<webrtc::FakeRtcEventLogFactory>(),
        /*reset_encoder_factory=*/false,
        /*reset_decoder_factory=*/false);
  }

  // Creates the caller/callee pair with default configurations.
  bool CreatePeerConnectionWrappers() {
    return CreatePeerConnectionWrappersWithConfig(
        PeerConnectionInterface::RTCConfiguration(),
        PeerConnectionInterface::RTCConfiguration());
  }

  bool CreatePeerConnectionWrappersWithSdpSemantics(
      SdpSemantics caller_semantics,
      SdpSemantics callee_semantics) {
    // Can't specify the sdp_semantics in the passed-in configuration since it
    // will be overwritten by CreatePeerConnectionWrapper with whatever is
    // stored in sdp_semantics_. So get around this by modifying the instance
    // variable before calling CreatePeerConnectionWrapper for the caller and
    // callee PeerConnections.
    SdpSemantics original_semantics = sdp_semantics_;
    sdp_semantics_ = caller_semantics;
    caller_ = CreatePeerConnectionWrapper(
        "Caller", nullptr, nullptr, webrtc::PeerConnectionDependencies(nullptr),
        nullptr,
        /*reset_encoder_factory=*/false,
        /*reset_decoder_factory=*/false);
    sdp_semantics_ = callee_semantics;
    callee_ = CreatePeerConnectionWrapper(
        "Callee", nullptr, nullptr, webrtc::PeerConnectionDependencies(nullptr),
        nullptr,
        /*reset_encoder_factory=*/false,
        /*reset_decoder_factory=*/false);
    sdp_semantics_ = original_semantics;
    return caller_ && callee_;
  }

  // Creates the caller/callee pair with the given RTC configurations.
  bool CreatePeerConnectionWrappersWithConfig(
      const PeerConnectionInterface::RTCConfiguration& caller_config,
      const PeerConnectionInterface::RTCConfiguration& callee_config) {
    caller_ = CreatePeerConnectionWrapper(
        "Caller", nullptr, &caller_config,
        webrtc::PeerConnectionDependencies(nullptr), nullptr,
        /*reset_encoder_factory=*/false,
        /*reset_decoder_factory=*/false);
    callee_ = CreatePeerConnectionWrapper(
        "Callee", nullptr, &callee_config,
        webrtc::PeerConnectionDependencies(nullptr), nullptr,
        /*reset_encoder_factory=*/false,
        /*reset_decoder_factory=*/false);
    return caller_ && callee_;
  }

  // Creates the caller/callee pair with custom configurations and
  // PeerConnection dependencies (e.g. injected cert generators or resolvers).
  bool CreatePeerConnectionWrappersWithConfigAndDeps(
      const PeerConnectionInterface::RTCConfiguration& caller_config,
      webrtc::PeerConnectionDependencies caller_dependencies,
      const PeerConnectionInterface::RTCConfiguration& callee_config,
      webrtc::PeerConnectionDependencies callee_dependencies) {
    caller_ =
        CreatePeerConnectionWrapper("Caller", nullptr, &caller_config,
                                    std::move(caller_dependencies), nullptr,
                                    /*reset_encoder_factory=*/false,
                                    /*reset_decoder_factory=*/false);
    callee_ =
        CreatePeerConnectionWrapper("Callee", nullptr, &callee_config,
                                    std::move(callee_dependencies), nullptr,
                                    /*reset_encoder_factory=*/false,
                                    /*reset_decoder_factory=*/false);
    return caller_ && callee_;
  }

  // Creates the caller/callee pair with custom factory options (e.g. crypto
  // options).
  bool CreatePeerConnectionWrappersWithOptions(
      const PeerConnectionFactory::Options& caller_options,
      const PeerConnectionFactory::Options& callee_options) {
    caller_ = CreatePeerConnectionWrapper(
        "Caller", &caller_options, nullptr,
        webrtc::PeerConnectionDependencies(nullptr), nullptr,
        /*reset_encoder_factory=*/false,
        /*reset_decoder_factory=*/false);
    callee_ = CreatePeerConnectionWrapper(
        "Callee", &callee_options, nullptr,
        webrtc::PeerConnectionDependencies(nullptr), nullptr,
        /*reset_encoder_factory=*/false,
        /*reset_decoder_factory=*/false);
    return caller_ && callee_;
  }

  // Creates the caller/callee pair with fake RTC event logs attached.
  bool CreatePeerConnectionWrappersWithFakeRtcEventLog() {
    PeerConnectionInterface::RTCConfiguration default_config;
    caller_ = CreatePeerConnectionWrapperWithFakeRtcEventLog(
        "Caller", nullptr, &default_config,
        webrtc::PeerConnectionDependencies(nullptr));
    callee_ = CreatePeerConnectionWrapperWithFakeRtcEventLog(
        "Callee", nullptr, &default_config,
        webrtc::PeerConnectionDependencies(nullptr));
    return caller_ && callee_;
  }

  // Creates an extra wrapper using the alternate DTLS certificate key, for
  // tests that swap out one endpoint mid-call.
  std::unique_ptr<PeerConnectionIntegrationWrapper>
  CreatePeerConnectionWrapperWithAlternateKey() {
    std::unique_ptr<FakeRTCCertificateGenerator> cert_generator(
        new FakeRTCCertificateGenerator());
    cert_generator->use_alternate_key();

    webrtc::PeerConnectionDependencies dependencies(nullptr);
    dependencies.cert_generator = std::move(cert_generator);
    return CreatePeerConnectionWrapper("New Peer", nullptr, nullptr,
                                       std::move(dependencies), nullptr,
                                       /*reset_encoder_factory=*/false,
                                       /*reset_decoder_factory=*/false);
  }

  // Creates a pair where media flows in only one direction: the sending side
  // keeps its encoder factory and the receiving side keeps its decoder
  // factory; the opposite factories are reset.
  bool CreateOneDirectionalPeerConnectionWrappers(bool caller_to_callee) {
    caller_ = CreatePeerConnectionWrapper(
        "Caller", nullptr, nullptr, webrtc::PeerConnectionDependencies(nullptr),
        nullptr,
        /*reset_encoder_factory=*/!caller_to_callee,
        /*reset_decoder_factory=*/caller_to_callee);
    callee_ = CreatePeerConnectionWrapper(
        "Callee", nullptr, nullptr, webrtc::PeerConnectionDependencies(nullptr),
        nullptr,
        /*reset_encoder_factory=*/caller_to_callee,
        /*reset_decoder_factory=*/!caller_to_callee);
    return caller_ && callee_;
  }

  // Creates a pair with no media engine at all (data-channel-only tests).
  bool CreatePeerConnectionWrappersWithoutMediaEngine() {
    caller_ = CreatePeerConnectionWrapper(
        "Caller", nullptr, nullptr, webrtc::PeerConnectionDependencies(nullptr),
        nullptr,
        /*reset_encoder_factory=*/false,
        /*reset_decoder_factory=*/false,
        /*create_media_engine=*/false);
    callee_ = CreatePeerConnectionWrapper(
        "Callee", nullptr, nullptr, webrtc::PeerConnectionDependencies(nullptr),
        nullptr,
        /*reset_encoder_factory=*/false,
        /*reset_decoder_factory=*/false,
        /*create_media_engine=*/false);
    return caller_ && callee_;
  }

  // Creates a TURN server on the network thread; ownership stays with the
  // fixture so teardown can also happen on the network thread.
  cricket::TestTurnServer* CreateTurnServer(
      rtc::SocketAddress internal_address,
      rtc::SocketAddress external_address,
      cricket::ProtocolType type = cricket::ProtocolType::PROTO_UDP,
      const std::string& common_name = "test turn server") {
    rtc::Thread* thread = network_thread();
    rtc::SocketFactory* socket_factory = fss_.get();
    std::unique_ptr<cricket::TestTurnServer> turn_server;
    SendTask(network_thread(), [&] {
      turn_server = std::make_unique<cricket::TestTurnServer>(
          thread, socket_factory, internal_address, external_address, type,
          /*ignore_bad_certs=*/true, common_name);
    });
    turn_servers_.push_back(std::move(turn_server));
    // Interactions with the turn server should be done on the network thread.
    return turn_servers_.back().get();
  }

  // Creates a TURN customizer on the network thread; ownership stays with the
  // fixture (see destructor comment about lifetimes).
  cricket::TestTurnCustomizer* CreateTurnCustomizer() {
    std::unique_ptr<cricket::TestTurnCustomizer> turn_customizer;
    SendTask(network_thread(), [&] {
      turn_customizer = std::make_unique<cricket::TestTurnCustomizer>();
    });
    turn_customizers_.push_back(std::move(turn_customizer));
    // Interactions with the turn customizer should be done on the network
    // thread.
    return turn_customizers_.back().get();
  }

  // Checks that the function counters for a TestTurnCustomizer are greater than
  // 0.
  void ExpectTurnCustomizerCountersIncremented(
      cricket::TestTurnCustomizer* turn_customizer) {
    SendTask(network_thread(), [turn_customizer] {
      EXPECT_GT(turn_customizer->allow_channel_data_cnt_, 0u);
      EXPECT_GT(turn_customizer->modify_cnt_, 0u);
    });
  }

  // Once called, SDP blobs and ICE candidates will be automatically signaled
  // between PeerConnections.
  void ConnectFakeSignaling() {
    caller_->set_signaling_message_receiver(callee_.get());
    callee_->set_signaling_message_receiver(caller_.get());
  }

  // Once called, SDP blobs will be automatically signaled between
  // PeerConnections. Note that ICE candidates will not be signaled unless they
  // are in the exchanged SDP blobs.
  void ConnectFakeSignalingForSdpOnly() {
    ConnectFakeSignaling();
    SetSignalIceCandidates(false);
  }

  // Adds an artificial delay to the fake signaling in both directions.
  void SetSignalingDelayMs(int delay_ms) {
    caller_->set_signaling_delay_ms(delay_ms);
    callee_->set_signaling_delay_ms(delay_ms);
  }

  // Enables/disables out-of-band ICE candidate signaling on both sides.
  void SetSignalIceCandidates(bool signal) {
    caller_->set_signal_ice_candidates(signal);
    callee_->set_signal_ice_candidates(signal);
  }

  // Messages may get lost on the unreliable DataChannel, so we send multiple
  // times to avoid test flakiness.
  void SendRtpDataWithRetries(webrtc::DataChannelInterface* dc,
                              const std::string& data,
                              int retries) {
    for (int i = 0; i < retries; ++i) {
      dc->Send(DataBuffer(data));
    }
  }

  rtc::Thread* network_thread() { return network_thread_.get(); }

  rtc::VirtualSocketServer* virtual_socket_server() { return ss_.get(); }

  PeerConnectionIntegrationWrapper* caller() { return caller_.get(); }

  // Destroy peerconnections.
  // This can be used to ensure that all pointers to on-stack mocks
  // get dropped before exit.
  void DestroyPeerConnections() {
    if (caller_) {
      caller_->pc()->Close();
    }
    if (callee_) {
      callee_->pc()->Close();
    }
    caller_.reset();
    callee_.reset();
  }

  // Set the `caller_` to the `wrapper` passed in and return the
  // original `caller_`.
  PeerConnectionIntegrationWrapper* SetCallerPcWrapperAndReturnCurrent(
      PeerConnectionIntegrationWrapper* wrapper) {
    PeerConnectionIntegrationWrapper* old = caller_.release();
    caller_.reset(wrapper);
    return old;
  }

  PeerConnectionIntegrationWrapper* callee() { return callee_.get(); }

  // Set the `callee_` to the `wrapper` passed in and return the
  // original `callee_`.
  PeerConnectionIntegrationWrapper* SetCalleePcWrapperAndReturnCurrent(
      PeerConnectionIntegrationWrapper* wrapper) {
    PeerConnectionIntegrationWrapper* old = callee_.release();
    callee_.reset(wrapper);
    return old;
  }

  // Port allocator flags must be mutated on the network thread.
  void SetPortAllocatorFlags(uint32_t caller_flags, uint32_t callee_flags) {
    SendTask(network_thread(), [this, caller_flags] {
      caller()->port_allocator()->set_flags(caller_flags);
    });
    SendTask(network_thread(), [this, callee_flags] {
      callee()->port_allocator()->set_flags(callee_flags);
    });
  }

  rtc::FirewallSocketServer* firewall() const { return fss_.get(); }

  // Expects the provided number of new frames to be received within
  // kMaxWaitForFramesMs. The new expected frames are specified in
  // `media_expectations`. Returns false if any of the expectations were
  // not met.
  bool ExpectNewFrames(const MediaExpectations& media_expectations) {
    // Make sure there are no bogus tracks confusing the issue.
    caller()->RemoveUnusedVideoRenderers();
    callee()->RemoveUnusedVideoRenderers();
    // First initialize the expected frame counts based upon the current
    // frame count.
    int total_caller_audio_frames_expected = caller()->audio_frames_received();
    if (media_expectations.caller_audio_expectation_ ==
        MediaExpectations::kExpectSomeFrames) {
      total_caller_audio_frames_expected +=
          media_expectations.caller_audio_frames_expected_;
    }
    int total_caller_video_frames_expected =
        caller()->min_video_frames_received_per_track();
    if (media_expectations.caller_video_expectation_ ==
        MediaExpectations::kExpectSomeFrames) {
      total_caller_video_frames_expected +=
          media_expectations.caller_video_frames_expected_;
    }
    int total_callee_audio_frames_expected = callee()->audio_frames_received();
    if (media_expectations.callee_audio_expectation_ ==
        MediaExpectations::kExpectSomeFrames) {
      total_callee_audio_frames_expected +=
          media_expectations.callee_audio_frames_expected_;
    }
    int total_callee_video_frames_expected =
        callee()->min_video_frames_received_per_track();
    if (media_expectations.callee_video_expectation_ ==
        MediaExpectations::kExpectSomeFrames) {
      total_callee_video_frames_expected +=
          media_expectations.callee_video_frames_expected_;
    }

    // Wait for the expected frames.
    EXPECT_TRUE_WAIT(caller()->audio_frames_received() >=
                             total_caller_audio_frames_expected &&
                         caller()->min_video_frames_received_per_track() >=
                             total_caller_video_frames_expected &&
                         callee()->audio_frames_received() >=
                             total_callee_audio_frames_expected &&
                         callee()->min_video_frames_received_per_track() >=
                             total_callee_video_frames_expected,
                     kMaxWaitForFramesMs);
    bool expectations_correct =
        caller()->audio_frames_received() >=
            total_caller_audio_frames_expected &&
        caller()->min_video_frames_received_per_track() >=
            total_caller_video_frames_expected &&
        callee()->audio_frames_received() >=
            total_callee_audio_frames_expected &&
        callee()->min_video_frames_received_per_track() >=
            total_callee_video_frames_expected;

    // After the combined wait, print out a more detailed message upon
    // failure.
    EXPECT_GE(caller()->audio_frames_received(),
              total_caller_audio_frames_expected);
    EXPECT_GE(caller()->min_video_frames_received_per_track(),
              total_caller_video_frames_expected);
    EXPECT_GE(callee()->audio_frames_received(),
              total_callee_audio_frames_expected);
    EXPECT_GE(callee()->min_video_frames_received_per_track(),
              total_callee_video_frames_expected);

    // We want to make sure nothing unexpected was received.
    if (media_expectations.caller_audio_expectation_ ==
        MediaExpectations::kExpectNoFrames) {
      EXPECT_EQ(caller()->audio_frames_received(),
                total_caller_audio_frames_expected);
      if (caller()->audio_frames_received() !=
          total_caller_audio_frames_expected) {
        expectations_correct = false;
      }
    }
    if (media_expectations.caller_video_expectation_ ==
        MediaExpectations::kExpectNoFrames) {
      EXPECT_EQ(caller()->min_video_frames_received_per_track(),
                total_caller_video_frames_expected);
      if (caller()->min_video_frames_received_per_track() !=
          total_caller_video_frames_expected) {
        expectations_correct = false;
      }
    }
    if (media_expectations.callee_audio_expectation_ ==
        MediaExpectations::kExpectNoFrames) {
      EXPECT_EQ(callee()->audio_frames_received(),
                total_callee_audio_frames_expected);
      if (callee()->audio_frames_received() !=
          total_callee_audio_frames_expected) {
        expectations_correct = false;
      }
    }
    if (media_expectations.callee_video_expectation_ ==
        MediaExpectations::kExpectNoFrames) {
      EXPECT_EQ(callee()->min_video_frames_received_per_track(),
                total_callee_video_frames_expected);
      if (callee()->min_video_frames_received_per_track() !=
          total_callee_video_frames_expected) {
        expectations_correct = false;
      }
    }
    return expectations_correct;
  }

  // Closes both peer connections without destroying the wrappers.
  void ClosePeerConnections() {
    if (caller())
      caller()->pc()->Close();
    if (callee())
      callee()->pc()->Close();
  }

  // Sets up a full audio+video call with the given factory options and
  // verifies the negotiated SRTP crypto suite matches `expected_cipher_suite`.
  void TestNegotiatedCipherSuite(
      const PeerConnectionFactory::Options& caller_options,
      const PeerConnectionFactory::Options& callee_options,
      int expected_cipher_suite) {
    ASSERT_TRUE(CreatePeerConnectionWrappersWithOptions(caller_options,
                                                        callee_options));
    ConnectFakeSignaling();
    caller()->AddAudioVideoTracks();
    callee()->AddAudioVideoTracks();
    caller()->CreateAndSetAndSignalOffer();
    ASSERT_TRUE_WAIT(DtlsConnected(), kDefaultTimeout);
    EXPECT_EQ_WAIT(rtc::SrtpCryptoSuiteToName(expected_cipher_suite),
                   caller()->OldGetStats()->SrtpCipher(), kDefaultTimeout);
  }

  // Convenience wrapper around TestNegotiatedCipherSuite for GCM/AES-CTR
  // crypto-suite negotiation combinations.
  void TestGcmNegotiationUsesCipherSuite(bool local_gcm_enabled,
                                         bool remote_gcm_enabled,
                                         bool aes_ctr_enabled,
                                         int expected_cipher_suite) {
    PeerConnectionFactory::Options caller_options;
    caller_options.crypto_options.srtp.enable_gcm_crypto_suites =
        local_gcm_enabled;
    caller_options.crypto_options.srtp.enable_aes128_sha1_80_crypto_cipher =
        aes_ctr_enabled;
    PeerConnectionFactory::Options callee_options;
    callee_options.crypto_options.srtp.enable_gcm_crypto_suites =
        remote_gcm_enabled;
    callee_options.crypto_options.srtp.enable_aes128_sha1_80_crypto_cipher =
        aes_ctr_enabled;
    TestNegotiatedCipherSuite(caller_options, callee_options,
                              expected_cipher_suite);
  }

  const FieldTrialsView& trials() const { return *field_trials_.get(); }

 protected:
  SdpSemantics sdp_semantics_;

 private:
  rtc::AutoThread main_thread_;  // Used as the signal thread by most tests.
  // `ss_` is used by `network_thread_` so it must be destroyed later.
  std::unique_ptr<rtc::VirtualSocketServer> ss_;
  std::unique_ptr<rtc::FirewallSocketServer> fss_;
  // `network_thread_` and `worker_thread_` are used by both
  // `caller_` and `callee_` so they must be destroyed
  // later.
  std::unique_ptr<rtc::Thread> network_thread_;
  std::unique_ptr<rtc::Thread> worker_thread_;
  // The turn servers and turn customizers should be accessed & deleted on the
  // network thread to avoid a race with the socket read/write that occurs
  // on the network thread.
  std::vector<std::unique_ptr<cricket::TestTurnServer>> turn_servers_;
  std::vector<std::unique_ptr<cricket::TestTurnCustomizer>> turn_customizers_;
  std::unique_ptr<PeerConnectionIntegrationWrapper> caller_;
  std::unique_ptr<PeerConnectionIntegrationWrapper> callee_;
  std::unique_ptr<FieldTrialsView> field_trials_;
};
+
+} // namespace webrtc
+
+#endif // PC_TEST_INTEGRATION_TEST_HELPERS_H_
diff --git a/third_party/libwebrtc/pc/test/mock_channel_interface.h b/third_party/libwebrtc/pc/test/mock_channel_interface.h
new file mode 100644
index 0000000000..6b85ed8d11
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/mock_channel_interface.h
@@ -0,0 +1,88 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_TEST_MOCK_CHANNEL_INTERFACE_H_
+#define PC_TEST_MOCK_CHANNEL_INTERFACE_H_
+
+#include <string>
+#include <vector>
+
+#include "media/base/media_channel.h"
+#include "pc/channel_interface.h"
+#include "test/gmock.h"
+
+namespace cricket {
+
+// Mock class for BaseChannel.
+// Use this class in unit tests to avoid dependency on a specific
+// implementation of BaseChannel.
+class MockChannelInterface : public cricket::ChannelInterface {
+ public:
+ MOCK_METHOD(cricket::MediaType, media_type, (), (const, override));
+ MOCK_METHOD(VideoChannel*, AsVideoChannel, (), (override));
+ MOCK_METHOD(VoiceChannel*, AsVoiceChannel, (), (override));
+ MOCK_METHOD(MediaSendChannelInterface*, media_send_channel, (), (override));
+ MOCK_METHOD(VoiceMediaSendChannelInterface*,
+ voice_media_send_channel,
+ (),
+ (override));
+ MOCK_METHOD(VideoMediaSendChannelInterface*,
+ video_media_send_channel,
+ (),
+ (override));
+ MOCK_METHOD(MediaReceiveChannelInterface*,
+ media_receive_channel,
+ (),
+ (override));
+ MOCK_METHOD(VoiceMediaReceiveChannelInterface*,
+ voice_media_receive_channel,
+ (),
+ (override));
+ MOCK_METHOD(VideoMediaReceiveChannelInterface*,
+ video_media_receive_channel,
+ (),
+ (override));
+ MOCK_METHOD(absl::string_view, transport_name, (), (const, override));
+ MOCK_METHOD(const std::string&, mid, (), (const, override));
+ MOCK_METHOD(void, Enable, (bool), (override));
+ MOCK_METHOD(void,
+ SetFirstPacketReceivedCallback,
+ (std::function<void()>),
+ (override));
+ MOCK_METHOD(bool,
+ SetLocalContent,
+ (const cricket::MediaContentDescription*,
+ webrtc::SdpType,
+ std::string&),
+ (override));
+ MOCK_METHOD(bool,
+ SetRemoteContent,
+ (const cricket::MediaContentDescription*,
+ webrtc::SdpType,
+ std::string&),
+ (override));
+ MOCK_METHOD(bool, SetPayloadTypeDemuxingEnabled, (bool), (override));
+ MOCK_METHOD(const std::vector<StreamParams>&,
+ local_streams,
+ (),
+ (const, override));
+ MOCK_METHOD(const std::vector<StreamParams>&,
+ remote_streams,
+ (),
+ (const, override));
+ MOCK_METHOD(bool,
+ SetRtpTransport,
+ (webrtc::RtpTransportInternal*),
+ (override));
+};
+
+} // namespace cricket
+
+#endif // PC_TEST_MOCK_CHANNEL_INTERFACE_H_
diff --git a/third_party/libwebrtc/pc/test/mock_data_channel.h b/third_party/libwebrtc/pc/test/mock_data_channel.h
new file mode 100644
index 0000000000..ef781fe8ae
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/mock_data_channel.h
@@ -0,0 +1,79 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_TEST_MOCK_DATA_CHANNEL_H_
+#define PC_TEST_MOCK_DATA_CHANNEL_H_
+
+#include <string>
+#include <utility>
+
+#include "pc/sctp_data_channel.h"
+#include "test/gmock.h"
+
+namespace webrtc {
+
+class MockSctpDataChannel : public SctpDataChannel {
+ public:
+ MockSctpDataChannel(
+ rtc::WeakPtr<SctpDataChannelControllerInterface> controller,
+ int id,
+ DataState state)
+ : MockSctpDataChannel(std::move(controller),
+ id,
+ "MockSctpDataChannel",
+ state,
+ "someProtocol",
+ 0,
+ 0,
+ 0,
+ 0) {}
+ MockSctpDataChannel(
+ rtc::WeakPtr<SctpDataChannelControllerInterface> controller,
+ int id,
+ const std::string& label,
+ DataState state,
+ const std::string& protocol,
+ uint32_t messages_sent,
+ uint64_t bytes_sent,
+ uint32_t messages_received,
+ uint64_t bytes_received,
+ const InternalDataChannelInit& config = InternalDataChannelInit(),
+ rtc::Thread* signaling_thread = rtc::Thread::Current(),
+ rtc::Thread* network_thread = rtc::Thread::Current())
+ : SctpDataChannel(config,
+ std::move(controller),
+ label,
+ false,
+ signaling_thread,
+ network_thread) {
+ EXPECT_CALL(*this, id()).WillRepeatedly(::testing::Return(id));
+ EXPECT_CALL(*this, state()).WillRepeatedly(::testing::Return(state));
+ EXPECT_CALL(*this, protocol()).WillRepeatedly(::testing::Return(protocol));
+ EXPECT_CALL(*this, messages_sent())
+ .WillRepeatedly(::testing::Return(messages_sent));
+ EXPECT_CALL(*this, bytes_sent())
+ .WillRepeatedly(::testing::Return(bytes_sent));
+ EXPECT_CALL(*this, messages_received())
+ .WillRepeatedly(::testing::Return(messages_received));
+ EXPECT_CALL(*this, bytes_received())
+ .WillRepeatedly(::testing::Return(bytes_received));
+ }
+ MOCK_METHOD(int, id, (), (const, override));
+ MOCK_METHOD(DataState, state, (), (const, override));
+ MOCK_METHOD(std::string, protocol, (), (const, override));
+ MOCK_METHOD(uint32_t, messages_sent, (), (const, override));
+ MOCK_METHOD(uint64_t, bytes_sent, (), (const, override));
+ MOCK_METHOD(uint32_t, messages_received, (), (const, override));
+ MOCK_METHOD(uint64_t, bytes_received, (), (const, override));
+};
+
+} // namespace webrtc
+
+#endif // PC_TEST_MOCK_DATA_CHANNEL_H_
diff --git a/third_party/libwebrtc/pc/test/mock_peer_connection_internal.h b/third_party/libwebrtc/pc/test/mock_peer_connection_internal.h
new file mode 100644
index 0000000000..5fd7a50b4f
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/mock_peer_connection_internal.h
@@ -0,0 +1,325 @@
+/*
+ * Copyright 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_TEST_MOCK_PEER_CONNECTION_INTERNAL_H_
+#define PC_TEST_MOCK_PEER_CONNECTION_INTERNAL_H_
+
+#include <map>
+#include <memory>
+#include <set>
+#include <string>
+#include <vector>
+
+#include "modules/audio_device/include/audio_device.h"
+#include "pc/peer_connection_internal.h"
+#include "test/gmock.h"
+
+namespace webrtc {
+
+class MockPeerConnectionInternal : public PeerConnectionInternal {
+ public:
+ MockPeerConnectionInternal() {}
+ ~MockPeerConnectionInternal() = default;
+ // PeerConnectionInterface
+ MOCK_METHOD(rtc::scoped_refptr<StreamCollectionInterface>,
+ local_streams,
+ (),
+ (override));
+ MOCK_METHOD(rtc::scoped_refptr<StreamCollectionInterface>,
+ remote_streams,
+ (),
+ (override));
+ MOCK_METHOD(bool, AddStream, (MediaStreamInterface*), (override));
+ MOCK_METHOD(void, RemoveStream, (MediaStreamInterface*), (override));
+ MOCK_METHOD(RTCErrorOr<rtc::scoped_refptr<RtpSenderInterface>>,
+ AddTrack,
+ (rtc::scoped_refptr<MediaStreamTrackInterface>,
+ const std::vector<std::string>&),
+ (override));
+ MOCK_METHOD(RTCErrorOr<rtc::scoped_refptr<RtpSenderInterface>>,
+ AddTrack,
+ (rtc::scoped_refptr<MediaStreamTrackInterface>,
+ const std::vector<std::string>&,
+ const std::vector<RtpEncodingParameters>&),
+ (override));
+ MOCK_METHOD(RTCError,
+ RemoveTrackOrError,
+ (rtc::scoped_refptr<RtpSenderInterface>),
+ (override));
+ MOCK_METHOD(RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>>,
+ AddTransceiver,
+ (rtc::scoped_refptr<MediaStreamTrackInterface>),
+ (override));
+ MOCK_METHOD(RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>>,
+ AddTransceiver,
+ (rtc::scoped_refptr<MediaStreamTrackInterface>,
+ const RtpTransceiverInit&),
+ (override));
+ MOCK_METHOD(RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>>,
+ AddTransceiver,
+ (cricket::MediaType),
+ (override));
+ MOCK_METHOD(RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>>,
+ AddTransceiver,
+ (cricket::MediaType, const RtpTransceiverInit&),
+ (override));
+ MOCK_METHOD(rtc::scoped_refptr<RtpSenderInterface>,
+ CreateSender,
+ (const std::string&, const std::string&),
+ (override));
+ MOCK_METHOD(std::vector<rtc::scoped_refptr<RtpSenderInterface>>,
+ GetSenders,
+ (),
+ (const, override));
+ MOCK_METHOD(std::vector<rtc::scoped_refptr<RtpReceiverInterface>>,
+ GetReceivers,
+ (),
+ (const, override));
+ MOCK_METHOD(std::vector<rtc::scoped_refptr<RtpTransceiverInterface>>,
+ GetTransceivers,
+ (),
+ (const, override));
+ MOCK_METHOD(bool,
+ GetStats,
+ (StatsObserver*, MediaStreamTrackInterface*, StatsOutputLevel),
+ (override));
+ MOCK_METHOD(void, GetStats, (RTCStatsCollectorCallback*), (override));
+ MOCK_METHOD(void,
+ GetStats,
+ (rtc::scoped_refptr<RtpSenderInterface>,
+ rtc::scoped_refptr<RTCStatsCollectorCallback>),
+ (override));
+ MOCK_METHOD(void,
+ GetStats,
+ (rtc::scoped_refptr<RtpReceiverInterface>,
+ rtc::scoped_refptr<RTCStatsCollectorCallback>),
+ (override));
+ MOCK_METHOD(void, ClearStatsCache, (), (override));
+ MOCK_METHOD(RTCErrorOr<rtc::scoped_refptr<DataChannelInterface>>,
+ CreateDataChannelOrError,
+ (const std::string&, const DataChannelInit*),
+ (override));
+ MOCK_METHOD(SessionDescriptionInterface*,
+ local_description,
+ (),
+ (const, override));
+ MOCK_METHOD(SessionDescriptionInterface*,
+ remote_description,
+ (),
+ (const, override));
+ MOCK_METHOD(SessionDescriptionInterface*,
+ current_local_description,
+ (),
+ (const, override));
+ MOCK_METHOD(SessionDescriptionInterface*,
+ current_remote_description,
+ (),
+ (const, override));
+ MOCK_METHOD(SessionDescriptionInterface*,
+ pending_local_description,
+ (),
+ (const, override));
+ MOCK_METHOD(SessionDescriptionInterface*,
+ pending_remote_description,
+ (),
+ (const, override));
+ MOCK_METHOD(void, RestartIce, (), (override));
+ MOCK_METHOD(void,
+ CreateOffer,
+ (CreateSessionDescriptionObserver*, const RTCOfferAnswerOptions&),
+ (override));
+ MOCK_METHOD(void,
+ CreateAnswer,
+ (CreateSessionDescriptionObserver*, const RTCOfferAnswerOptions&),
+ (override));
+
+ MOCK_METHOD(void,
+ SetLocalDescription,
+ (SetSessionDescriptionObserver*, SessionDescriptionInterface*),
+ (override));
+ MOCK_METHOD(void,
+ SetRemoteDescription,
+ (SetSessionDescriptionObserver*, SessionDescriptionInterface*),
+ (override));
+ MOCK_METHOD(void,
+ SetRemoteDescription,
+ (std::unique_ptr<SessionDescriptionInterface>,
+ rtc::scoped_refptr<SetRemoteDescriptionObserverInterface>),
+ (override));
+ MOCK_METHOD(PeerConnectionInterface::RTCConfiguration,
+ GetConfiguration,
+ (),
+ (override));
+ MOCK_METHOD(RTCError,
+ SetConfiguration,
+ (const PeerConnectionInterface::RTCConfiguration&),
+ (override));
+ MOCK_METHOD(bool,
+ AddIceCandidate,
+ (const IceCandidateInterface*),
+ (override));
+ MOCK_METHOD(bool,
+ RemoveIceCandidates,
+ (const std::vector<cricket::Candidate>&),
+ (override));
+ MOCK_METHOD(RTCError, SetBitrate, (const BitrateSettings&), (override));
+ MOCK_METHOD(void, SetAudioPlayout, (bool), (override));
+ MOCK_METHOD(void, SetAudioRecording, (bool), (override));
+ MOCK_METHOD(rtc::scoped_refptr<DtlsTransportInterface>,
+ LookupDtlsTransportByMid,
+ (const std::string&),
+ (override));
+ MOCK_METHOD(rtc::scoped_refptr<SctpTransportInterface>,
+ GetSctpTransport,
+ (),
+ (const, override));
+ MOCK_METHOD(SignalingState, signaling_state, (), (override));
+ MOCK_METHOD(IceConnectionState, ice_connection_state, (), (override));
+ MOCK_METHOD(IceConnectionState,
+ standardized_ice_connection_state,
+ (),
+ (override));
+ MOCK_METHOD(PeerConnectionState, peer_connection_state, (), (override));
+ MOCK_METHOD(IceGatheringState, ice_gathering_state, (), (override));
+ MOCK_METHOD(absl::optional<bool>, can_trickle_ice_candidates, (), (override));
+ MOCK_METHOD(bool,
+ StartRtcEventLog,
+ (std::unique_ptr<RtcEventLogOutput>, int64_t),
+ (override));
+ MOCK_METHOD(bool,
+ StartRtcEventLog,
+ (std::unique_ptr<RtcEventLogOutput>),
+ (override));
+ MOCK_METHOD(void, StopRtcEventLog, (), (override));
+ MOCK_METHOD(void, Close, (), (override));
+ MOCK_METHOD(rtc::Thread*, signaling_thread, (), (const, override));
+
+ // PeerConnectionSdpMethods
+ MOCK_METHOD(std::string, session_id, (), (const, override));
+ MOCK_METHOD(bool, NeedsIceRestart, (const std::string&), (const, override));
+ MOCK_METHOD(absl::optional<std::string>, sctp_mid, (), (const, override));
+ MOCK_METHOD(PeerConnectionInterface::RTCConfiguration*,
+ configuration,
+ (),
+ (const, override));
+ MOCK_METHOD(void,
+ ReportSdpBundleUsage,
+ (const SessionDescriptionInterface&),
+ (override));
+ MOCK_METHOD(PeerConnectionMessageHandler*, message_handler, (), (override));
+ MOCK_METHOD(RtpTransmissionManager*, rtp_manager, (), (override));
+ MOCK_METHOD(const RtpTransmissionManager*,
+ rtp_manager,
+ (),
+ (const, override));
+ MOCK_METHOD(bool, dtls_enabled, (), (const, override));
+ MOCK_METHOD(const PeerConnectionFactoryInterface::Options*,
+ options,
+ (),
+ (const, override));
+ MOCK_METHOD(CryptoOptions, GetCryptoOptions, (), (override));
+ MOCK_METHOD(JsepTransportController*, transport_controller_s, (), (override));
+ MOCK_METHOD(JsepTransportController*, transport_controller_n, (), (override));
+ MOCK_METHOD(DataChannelController*, data_channel_controller, (), (override));
+ MOCK_METHOD(cricket::PortAllocator*, port_allocator, (), (override));
+ MOCK_METHOD(LegacyStatsCollector*, legacy_stats, (), (override));
+ MOCK_METHOD(PeerConnectionObserver*, Observer, (), (const, override));
+ MOCK_METHOD(absl::optional<rtc::SSLRole>, GetSctpSslRole_n, (), (override));
+ MOCK_METHOD(PeerConnectionInterface::IceConnectionState,
+ ice_connection_state_internal,
+ (),
+ (override));
+ MOCK_METHOD(void,
+ SetIceConnectionState,
+ (PeerConnectionInterface::IceConnectionState),
+ (override));
+ MOCK_METHOD(void, NoteUsageEvent, (UsageEvent), (override));
+ MOCK_METHOD(bool, IsClosed, (), (const, override));
+ MOCK_METHOD(bool, IsUnifiedPlan, (), (const, override));
+ MOCK_METHOD(bool,
+ ValidateBundleSettings,
+ (const cricket::SessionDescription*,
+ (const std::map<std::string, const cricket::ContentGroup*>&)),
+ (override));
+ MOCK_METHOD(RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>>,
+ AddTransceiver,
+ (cricket::MediaType,
+ rtc::scoped_refptr<MediaStreamTrackInterface>,
+ const RtpTransceiverInit&,
+ bool),
+ (override));
+ MOCK_METHOD(void, StartSctpTransport, (int, int, int), (override));
+ MOCK_METHOD(void,
+ AddRemoteCandidate,
+ (const std::string&, const cricket::Candidate&),
+ (override));
+ MOCK_METHOD(Call*, call_ptr, (), (override));
+ MOCK_METHOD(bool, SrtpRequired, (), (const, override));
+ MOCK_METHOD(bool,
+ CreateDataChannelTransport,
+ (absl::string_view),
+ (override));
+ MOCK_METHOD(void, DestroyDataChannelTransport, (RTCError error), (override));
+ MOCK_METHOD(const FieldTrialsView&, trials, (), (const, override));
+
+ // PeerConnectionInternal
+ MOCK_METHOD(rtc::Thread*, network_thread, (), (const, override));
+ MOCK_METHOD(rtc::Thread*, worker_thread, (), (const, override));
+ MOCK_METHOD(bool, initial_offerer, (), (const, override));
+ MOCK_METHOD(
+ std::vector<
+ rtc::scoped_refptr<RtpTransceiverProxyWithInternal<RtpTransceiver>>>,
+ GetTransceiversInternal,
+ (),
+ (const, override));
+ MOCK_METHOD(std::vector<DataChannelStats>,
+ GetDataChannelStats,
+ (),
+ (const, override));
+ MOCK_METHOD(absl::optional<std::string>,
+ sctp_transport_name,
+ (),
+ (const, override));
+ MOCK_METHOD(cricket::CandidateStatsList,
+ GetPooledCandidateStats,
+ (),
+ (const, override));
+ MOCK_METHOD((std::map<std::string, cricket::TransportStats>),
+ GetTransportStatsByNames,
+ (const std::set<std::string>&),
+ (override));
+ MOCK_METHOD(Call::Stats, GetCallStats, (), (override));
+ MOCK_METHOD(absl::optional<AudioDeviceModule::Stats>,
+ GetAudioDeviceStats,
+ (),
+ (override));
+ MOCK_METHOD(bool,
+ GetLocalCertificate,
+ (const std::string&, rtc::scoped_refptr<rtc::RTCCertificate>*),
+ (override));
+ MOCK_METHOD(std::unique_ptr<rtc::SSLCertChain>,
+ GetRemoteSSLCertChain,
+ (const std::string&),
+ (override));
+ MOCK_METHOD(bool, IceRestartPending, (const std::string&), (const, override));
+ MOCK_METHOD(bool,
+ GetSslRole,
+ (const std::string&, rtc::SSLRole*),
+ (override));
+ MOCK_METHOD(void, NoteDataAddedEvent, (), (override));
+ MOCK_METHOD(void,
+ OnSctpDataChannelStateChanged,
+ (int channel_id, DataChannelInterface::DataState),
+ (override));
+};
+
+} // namespace webrtc
+
+#endif // PC_TEST_MOCK_PEER_CONNECTION_INTERNAL_H_
diff --git a/third_party/libwebrtc/pc/test/mock_peer_connection_observers.h b/third_party/libwebrtc/pc/test/mock_peer_connection_observers.h
new file mode 100644
index 0000000000..e9d97a97f6
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/mock_peer_connection_observers.h
@@ -0,0 +1,599 @@
+/*
+ * Copyright 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file contains mock implementations of observers used in PeerConnection.
+// TODO(steveanton): These aren't really mocks and should be renamed.
+
+#ifndef PC_TEST_MOCK_PEER_CONNECTION_OBSERVERS_H_
+#define PC_TEST_MOCK_PEER_CONNECTION_OBSERVERS_H_
+
+#include <map>
+#include <memory>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "api/data_channel_interface.h"
+#include "api/jsep_ice_candidate.h"
+#include "pc/stream_collection.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+
+class MockPeerConnectionObserver : public PeerConnectionObserver {
+ public:
+ struct AddTrackEvent {
+ explicit AddTrackEvent(
+ rtc::scoped_refptr<RtpReceiverInterface> event_receiver,
+ std::vector<rtc::scoped_refptr<MediaStreamInterface>> event_streams)
+ : receiver(std::move(event_receiver)),
+ streams(std::move(event_streams)) {
+ for (auto stream : streams) {
+ std::vector<rtc::scoped_refptr<MediaStreamTrackInterface>> tracks;
+ for (auto audio_track : stream->GetAudioTracks()) {
+ tracks.push_back(audio_track);
+ }
+ for (auto video_track : stream->GetVideoTracks()) {
+ tracks.push_back(video_track);
+ }
+ snapshotted_stream_tracks[stream] = tracks;
+ }
+ }
+
+ rtc::scoped_refptr<RtpReceiverInterface> receiver;
+ std::vector<rtc::scoped_refptr<MediaStreamInterface>> streams;
+ // This map records the tracks present in each stream at the time the
+ // OnAddTrack callback was issued.
+ std::map<rtc::scoped_refptr<MediaStreamInterface>,
+ std::vector<rtc::scoped_refptr<MediaStreamTrackInterface>>>
+ snapshotted_stream_tracks;
+ };
+
+ MockPeerConnectionObserver() : remote_streams_(StreamCollection::Create()) {}
+ virtual ~MockPeerConnectionObserver() {}
+ void SetPeerConnectionInterface(PeerConnectionInterface* pc) {
+ pc_ = pc;
+ if (pc) {
+ state_ = pc_->signaling_state();
+ }
+ }
+ void OnSignalingChange(
+ PeerConnectionInterface::SignalingState new_state) override {
+ RTC_DCHECK(pc_);
+ RTC_DCHECK(pc_->signaling_state() == new_state);
+ state_ = new_state;
+ }
+
+ MediaStreamInterface* RemoteStream(const std::string& label) {
+ return remote_streams_->find(label);
+ }
+ StreamCollectionInterface* remote_streams() const {
+ return remote_streams_.get();
+ }
+ void OnAddStream(rtc::scoped_refptr<MediaStreamInterface> stream) override {
+ last_added_stream_ = stream;
+ remote_streams_->AddStream(stream);
+ }
+ void OnRemoveStream(
+ rtc::scoped_refptr<MediaStreamInterface> stream) override {
+ last_removed_stream_ = stream;
+ remote_streams_->RemoveStream(stream.get());
+ }
+ void OnRenegotiationNeeded() override { renegotiation_needed_ = true; }
+ void OnNegotiationNeededEvent(uint32_t event_id) override {
+ latest_negotiation_needed_event_ = event_id;
+ }
+ void OnDataChannel(
+ rtc::scoped_refptr<DataChannelInterface> data_channel) override {
+ last_datachannel_ = data_channel;
+ }
+
+ void OnIceConnectionChange(
+ PeerConnectionInterface::IceConnectionState new_state) override {
+ RTC_DCHECK(pc_);
+ RTC_DCHECK(pc_->ice_connection_state() == new_state);
+ // When ICE is finished, the caller will get to a kIceConnectionCompleted
+ // state, because it has the ICE controlling role, while the callee
+ // will get to a kIceConnectionConnected state. This means that both ICE
+ // and DTLS are connected.
+ ice_connected_ =
+ (new_state == PeerConnectionInterface::kIceConnectionConnected) ||
+ (new_state == PeerConnectionInterface::kIceConnectionCompleted);
+ callback_triggered_ = true;
+ }
+ void OnIceGatheringChange(
+ PeerConnectionInterface::IceGatheringState new_state) override {
+ RTC_DCHECK(pc_);
+ RTC_DCHECK(pc_->ice_gathering_state() == new_state);
+ ice_gathering_complete_ =
+ new_state == PeerConnectionInterface::kIceGatheringComplete;
+ callback_triggered_ = true;
+ }
+ void OnIceCandidate(const IceCandidateInterface* candidate) override {
+ RTC_DCHECK(pc_);
+ candidates_.push_back(std::make_unique<JsepIceCandidate>(
+ candidate->sdp_mid(), candidate->sdp_mline_index(),
+ candidate->candidate()));
+ callback_triggered_ = true;
+ }
+
+ void OnIceCandidatesRemoved(
+ const std::vector<cricket::Candidate>& candidates) override {
+ num_candidates_removed_++;
+ callback_triggered_ = true;
+ }
+
+ void OnIceConnectionReceivingChange(bool receiving) override {
+ callback_triggered_ = true;
+ }
+
+ void OnAddTrack(rtc::scoped_refptr<RtpReceiverInterface> receiver,
+ const std::vector<rtc::scoped_refptr<MediaStreamInterface>>&
+ streams) override {
+ RTC_DCHECK(receiver);
+ num_added_tracks_++;
+ last_added_track_label_ = receiver->id();
+ add_track_events_.push_back(AddTrackEvent(receiver, streams));
+ }
+
+ void OnTrack(
+ rtc::scoped_refptr<RtpTransceiverInterface> transceiver) override {
+ on_track_transceivers_.push_back(transceiver);
+ }
+
+ void OnRemoveTrack(
+ rtc::scoped_refptr<RtpReceiverInterface> receiver) override {
+ remove_track_events_.push_back(receiver);
+ }
+
+ std::vector<rtc::scoped_refptr<RtpReceiverInterface>> GetAddTrackReceivers() {
+ std::vector<rtc::scoped_refptr<RtpReceiverInterface>> receivers;
+ for (const AddTrackEvent& event : add_track_events_) {
+ receivers.push_back(event.receiver);
+ }
+ return receivers;
+ }
+
+ int CountAddTrackEventsForStream(const std::string& stream_id) {
+ int found_tracks = 0;
+ for (const AddTrackEvent& event : add_track_events_) {
+ bool has_stream_id = false;
+ for (auto stream : event.streams) {
+ if (stream->id() == stream_id) {
+ has_stream_id = true;
+ break;
+ }
+ }
+ if (has_stream_id) {
+ ++found_tracks;
+ }
+ }
+ return found_tracks;
+ }
+
+ // Returns the id of the last added stream.
+  // Empty string if no stream has been added.
+ std::string GetLastAddedStreamId() {
+ if (last_added_stream_.get())
+ return last_added_stream_->id();
+ return "";
+ }
+ std::string GetLastRemovedStreamId() {
+ if (last_removed_stream_.get())
+ return last_removed_stream_->id();
+ return "";
+ }
+
+ IceCandidateInterface* last_candidate() {
+ if (candidates_.empty()) {
+ return nullptr;
+ } else {
+ return candidates_.back().get();
+ }
+ }
+
+ std::vector<const IceCandidateInterface*> GetAllCandidates() {
+ std::vector<const IceCandidateInterface*> candidates;
+ for (const auto& candidate : candidates_) {
+ candidates.push_back(candidate.get());
+ }
+ return candidates;
+ }
+
+ std::vector<IceCandidateInterface*> GetCandidatesByMline(int mline_index) {
+ std::vector<IceCandidateInterface*> candidates;
+ for (const auto& candidate : candidates_) {
+ if (candidate->sdp_mline_index() == mline_index) {
+ candidates.push_back(candidate.get());
+ }
+ }
+ return candidates;
+ }
+
+ bool legacy_renegotiation_needed() const { return renegotiation_needed_; }
+ void clear_legacy_renegotiation_needed() { renegotiation_needed_ = false; }
+
+ bool has_negotiation_needed_event() {
+ return latest_negotiation_needed_event_.has_value();
+ }
+ uint32_t latest_negotiation_needed_event() {
+ return latest_negotiation_needed_event_.value_or(0u);
+ }
+ void clear_latest_negotiation_needed_event() {
+ latest_negotiation_needed_event_ = absl::nullopt;
+ }
+
+ rtc::scoped_refptr<PeerConnectionInterface> pc_;
+ PeerConnectionInterface::SignalingState state_;
+ std::vector<std::unique_ptr<IceCandidateInterface>> candidates_;
+ rtc::scoped_refptr<DataChannelInterface> last_datachannel_;
+ rtc::scoped_refptr<StreamCollection> remote_streams_;
+ bool renegotiation_needed_ = false;
+ absl::optional<uint32_t> latest_negotiation_needed_event_;
+ bool ice_gathering_complete_ = false;
+ bool ice_connected_ = false;
+ bool callback_triggered_ = false;
+ int num_added_tracks_ = 0;
+ std::string last_added_track_label_;
+ std::vector<AddTrackEvent> add_track_events_;
+ std::vector<rtc::scoped_refptr<RtpReceiverInterface>> remove_track_events_;
+ std::vector<rtc::scoped_refptr<RtpTransceiverInterface>>
+ on_track_transceivers_;
+ int num_candidates_removed_ = 0;
+
+ private:
+ rtc::scoped_refptr<MediaStreamInterface> last_added_stream_;
+ rtc::scoped_refptr<MediaStreamInterface> last_removed_stream_;
+};
+
+class MockCreateSessionDescriptionObserver
+ : public webrtc::CreateSessionDescriptionObserver {
+ public:
+ MockCreateSessionDescriptionObserver()
+ : called_(false),
+ error_("MockCreateSessionDescriptionObserver not called") {}
+ virtual ~MockCreateSessionDescriptionObserver() {}
+ void OnSuccess(SessionDescriptionInterface* desc) override {
+ MutexLock lock(&mutex_);
+ called_ = true;
+ error_ = "";
+ desc_.reset(desc);
+ }
+ void OnFailure(webrtc::RTCError error) override {
+ MutexLock lock(&mutex_);
+ called_ = true;
+ error_ = error.message();
+ }
+ bool called() const {
+ MutexLock lock(&mutex_);
+ return called_;
+ }
+ bool result() const {
+ MutexLock lock(&mutex_);
+ return error_.empty();
+ }
+ const std::string& error() const {
+ MutexLock lock(&mutex_);
+ return error_;
+ }
+ std::unique_ptr<SessionDescriptionInterface> MoveDescription() {
+ MutexLock lock(&mutex_);
+ return std::move(desc_);
+ }
+
+ private:
+ mutable Mutex mutex_;
+ bool called_ RTC_GUARDED_BY(mutex_);
+ std::string error_ RTC_GUARDED_BY(mutex_);
+ std::unique_ptr<SessionDescriptionInterface> desc_ RTC_GUARDED_BY(mutex_);
+};
+
+class MockSetSessionDescriptionObserver
+ : public webrtc::SetSessionDescriptionObserver {
+ public:
+ static rtc::scoped_refptr<MockSetSessionDescriptionObserver> Create() {
+ return rtc::make_ref_counted<MockSetSessionDescriptionObserver>();
+ }
+
+ MockSetSessionDescriptionObserver()
+ : called_(false),
+ error_("MockSetSessionDescriptionObserver not called") {}
+ ~MockSetSessionDescriptionObserver() override {}
+ void OnSuccess() override {
+ MutexLock lock(&mutex_);
+
+ called_ = true;
+ error_ = "";
+ }
+ void OnFailure(webrtc::RTCError error) override {
+ MutexLock lock(&mutex_);
+ called_ = true;
+ error_ = error.message();
+ }
+
+ bool called() const {
+ MutexLock lock(&mutex_);
+ return called_;
+ }
+ bool result() const {
+ MutexLock lock(&mutex_);
+ return error_.empty();
+ }
+ const std::string& error() const {
+ MutexLock lock(&mutex_);
+ return error_;
+ }
+
+ private:
+ mutable Mutex mutex_;
+ bool called_;
+ std::string error_;
+};
+
+class FakeSetLocalDescriptionObserver
+ : public SetLocalDescriptionObserverInterface {
+ public:
+ bool called() const { return error_.has_value(); }
+ RTCError& error() {
+ RTC_DCHECK(error_.has_value());
+ return *error_;
+ }
+
+ // SetLocalDescriptionObserverInterface implementation.
+ void OnSetLocalDescriptionComplete(RTCError error) override {
+ error_ = std::move(error);
+ }
+
+ private:
+ // Set on complete, on success this is set to an RTCError::OK() error.
+ absl::optional<RTCError> error_;
+};
+
+class FakeSetRemoteDescriptionObserver
+ : public SetRemoteDescriptionObserverInterface {
+ public:
+ bool called() const { return error_.has_value(); }
+ RTCError& error() {
+ RTC_DCHECK(error_.has_value());
+ return *error_;
+ }
+
+ // SetRemoteDescriptionObserverInterface implementation.
+ void OnSetRemoteDescriptionComplete(RTCError error) override {
+ error_ = std::move(error);
+ }
+
+ private:
+ // Set on complete, on success this is set to an RTCError::OK() error.
+ absl::optional<RTCError> error_;
+};
+
+class MockDataChannelObserver : public webrtc::DataChannelObserver {
+ public:
+ struct Message {
+ std::string data;
+ bool binary;
+ };
+
+ explicit MockDataChannelObserver(webrtc::DataChannelInterface* channel)
+ : channel_(channel) {
+ channel_->RegisterObserver(this);
+ states_.push_back(channel_->state());
+ }
+ virtual ~MockDataChannelObserver() { channel_->UnregisterObserver(); }
+
+ void OnBufferedAmountChange(uint64_t previous_amount) override {}
+
+ void OnStateChange() override { states_.push_back(channel_->state()); }
+ void OnMessage(const DataBuffer& buffer) override {
+ messages_.push_back(
+ {std::string(buffer.data.data<char>(), buffer.data.size()),
+ buffer.binary});
+ }
+
+ bool IsOpen() const { return state() == DataChannelInterface::kOpen; }
+ std::vector<Message> messages() const { return messages_; }
+ std::string last_message() const {
+ if (messages_.empty())
+ return {};
+
+ return messages_.back().data;
+ }
+ bool last_message_is_binary() const {
+ if (messages_.empty())
+ return false;
+ return messages_.back().binary;
+ }
+ size_t received_message_count() const { return messages_.size(); }
+
+ DataChannelInterface::DataState state() const { return states_.back(); }
+ const std::vector<DataChannelInterface::DataState>& states() const {
+ return states_;
+ }
+
+ private:
+ rtc::scoped_refptr<webrtc::DataChannelInterface> channel_;
+ std::vector<DataChannelInterface::DataState> states_;
+ std::vector<Message> messages_;
+};
+
+class MockStatsObserver : public webrtc::StatsObserver {
+ public:
+ MockStatsObserver() : called_(false), stats_() {}
+ virtual ~MockStatsObserver() {}
+
+ virtual void OnComplete(const StatsReports& reports) {
+ RTC_CHECK(!called_);
+ called_ = true;
+ stats_.Clear();
+ stats_.number_of_reports = reports.size();
+ for (const auto* r : reports) {
+ if (r->type() == StatsReport::kStatsReportTypeSsrc) {
+ stats_.timestamp = r->timestamp();
+ GetIntValue(r, StatsReport::kStatsValueNameAudioOutputLevel,
+ &stats_.audio_output_level);
+ GetIntValue(r, StatsReport::kStatsValueNameAudioInputLevel,
+ &stats_.audio_input_level);
+ GetIntValue(r, StatsReport::kStatsValueNameBytesReceived,
+ &stats_.bytes_received);
+ GetIntValue(r, StatsReport::kStatsValueNameBytesSent,
+ &stats_.bytes_sent);
+ GetInt64Value(r, StatsReport::kStatsValueNameCaptureStartNtpTimeMs,
+ &stats_.capture_start_ntp_time);
+ stats_.track_ids.emplace_back();
+ GetStringValue(r, StatsReport::kStatsValueNameTrackId,
+ &stats_.track_ids.back());
+ } else if (r->type() == StatsReport::kStatsReportTypeBwe) {
+ stats_.timestamp = r->timestamp();
+ GetIntValue(r, StatsReport::kStatsValueNameAvailableReceiveBandwidth,
+ &stats_.available_receive_bandwidth);
+ } else if (r->type() == StatsReport::kStatsReportTypeComponent) {
+ stats_.timestamp = r->timestamp();
+ GetStringValue(r, StatsReport::kStatsValueNameDtlsCipher,
+ &stats_.dtls_cipher);
+ GetStringValue(r, StatsReport::kStatsValueNameSrtpCipher,
+ &stats_.srtp_cipher);
+ }
+ }
+ }
+
+ bool called() const { return called_; }
+ size_t number_of_reports() const { return stats_.number_of_reports; }
+ double timestamp() const { return stats_.timestamp; }
+
+ int AudioOutputLevel() const {
+ RTC_CHECK(called_);
+ return stats_.audio_output_level;
+ }
+
+ int AudioInputLevel() const {
+ RTC_CHECK(called_);
+ return stats_.audio_input_level;
+ }
+
+ int BytesReceived() const {
+ RTC_CHECK(called_);
+ return stats_.bytes_received;
+ }
+
+ int BytesSent() const {
+ RTC_CHECK(called_);
+ return stats_.bytes_sent;
+ }
+
+ int64_t CaptureStartNtpTime() const {
+ RTC_CHECK(called_);
+ return stats_.capture_start_ntp_time;
+ }
+
+ int AvailableReceiveBandwidth() const {
+ RTC_CHECK(called_);
+ return stats_.available_receive_bandwidth;
+ }
+
+ std::string DtlsCipher() const {
+ RTC_CHECK(called_);
+ return stats_.dtls_cipher;
+ }
+
+ std::string SrtpCipher() const {
+ RTC_CHECK(called_);
+ return stats_.srtp_cipher;
+ }
+
+ std::vector<std::string> TrackIds() const {
+ RTC_CHECK(called_);
+ return stats_.track_ids;
+ }
+
+ private:
+ bool GetIntValue(const StatsReport* report,
+ StatsReport::StatsValueName name,
+ int* value) {
+ const StatsReport::Value* v = report->FindValue(name);
+ if (v) {
+ // TODO(tommi): We should really just be using an int here :-/
+ *value = rtc::FromString<int>(v->ToString());
+ }
+ return v != nullptr;
+ }
+
+ bool GetInt64Value(const StatsReport* report,
+ StatsReport::StatsValueName name,
+ int64_t* value) {
+ const StatsReport::Value* v = report->FindValue(name);
+ if (v) {
+ // TODO(tommi): We should really just be using an int here :-/
+ *value = rtc::FromString<int64_t>(v->ToString());
+ }
+ return v != nullptr;
+ }
+
+ bool GetStringValue(const StatsReport* report,
+ StatsReport::StatsValueName name,
+ std::string* value) {
+ const StatsReport::Value* v = report->FindValue(name);
+ if (v)
+ *value = v->ToString();
+ return v != nullptr;
+ }
+
+ bool called_;
+ struct {
+ void Clear() {
+ number_of_reports = 0;
+ timestamp = 0;
+ audio_output_level = 0;
+ audio_input_level = 0;
+ bytes_received = 0;
+ bytes_sent = 0;
+ capture_start_ntp_time = 0;
+ available_receive_bandwidth = 0;
+ dtls_cipher.clear();
+ srtp_cipher.clear();
+ track_ids.clear();
+ }
+
+ size_t number_of_reports;
+ double timestamp;
+ int audio_output_level;
+ int audio_input_level;
+ int bytes_received;
+ int bytes_sent;
+ int64_t capture_start_ntp_time;
+ int available_receive_bandwidth;
+ std::string dtls_cipher;
+ std::string srtp_cipher;
+ std::vector<std::string> track_ids;
+ } stats_;
+};
+
+// Helper class that just stores the report from the callback.
+class MockRTCStatsCollectorCallback : public webrtc::RTCStatsCollectorCallback {
+ public:
+ rtc::scoped_refptr<const RTCStatsReport> report() { return report_; }
+
+ bool called() const { return called_; }
+
+ protected:
+ void OnStatsDelivered(
+ const rtc::scoped_refptr<const RTCStatsReport>& report) override {
+ report_ = report;
+ called_ = true;
+ }
+
+ private:
+ bool called_ = false;
+ rtc::scoped_refptr<const RTCStatsReport> report_;
+};
+
+} // namespace webrtc
+
+#endif // PC_TEST_MOCK_PEER_CONNECTION_OBSERVERS_H_
diff --git a/third_party/libwebrtc/pc/test/mock_rtp_receiver_internal.h b/third_party/libwebrtc/pc/test/mock_rtp_receiver_internal.h
new file mode 100644
index 0000000000..e76b56755d
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/mock_rtp_receiver_internal.h
@@ -0,0 +1,82 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_TEST_MOCK_RTP_RECEIVER_INTERNAL_H_
+#define PC_TEST_MOCK_RTP_RECEIVER_INTERNAL_H_
+
+#include <string>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "pc/rtp_receiver.h"
+#include "test/gmock.h"
+
+namespace webrtc {
+
+// The definition of MockRtpReceiver is copied in to avoid multiple inheritance.
+class MockRtpReceiverInternal : public RtpReceiverInternal {
+ public:
+ // RtpReceiverInterface methods.
+ MOCK_METHOD(rtc::scoped_refptr<MediaStreamTrackInterface>,
+ track,
+ (),
+ (const, override));
+ MOCK_METHOD(rtc::scoped_refptr<DtlsTransportInterface>,
+ dtls_transport,
+ (),
+ (const, override));
+ MOCK_METHOD(std::vector<std::string>, stream_ids, (), (const, override));
+ MOCK_METHOD(std::vector<rtc::scoped_refptr<MediaStreamInterface>>,
+ streams,
+ (),
+ (const, override));
+ MOCK_METHOD(cricket::MediaType, media_type, (), (const, override));
+ MOCK_METHOD(std::string, id, (), (const, override));
+ MOCK_METHOD(RtpParameters, GetParameters, (), (const, override));
+ MOCK_METHOD(void, SetObserver, (RtpReceiverObserverInterface*), (override));
+ MOCK_METHOD(void,
+ SetJitterBufferMinimumDelay,
+ (absl::optional<double>),
+ (override));
+ MOCK_METHOD(std::vector<RtpSource>, GetSources, (), (const, override));
+ MOCK_METHOD(void,
+ SetFrameDecryptor,
+ (rtc::scoped_refptr<FrameDecryptorInterface>),
+ (override));
+ MOCK_METHOD(rtc::scoped_refptr<FrameDecryptorInterface>,
+ GetFrameDecryptor,
+ (),
+ (const, override));
+
+ // RtpReceiverInternal methods.
+ MOCK_METHOD(void, Stop, (), (override));
+ MOCK_METHOD(void,
+ SetMediaChannel,
+ (cricket::MediaReceiveChannelInterface*),
+ (override));
+ MOCK_METHOD(void, SetupMediaChannel, (uint32_t), (override));
+ MOCK_METHOD(void, SetupUnsignaledMediaChannel, (), (override));
+ MOCK_METHOD(absl::optional<uint32_t>, ssrc, (), (const, override));
+ MOCK_METHOD(void, NotifyFirstPacketReceived, (), (override));
+ MOCK_METHOD(void, set_stream_ids, (std::vector<std::string>), (override));
+ MOCK_METHOD(void,
+ set_transport,
+ (rtc::scoped_refptr<DtlsTransportInterface>),
+ (override));
+ MOCK_METHOD(void,
+ SetStreams,
+ (const std::vector<rtc::scoped_refptr<MediaStreamInterface>>&),
+ (override));
+ MOCK_METHOD(int, AttachmentId, (), (const, override));
+};
+
+} // namespace webrtc
+
+#endif // PC_TEST_MOCK_RTP_RECEIVER_INTERNAL_H_
diff --git a/third_party/libwebrtc/pc/test/mock_rtp_sender_internal.h b/third_party/libwebrtc/pc/test/mock_rtp_sender_internal.h
new file mode 100644
index 0000000000..4cfb2cfeaf
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/mock_rtp_sender_internal.h
@@ -0,0 +1,112 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_TEST_MOCK_RTP_SENDER_INTERNAL_H_
+#define PC_TEST_MOCK_RTP_SENDER_INTERNAL_H_
+
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "pc/rtp_sender.h"
+#include "test/gmock.h"
+
+namespace webrtc {
+
+// The definition of MockRtpSender is copied in to avoid multiple inheritance.
+class MockRtpSenderInternal : public RtpSenderInternal {
+ public:
+ // RtpSenderInterface methods.
+ MOCK_METHOD(bool, SetTrack, (MediaStreamTrackInterface*), (override));
+ MOCK_METHOD(rtc::scoped_refptr<MediaStreamTrackInterface>,
+ track,
+ (),
+ (const, override));
+ MOCK_METHOD(uint32_t, ssrc, (), (const, override));
+ MOCK_METHOD(rtc::scoped_refptr<DtlsTransportInterface>,
+ dtls_transport,
+ (),
+ (const, override));
+ MOCK_METHOD(cricket::MediaType, media_type, (), (const, override));
+ MOCK_METHOD(std::string, id, (), (const, override));
+ MOCK_METHOD(std::vector<std::string>, stream_ids, (), (const, override));
+ MOCK_METHOD(std::vector<RtpEncodingParameters>,
+ init_send_encodings,
+ (),
+ (const, override));
+ MOCK_METHOD(void,
+ set_transport,
+ (rtc::scoped_refptr<DtlsTransportInterface>),
+ (override));
+ MOCK_METHOD(RtpParameters, GetParameters, (), (const, override));
+ MOCK_METHOD(RtpParameters, GetParametersInternal, (), (const, override));
+ MOCK_METHOD(RtpParameters,
+ GetParametersInternalWithAllLayers,
+ (),
+ (const, override));
+ MOCK_METHOD(RTCError, SetParameters, (const RtpParameters&), (override));
+ MOCK_METHOD(void,
+ SetParametersAsync,
+ (const RtpParameters&, SetParametersCallback),
+ (override));
+ MOCK_METHOD(void,
+ SetParametersInternal,
+ (const RtpParameters&, SetParametersCallback, bool blocking),
+ (override));
+ MOCK_METHOD(RTCError,
+ SetParametersInternalWithAllLayers,
+ (const RtpParameters&),
+ (override));
+ MOCK_METHOD(RTCError,
+ CheckCodecParameters,
+ (const RtpParameters&),
+ (override));
+ MOCK_METHOD(void,
+ SetCodecPreferences,
+ (std::vector<cricket::VideoCodec>),
+ (override));
+ MOCK_METHOD(rtc::scoped_refptr<DtmfSenderInterface>,
+ GetDtmfSender,
+ (),
+ (const, override));
+ MOCK_METHOD(void,
+ SetFrameEncryptor,
+ (rtc::scoped_refptr<FrameEncryptorInterface>),
+ (override));
+ MOCK_METHOD(rtc::scoped_refptr<FrameEncryptorInterface>,
+ GetFrameEncryptor,
+ (),
+ (const, override));
+ MOCK_METHOD(void,
+ SetEncoderToPacketizerFrameTransformer,
+ (rtc::scoped_refptr<FrameTransformerInterface>),
+ (override));
+ MOCK_METHOD(void,
+ SetEncoderSelector,
+ (std::unique_ptr<VideoEncoderFactory::EncoderSelectorInterface>),
+ (override));
+
+ // RtpSenderInternal methods.
+ MOCK_METHOD1(SetMediaChannel, void(cricket::MediaSendChannelInterface*));
+ MOCK_METHOD1(SetSsrc, void(uint32_t));
+ MOCK_METHOD1(set_stream_ids, void(const std::vector<std::string>&));
+ MOCK_METHOD1(SetStreams, void(const std::vector<std::string>&));
+ MOCK_METHOD1(set_init_send_encodings,
+ void(const std::vector<RtpEncodingParameters>&));
+ MOCK_METHOD0(Stop, void());
+ MOCK_CONST_METHOD0(AttachmentId, int());
+ MOCK_METHOD1(DisableEncodingLayers,
+ RTCError(const std::vector<std::string>&));
+ MOCK_METHOD0(SetTransceiverAsStopped, void());
+};
+
+} // namespace webrtc
+
+#endif // PC_TEST_MOCK_RTP_SENDER_INTERNAL_H_
diff --git a/third_party/libwebrtc/pc/test/mock_voice_media_receive_channel_interface.h b/third_party/libwebrtc/pc/test/mock_voice_media_receive_channel_interface.h
new file mode 100644
index 0000000000..adb1201239
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/mock_voice_media_receive_channel_interface.h
@@ -0,0 +1,130 @@
+/*
+ * Copyright 2023 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef PC_TEST_MOCK_VOICE_MEDIA_RECEIVE_CHANNEL_INTERFACE_H_
+#define PC_TEST_MOCK_VOICE_MEDIA_RECEIVE_CHANNEL_INTERFACE_H_
+
+#include <memory>
+#include <set>
+#include <string>
+#include <vector>
+
+#include "api/call/audio_sink.h"
+#include "media/base/media_channel.h"
+#include "media/base/media_channel_impl.h"
+#include "modules/rtp_rtcp/source/rtp_packet_received.h"
+#include "rtc_base/gunit.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+namespace cricket {
+
+class MockVoiceMediaReceiveChannelInterface
+ : public VoiceMediaReceiveChannelInterface {
+ public:
+ MockVoiceMediaReceiveChannelInterface() {
+ ON_CALL(*this, AsVoiceReceiveChannel).WillByDefault(testing::Return(this));
+ }
+
+ // VoiceMediaReceiveChannelInterface
+ MOCK_METHOD(bool,
+ SetReceiverParameters,
+ (const AudioReceiverParameters& params),
+ (override));
+ MOCK_METHOD(webrtc::RtpParameters,
+ GetRtpReceiverParameters,
+ (uint32_t ssrc),
+ (const, override));
+ MOCK_METHOD(std::vector<webrtc::RtpSource>,
+ GetSources,
+ (uint32_t ssrc),
+ (const, override));
+ MOCK_METHOD(webrtc::RtpParameters,
+ GetDefaultRtpReceiveParameters,
+ (),
+ (const, override));
+ MOCK_METHOD(void, SetPlayout, (bool playout), (override));
+ MOCK_METHOD(bool,
+ SetOutputVolume,
+ (uint32_t ssrc, double volume),
+ (override));
+ MOCK_METHOD(bool, SetDefaultOutputVolume, (double volume), (override));
+ MOCK_METHOD(void,
+ SetRawAudioSink,
+ (uint32_t ssrc, std::unique_ptr<webrtc::AudioSinkInterface> sink),
+ (override));
+ MOCK_METHOD(void,
+ SetDefaultRawAudioSink,
+ (std::unique_ptr<webrtc::AudioSinkInterface> sink),
+ (override));
+ MOCK_METHOD(bool,
+ GetStats,
+ (VoiceMediaReceiveInfo * stats, bool reset_legacy),
+ (override));
+ MOCK_METHOD(void, SetReceiveNackEnabled, (bool enabled), (override));
+ MOCK_METHOD(void, SetReceiveNonSenderRttEnabled, (bool enabled), (override));
+
+ // MediaReceiveChannelInterface
+ MOCK_METHOD(VideoMediaReceiveChannelInterface*,
+ AsVideoReceiveChannel,
+ (),
+ (override));
+ MOCK_METHOD(VoiceMediaReceiveChannelInterface*,
+ AsVoiceReceiveChannel,
+ (),
+ (override));
+ MOCK_METHOD(cricket::MediaType, media_type, (), (const, override));
+ MOCK_METHOD(bool, AddRecvStream, (const StreamParams& sp), (override));
+ MOCK_METHOD(bool, RemoveRecvStream, (uint32_t ssrc), (override));
+ MOCK_METHOD(void, ResetUnsignaledRecvStream, (), (override));
+ MOCK_METHOD(void,
+ SetInterface,
+ (MediaChannelNetworkInterface * iface),
+ (override));
+ MOCK_METHOD(void,
+ OnPacketReceived,
+ (const webrtc::RtpPacketReceived& packet),
+ (override));
+ MOCK_METHOD(absl::optional<uint32_t>,
+ GetUnsignaledSsrc,
+ (),
+ (const, override));
+ MOCK_METHOD(void,
+ ChooseReceiverReportSsrc,
+ (const std::set<uint32_t>& choices),
+ (override));
+ MOCK_METHOD(void, OnDemuxerCriteriaUpdatePending, (), (override));
+ MOCK_METHOD(void, OnDemuxerCriteriaUpdateComplete, (), (override));
+ MOCK_METHOD(
+ void,
+ SetFrameDecryptor,
+ (uint32_t ssrc,
+ rtc::scoped_refptr<webrtc::FrameDecryptorInterface> frame_decryptor),
+ (override));
+ MOCK_METHOD(
+ void,
+ SetDepacketizerToDecoderFrameTransformer,
+ (uint32_t ssrc,
+ rtc::scoped_refptr<webrtc::FrameTransformerInterface> frame_transformer),
+ (override));
+ MOCK_METHOD(bool,
+ SetBaseMinimumPlayoutDelayMs,
+ (uint32_t ssrc, int delay_ms),
+ (override));
+ MOCK_METHOD(absl::optional<int>,
+ GetBaseMinimumPlayoutDelayMs,
+ (uint32_t ssrc),
+ (const, override));
+};
+
+static_assert(!std::is_abstract_v<MockVoiceMediaReceiveChannelInterface>, "");
+
+} // namespace cricket
+
+#endif // PC_TEST_MOCK_VOICE_MEDIA_RECEIVE_CHANNEL_INTERFACE_H_
diff --git a/third_party/libwebrtc/pc/test/peer_connection_test_wrapper.cc b/third_party/libwebrtc/pc/test/peer_connection_test_wrapper.cc
new file mode 100644
index 0000000000..1a3dd3109a
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/peer_connection_test_wrapper.cc
@@ -0,0 +1,429 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/test/peer_connection_test_wrapper.h"
+
+#include <stddef.h>
+
+#include <memory>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "absl/strings/match.h"
+#include "absl/types/optional.h"
+#include "api/audio/audio_mixer.h"
+#include "api/create_peerconnection_factory.h"
+#include "api/media_types.h"
+#include "api/sequence_checker.h"
+#include "api/video_codecs/video_decoder_factory.h"
+#include "api/video_codecs/video_decoder_factory_template.h"
+#include "api/video_codecs/video_decoder_factory_template_dav1d_adapter.h"
+#include "api/video_codecs/video_decoder_factory_template_libvpx_vp8_adapter.h"
+#include "api/video_codecs/video_decoder_factory_template_libvpx_vp9_adapter.h"
+#include "api/video_codecs/video_decoder_factory_template_open_h264_adapter.h"
+#include "api/video_codecs/video_encoder_factory.h"
+#include "api/video_codecs/video_encoder_factory_template.h"
+#include "api/video_codecs/video_encoder_factory_template_libaom_av1_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template_libvpx_vp8_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template_libvpx_vp9_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template_open_h264_adapter.h"
+#include "media/engine/simulcast_encoder_adapter.h"
+#include "modules/audio_device/include/audio_device.h"
+#include "modules/audio_processing/include/audio_processing.h"
+#include "p2p/base/fake_port_allocator.h"
+#include "p2p/base/port_allocator.h"
+#include "pc/test/fake_periodic_video_source.h"
+#include "pc/test/fake_rtc_certificate_generator.h"
+#include "pc/test/mock_peer_connection_observers.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/rtc_certificate_generator.h"
+#include "rtc_base/string_encode.h"
+#include "rtc_base/time_utils.h"
+#include "test/gtest.h"
+
+using webrtc::FakeVideoTrackRenderer;
+using webrtc::IceCandidateInterface;
+using webrtc::MediaStreamInterface;
+using webrtc::MediaStreamTrackInterface;
+using webrtc::MockSetSessionDescriptionObserver;
+using webrtc::PeerConnectionInterface;
+using webrtc::RtpReceiverInterface;
+using webrtc::SdpType;
+using webrtc::SessionDescriptionInterface;
+using webrtc::VideoTrackInterface;
+
+namespace {
+const char kStreamIdBase[] = "stream_id";
+const char kVideoTrackLabelBase[] = "video_track";
+const char kAudioTrackLabelBase[] = "audio_track";
+constexpr int kMaxWait = 10000;
+constexpr int kTestAudioFrameCount = 3;
+constexpr int kTestVideoFrameCount = 3;
+
+class FuzzyMatchedVideoEncoderFactory : public webrtc::VideoEncoderFactory {
+ public:
+ std::vector<webrtc::SdpVideoFormat> GetSupportedFormats() const override {
+ return factory_.GetSupportedFormats();
+ }
+
+ std::unique_ptr<webrtc::VideoEncoder> CreateVideoEncoder(
+ const webrtc::SdpVideoFormat& format) override {
+ if (absl::optional<webrtc::SdpVideoFormat> original_format =
+ webrtc::FuzzyMatchSdpVideoFormat(factory_.GetSupportedFormats(),
+ format)) {
+ return std::make_unique<webrtc::SimulcastEncoderAdapter>(
+ &factory_, *original_format);
+ }
+
+ return nullptr;
+ }
+
+ CodecSupport QueryCodecSupport(
+ const webrtc::SdpVideoFormat& format,
+ absl::optional<std::string> scalability_mode) const override {
+ return factory_.QueryCodecSupport(format, scalability_mode);
+ }
+
+ private:
+ webrtc::VideoEncoderFactoryTemplate<webrtc::LibvpxVp8EncoderTemplateAdapter,
+ webrtc::LibvpxVp9EncoderTemplateAdapter,
+ webrtc::OpenH264EncoderTemplateAdapter,
+ webrtc::LibaomAv1EncoderTemplateAdapter>
+ factory_;
+};
+} // namespace
+
+void PeerConnectionTestWrapper::Connect(PeerConnectionTestWrapper* caller,
+ PeerConnectionTestWrapper* callee) {
+ caller->SignalOnIceCandidateReady.connect(
+ callee, &PeerConnectionTestWrapper::AddIceCandidate);
+ callee->SignalOnIceCandidateReady.connect(
+ caller, &PeerConnectionTestWrapper::AddIceCandidate);
+
+ caller->SignalOnSdpReady.connect(callee,
+ &PeerConnectionTestWrapper::ReceiveOfferSdp);
+ callee->SignalOnSdpReady.connect(
+ caller, &PeerConnectionTestWrapper::ReceiveAnswerSdp);
+}
+
+PeerConnectionTestWrapper::PeerConnectionTestWrapper(
+ const std::string& name,
+ rtc::SocketServer* socket_server,
+ rtc::Thread* network_thread,
+ rtc::Thread* worker_thread)
+ : name_(name),
+ socket_server_(socket_server),
+ network_thread_(network_thread),
+ worker_thread_(worker_thread),
+ pending_negotiation_(false) {
+ pc_thread_checker_.Detach();
+}
+
+PeerConnectionTestWrapper::~PeerConnectionTestWrapper() {
+ RTC_DCHECK_RUN_ON(&pc_thread_checker_);
+ // To avoid flaky bot failures, make sure fake sources are stopped prior to
+ // closing the peer connections. See https://crbug.com/webrtc/15018.
+ StopFakeVideoSources();
+ // Either network_thread or worker_thread might be active at this point.
+ // Relying on ~PeerConnection to properly wait for them doesn't work,
+ // as a vptr race might occur (before we enter the destruction body).
+ // See: bugs.webrtc.org/9847
+ if (pc()) {
+ pc()->Close();
+ }
+}
+
+bool PeerConnectionTestWrapper::CreatePc(
+ const webrtc::PeerConnectionInterface::RTCConfiguration& config,
+ rtc::scoped_refptr<webrtc::AudioEncoderFactory> audio_encoder_factory,
+ rtc::scoped_refptr<webrtc::AudioDecoderFactory> audio_decoder_factory) {
+ std::unique_ptr<cricket::PortAllocator> port_allocator(
+ new cricket::FakePortAllocator(
+ network_thread_,
+ std::make_unique<rtc::BasicPacketSocketFactory>(socket_server_),
+ &field_trials_));
+
+ RTC_DCHECK_RUN_ON(&pc_thread_checker_);
+
+ fake_audio_capture_module_ = FakeAudioCaptureModule::Create();
+ if (fake_audio_capture_module_ == nullptr) {
+ return false;
+ }
+
+ peer_connection_factory_ = webrtc::CreatePeerConnectionFactory(
+ network_thread_, worker_thread_, rtc::Thread::Current(),
+ rtc::scoped_refptr<webrtc::AudioDeviceModule>(fake_audio_capture_module_),
+ audio_encoder_factory, audio_decoder_factory,
+ std::make_unique<FuzzyMatchedVideoEncoderFactory>(),
+ std::make_unique<webrtc::VideoDecoderFactoryTemplate<
+ webrtc::LibvpxVp8DecoderTemplateAdapter,
+ webrtc::LibvpxVp9DecoderTemplateAdapter,
+ webrtc::OpenH264DecoderTemplateAdapter,
+ webrtc::Dav1dDecoderTemplateAdapter>>(),
+ nullptr /* audio_mixer */, nullptr /* audio_processing */);
+ if (!peer_connection_factory_) {
+ return false;
+ }
+
+ std::unique_ptr<rtc::RTCCertificateGeneratorInterface> cert_generator(
+ new FakeRTCCertificateGenerator());
+ webrtc::PeerConnectionDependencies deps(this);
+ deps.allocator = std::move(port_allocator);
+ deps.cert_generator = std::move(cert_generator);
+ auto result = peer_connection_factory_->CreatePeerConnectionOrError(
+ config, std::move(deps));
+ if (result.ok()) {
+ peer_connection_ = result.MoveValue();
+ return true;
+ } else {
+ return false;
+ }
+}
+
+rtc::scoped_refptr<webrtc::DataChannelInterface>
+PeerConnectionTestWrapper::CreateDataChannel(
+ const std::string& label,
+ const webrtc::DataChannelInit& init) {
+ auto result = peer_connection_->CreateDataChannelOrError(label, &init);
+ if (!result.ok()) {
+ RTC_LOG(LS_ERROR) << "CreateDataChannel failed: "
+ << ToString(result.error().type()) << " "
+ << result.error().message();
+ return nullptr;
+ }
+ return result.MoveValue();
+}
+
+absl::optional<webrtc::RtpCodecCapability>
+PeerConnectionTestWrapper::FindFirstSendCodecWithName(
+ cricket::MediaType media_type,
+ const std::string& name) const {
+ std::vector<webrtc::RtpCodecCapability> codecs =
+ peer_connection_factory_->GetRtpSenderCapabilities(media_type).codecs;
+ for (const auto& codec : codecs) {
+ if (absl::EqualsIgnoreCase(codec.name, name)) {
+ return codec;
+ }
+ }
+ return absl::nullopt;
+}
+
+void PeerConnectionTestWrapper::WaitForNegotiation() {
+ EXPECT_TRUE_WAIT(!pending_negotiation_, kMaxWait);
+}
+
+void PeerConnectionTestWrapper::OnSignalingChange(
+ webrtc::PeerConnectionInterface::SignalingState new_state) {
+ if (new_state == webrtc::PeerConnectionInterface::SignalingState::kStable) {
+ pending_negotiation_ = false;
+ }
+}
+
+void PeerConnectionTestWrapper::OnAddTrack(
+ rtc::scoped_refptr<RtpReceiverInterface> receiver,
+ const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& streams) {
+ RTC_LOG(LS_INFO) << "PeerConnectionTestWrapper " << name_ << ": OnAddTrack";
+ if (receiver->track()->kind() == MediaStreamTrackInterface::kVideoKind) {
+ auto* video_track =
+ static_cast<VideoTrackInterface*>(receiver->track().get());
+ renderer_ = std::make_unique<FakeVideoTrackRenderer>(video_track);
+ }
+}
+
+void PeerConnectionTestWrapper::OnIceCandidate(
+ const IceCandidateInterface* candidate) {
+ std::string sdp;
+ EXPECT_TRUE(candidate->ToString(&sdp));
+ SignalOnIceCandidateReady(candidate->sdp_mid(), candidate->sdp_mline_index(),
+ sdp);
+}
+
+void PeerConnectionTestWrapper::OnDataChannel(
+ rtc::scoped_refptr<webrtc::DataChannelInterface> data_channel) {
+ SignalOnDataChannel(data_channel.get());
+}
+
+void PeerConnectionTestWrapper::OnSuccess(SessionDescriptionInterface* desc) {
+ // This callback should take the ownership of `desc`.
+ std::unique_ptr<SessionDescriptionInterface> owned_desc(desc);
+ std::string sdp;
+ EXPECT_TRUE(desc->ToString(&sdp));
+
+ RTC_LOG(LS_INFO) << "PeerConnectionTestWrapper " << name_ << ": "
+ << webrtc::SdpTypeToString(desc->GetType())
+ << " sdp created: " << sdp;
+
+ SetLocalDescription(desc->GetType(), sdp);
+
+ SignalOnSdpReady(sdp);
+}
+
+void PeerConnectionTestWrapper::CreateOffer(
+ const webrtc::PeerConnectionInterface::RTCOfferAnswerOptions& options) {
+ RTC_LOG(LS_INFO) << "PeerConnectionTestWrapper " << name_ << ": CreateOffer.";
+ pending_negotiation_ = true;
+ peer_connection_->CreateOffer(this, options);
+}
+
+void PeerConnectionTestWrapper::CreateAnswer(
+ const webrtc::PeerConnectionInterface::RTCOfferAnswerOptions& options) {
+ RTC_LOG(LS_INFO) << "PeerConnectionTestWrapper " << name_
+ << ": CreateAnswer.";
+ pending_negotiation_ = true;
+ peer_connection_->CreateAnswer(this, options);
+}
+
+void PeerConnectionTestWrapper::ReceiveOfferSdp(const std::string& sdp) {
+ SetRemoteDescription(SdpType::kOffer, sdp);
+ CreateAnswer(webrtc::PeerConnectionInterface::RTCOfferAnswerOptions());
+}
+
+void PeerConnectionTestWrapper::ReceiveAnswerSdp(const std::string& sdp) {
+ SetRemoteDescription(SdpType::kAnswer, sdp);
+}
+
+void PeerConnectionTestWrapper::SetLocalDescription(SdpType type,
+ const std::string& sdp) {
+ RTC_LOG(LS_INFO) << "PeerConnectionTestWrapper " << name_
+ << ": SetLocalDescription " << webrtc::SdpTypeToString(type)
+ << " " << sdp;
+
+ auto observer = rtc::make_ref_counted<MockSetSessionDescriptionObserver>();
+ peer_connection_->SetLocalDescription(
+ observer.get(), webrtc::CreateSessionDescription(type, sdp).release());
+}
+
+void PeerConnectionTestWrapper::SetRemoteDescription(SdpType type,
+ const std::string& sdp) {
+ RTC_LOG(LS_INFO) << "PeerConnectionTestWrapper " << name_
+ << ": SetRemoteDescription " << webrtc::SdpTypeToString(type)
+ << " " << sdp;
+
+ auto observer = rtc::make_ref_counted<MockSetSessionDescriptionObserver>();
+ peer_connection_->SetRemoteDescription(
+ observer.get(), webrtc::CreateSessionDescription(type, sdp).release());
+}
+
+void PeerConnectionTestWrapper::AddIceCandidate(const std::string& sdp_mid,
+ int sdp_mline_index,
+ const std::string& candidate) {
+ std::unique_ptr<webrtc::IceCandidateInterface> owned_candidate(
+ webrtc::CreateIceCandidate(sdp_mid, sdp_mline_index, candidate, NULL));
+ EXPECT_TRUE(peer_connection_->AddIceCandidate(owned_candidate.get()));
+}
+
+void PeerConnectionTestWrapper::WaitForCallEstablished() {
+ WaitForConnection();
+ WaitForAudio();
+ WaitForVideo();
+}
+
+void PeerConnectionTestWrapper::WaitForConnection() {
+ EXPECT_TRUE_WAIT(CheckForConnection(), kMaxWait);
+ RTC_LOG(LS_INFO) << "PeerConnectionTestWrapper " << name_ << ": Connected.";
+}
+
+bool PeerConnectionTestWrapper::CheckForConnection() {
+ return (peer_connection_->ice_connection_state() ==
+ PeerConnectionInterface::kIceConnectionConnected) ||
+ (peer_connection_->ice_connection_state() ==
+ PeerConnectionInterface::kIceConnectionCompleted);
+}
+
+void PeerConnectionTestWrapper::WaitForAudio() {
+ EXPECT_TRUE_WAIT(CheckForAudio(), kMaxWait);
+ RTC_LOG(LS_INFO) << "PeerConnectionTestWrapper " << name_
+ << ": Got enough audio frames.";
+}
+
+bool PeerConnectionTestWrapper::CheckForAudio() {
+ return (fake_audio_capture_module_->frames_received() >=
+ kTestAudioFrameCount);
+}
+
+void PeerConnectionTestWrapper::WaitForVideo() {
+ EXPECT_TRUE_WAIT(CheckForVideo(), kMaxWait);
+ RTC_LOG(LS_INFO) << "PeerConnectionTestWrapper " << name_
+ << ": Got enough video frames.";
+}
+
+bool PeerConnectionTestWrapper::CheckForVideo() {
+ if (!renderer_) {
+ return false;
+ }
+ return (renderer_->num_rendered_frames() >= kTestVideoFrameCount);
+}
+
+void PeerConnectionTestWrapper::GetAndAddUserMedia(
+ bool audio,
+ const cricket::AudioOptions& audio_options,
+ bool video) {
+ rtc::scoped_refptr<webrtc::MediaStreamInterface> stream =
+ GetUserMedia(audio, audio_options, video);
+ for (const auto& audio_track : stream->GetAudioTracks()) {
+ EXPECT_TRUE(peer_connection_->AddTrack(audio_track, {stream->id()}).ok());
+ }
+ for (const auto& video_track : stream->GetVideoTracks()) {
+ EXPECT_TRUE(peer_connection_->AddTrack(video_track, {stream->id()}).ok());
+ }
+}
+
+rtc::scoped_refptr<webrtc::MediaStreamInterface>
+PeerConnectionTestWrapper::GetUserMedia(
+ bool audio,
+ const cricket::AudioOptions& audio_options,
+ bool video,
+ webrtc::Resolution resolution) {
+ std::string stream_id =
+ kStreamIdBase + rtc::ToString(num_get_user_media_calls_++);
+ rtc::scoped_refptr<webrtc::MediaStreamInterface> stream =
+ peer_connection_factory_->CreateLocalMediaStream(stream_id);
+
+ if (audio) {
+ cricket::AudioOptions options = audio_options;
+ // Disable highpass filter so that we can get all the test audio frames.
+ options.highpass_filter = false;
+ rtc::scoped_refptr<webrtc::AudioSourceInterface> source =
+ peer_connection_factory_->CreateAudioSource(options);
+ rtc::scoped_refptr<webrtc::AudioTrackInterface> audio_track(
+ peer_connection_factory_->CreateAudioTrack(kAudioTrackLabelBase,
+ source.get()));
+ stream->AddTrack(audio_track);
+ }
+
+ if (video) {
+ // Set max frame rate to 10fps to reduce the risk of the tests to be flaky.
+ webrtc::FakePeriodicVideoSource::Config config;
+ config.frame_interval_ms = 100;
+ config.timestamp_offset_ms = rtc::TimeMillis();
+ config.width = resolution.width;
+ config.height = resolution.height;
+
+ auto source = rtc::make_ref_counted<webrtc::FakePeriodicVideoTrackSource>(
+ config, /* remote */ false);
+ fake_video_sources_.push_back(source);
+
+ std::string videotrack_label = stream_id + kVideoTrackLabelBase;
+ rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track(
+ peer_connection_factory_->CreateVideoTrack(source, videotrack_label));
+
+ stream->AddTrack(video_track);
+ }
+ return stream;
+}
+
+void PeerConnectionTestWrapper::StopFakeVideoSources() {
+ for (const auto& fake_video_source : fake_video_sources_) {
+ fake_video_source->fake_periodic_source().Stop();
+ }
+ fake_video_sources_.clear();
+}
diff --git a/third_party/libwebrtc/pc/test/peer_connection_test_wrapper.h b/third_party/libwebrtc/pc/test/peer_connection_test_wrapper.h
new file mode 100644
index 0000000000..751c9462d3
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/peer_connection_test_wrapper.h
@@ -0,0 +1,150 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_TEST_PEER_CONNECTION_TEST_WRAPPER_H_
+#define PC_TEST_PEER_CONNECTION_TEST_WRAPPER_H_
+
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "api/audio_codecs/audio_decoder_factory.h"
+#include "api/audio_codecs/audio_encoder_factory.h"
+#include "api/audio_options.h"
+#include "api/data_channel_interface.h"
+#include "api/jsep.h"
+#include "api/media_stream_interface.h"
+#include "api/peer_connection_interface.h"
+#include "api/rtc_error.h"
+#include "api/rtp_parameters.h"
+#include "api/rtp_receiver_interface.h"
+#include "api/scoped_refptr.h"
+#include "api/sequence_checker.h"
+#include "api/video/resolution.h"
+#include "pc/test/fake_audio_capture_module.h"
+#include "pc/test/fake_periodic_video_source.h"
+#include "pc/test/fake_periodic_video_track_source.h"
+#include "pc/test/fake_video_track_renderer.h"
+#include "rtc_base/third_party/sigslot/sigslot.h"
+#include "rtc_base/thread.h"
+#include "test/scoped_key_value_config.h"
+
+class PeerConnectionTestWrapper
+ : public webrtc::PeerConnectionObserver,
+ public webrtc::CreateSessionDescriptionObserver,
+ public sigslot::has_slots<> {
+ public:
+ static void Connect(PeerConnectionTestWrapper* caller,
+ PeerConnectionTestWrapper* callee);
+
+ PeerConnectionTestWrapper(const std::string& name,
+ rtc::SocketServer* socket_server,
+ rtc::Thread* network_thread,
+ rtc::Thread* worker_thread);
+ virtual ~PeerConnectionTestWrapper();
+
+ bool CreatePc(
+ const webrtc::PeerConnectionInterface::RTCConfiguration& config,
+ rtc::scoped_refptr<webrtc::AudioEncoderFactory> audio_encoder_factory,
+ rtc::scoped_refptr<webrtc::AudioDecoderFactory> audio_decoder_factory);
+
+ rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> pc_factory()
+ const {
+ return peer_connection_factory_;
+ }
+ webrtc::PeerConnectionInterface* pc() { return peer_connection_.get(); }
+
+ rtc::scoped_refptr<webrtc::DataChannelInterface> CreateDataChannel(
+ const std::string& label,
+ const webrtc::DataChannelInit& init);
+
+ absl::optional<webrtc::RtpCodecCapability> FindFirstSendCodecWithName(
+ cricket::MediaType media_type,
+ const std::string& name) const;
+
+ void WaitForNegotiation();
+
+ // Implements PeerConnectionObserver.
+ void OnSignalingChange(
+ webrtc::PeerConnectionInterface::SignalingState new_state) override;
+ void OnAddTrack(
+ rtc::scoped_refptr<webrtc::RtpReceiverInterface> receiver,
+ const std::vector<rtc::scoped_refptr<webrtc::MediaStreamInterface>>&
+ streams) override;
+ void OnDataChannel(
+ rtc::scoped_refptr<webrtc::DataChannelInterface> data_channel) override;
+ void OnRenegotiationNeeded() override {}
+ void OnIceConnectionChange(
+ webrtc::PeerConnectionInterface::IceConnectionState new_state) override {}
+ void OnIceGatheringChange(
+ webrtc::PeerConnectionInterface::IceGatheringState new_state) override {}
+ void OnIceCandidate(const webrtc::IceCandidateInterface* candidate) override;
+
+ // Implements CreateSessionDescriptionObserver.
+ void OnSuccess(webrtc::SessionDescriptionInterface* desc) override;
+ void OnFailure(webrtc::RTCError) override {}
+
+ void CreateOffer(
+ const webrtc::PeerConnectionInterface::RTCOfferAnswerOptions& options);
+ void CreateAnswer(
+ const webrtc::PeerConnectionInterface::RTCOfferAnswerOptions& options);
+ void ReceiveOfferSdp(const std::string& sdp);
+ void ReceiveAnswerSdp(const std::string& sdp);
+ void AddIceCandidate(const std::string& sdp_mid,
+ int sdp_mline_index,
+ const std::string& candidate);
+ void WaitForCallEstablished();
+ void WaitForConnection();
+ void WaitForAudio();
+ void WaitForVideo();
+ void GetAndAddUserMedia(bool audio,
+ const cricket::AudioOptions& audio_options,
+ bool video);
+
+ // sigslots
+ sigslot::signal3<const std::string&, int, const std::string&>
+ SignalOnIceCandidateReady;
+ sigslot::signal1<const std::string&> SignalOnSdpReady;
+ sigslot::signal1<webrtc::DataChannelInterface*> SignalOnDataChannel;
+
+ rtc::scoped_refptr<webrtc::MediaStreamInterface> GetUserMedia(
+ bool audio,
+ const cricket::AudioOptions& audio_options,
+ bool video,
+ webrtc::Resolution resolution = {
+ .width = webrtc::FakePeriodicVideoSource::kDefaultWidth,
+ .height = webrtc::FakePeriodicVideoSource::kDefaultHeight});
+ void StopFakeVideoSources();
+
+ private:
+ void SetLocalDescription(webrtc::SdpType type, const std::string& sdp);
+ void SetRemoteDescription(webrtc::SdpType type, const std::string& sdp);
+ bool CheckForConnection();
+ bool CheckForAudio();
+ bool CheckForVideo();
+
+ webrtc::test::ScopedKeyValueConfig field_trials_;
+ std::string name_;
+ rtc::SocketServer* const socket_server_;
+ rtc::Thread* const network_thread_;
+ rtc::Thread* const worker_thread_;
+ webrtc::SequenceChecker pc_thread_checker_;
+ rtc::scoped_refptr<webrtc::PeerConnectionInterface> peer_connection_;
+ rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface>
+ peer_connection_factory_;
+ rtc::scoped_refptr<FakeAudioCaptureModule> fake_audio_capture_module_;
+ std::unique_ptr<webrtc::FakeVideoTrackRenderer> renderer_;
+ int num_get_user_media_calls_ = 0;
+ bool pending_negotiation_;
+ std::vector<rtc::scoped_refptr<webrtc::FakePeriodicVideoTrackSource>>
+ fake_video_sources_;
+};
+
+#endif // PC_TEST_PEER_CONNECTION_TEST_WRAPPER_H_
diff --git a/third_party/libwebrtc/pc/test/rtc_stats_obtainer.h b/third_party/libwebrtc/pc/test/rtc_stats_obtainer.h
new file mode 100644
index 0000000000..b1cc701a06
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/rtc_stats_obtainer.h
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_TEST_RTC_STATS_OBTAINER_H_
+#define PC_TEST_RTC_STATS_OBTAINER_H_
+
+#include "api/make_ref_counted.h"
+#include "api/sequence_checker.h"
+#include "api/stats/rtc_stats_collector_callback.h"
+#include "api/stats/rtc_stats_report.h"
+#include "rtc_base/gunit.h"
+
+namespace webrtc {
+
+// Test helper that captures the report delivered to GetStats() so tests can
+// inspect it synchronously on the calling sequence.
+class RTCStatsObtainer : public RTCStatsCollectorCallback {
+ public:
+  // Creates a ref-counted obtainer. If `report_ptr` is non-null, each
+  // delivered report is additionally copied into it.
+  static rtc::scoped_refptr<RTCStatsObtainer> Create(
+      rtc::scoped_refptr<const RTCStatsReport>* report_ptr = nullptr) {
+    return rtc::make_ref_counted<RTCStatsObtainer>(report_ptr);
+  }
+
+  void OnStatsDelivered(
+      const rtc::scoped_refptr<const RTCStatsReport>& report) override {
+    EXPECT_TRUE(thread_checker_.IsCurrent());
+    report_ = report;
+    if (report_ptr_ != nullptr) {
+      *report_ptr_ = report_;
+    }
+  }
+
+  // Returns the last delivered report, or null if none has arrived yet.
+  rtc::scoped_refptr<const RTCStatsReport> report() const {
+    EXPECT_TRUE(thread_checker_.IsCurrent());
+    return report_;
+  }
+
+ protected:
+  explicit RTCStatsObtainer(
+      rtc::scoped_refptr<const RTCStatsReport>* report_ptr)
+      : report_ptr_(report_ptr) {}
+
+ private:
+  // Guards that delivery and access happen on the construction sequence.
+  SequenceChecker thread_checker_;
+  rtc::scoped_refptr<const RTCStatsReport> report_;
+  // Optional out-parameter owned by the caller; may be null.
+  rtc::scoped_refptr<const RTCStatsReport>* report_ptr_;
+};
+
+} // namespace webrtc
+
+#endif // PC_TEST_RTC_STATS_OBTAINER_H_
diff --git a/third_party/libwebrtc/pc/test/rtp_transport_test_util.h b/third_party/libwebrtc/pc/test/rtp_transport_test_util.h
new file mode 100644
index 0000000000..593ee002c9
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/rtp_transport_test_util.h
@@ -0,0 +1,98 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_TEST_RTP_TRANSPORT_TEST_UTIL_H_
+#define PC_TEST_RTP_TRANSPORT_TEST_UTIL_H_
+
+#include <utility>
+
+#include "call/rtp_packet_sink_interface.h"
+#include "modules/rtp_rtcp/source/rtp_packet_received.h"
+#include "pc/rtp_transport_internal.h"
+
+namespace webrtc {
+
+// Used to handle the signals when the RtpTransport receives an RTP/RTCP packet.
+// Used in Rtp/Srtp/DtlsTransport unit tests.
+class TransportObserver : public RtpPacketSinkInterface {
+ public:
+  TransportObserver() {}
+
+  // Subscribes to `rtp_transport`'s RTCP-received, ready-to-send and
+  // un-demuxable-RTP notifications, routing them into the counters below.
+  // The transport must outlive this observer.
+  explicit TransportObserver(RtpTransportInternal* rtp_transport) {
+    rtp_transport->SubscribeRtcpPacketReceived(
+        this, [this](rtc::CopyOnWriteBuffer* buffer, int64_t packet_time_ms) {
+          OnRtcpPacketReceived(buffer, packet_time_ms);
+        });
+    rtp_transport->SubscribeReadyToSend(
+        this, [this](bool arg) { OnReadyToSend(arg); });
+    rtp_transport->SetUnDemuxableRtpPacketReceivedHandler(
+        [this](webrtc::RtpPacketReceived& packet) {
+          OnUndemuxableRtpPacket(packet);
+        });
+  }
+
+  // RtpPacketInterface override.
+  // Counts the demuxed RTP packet and keeps a copy of its raw buffer.
+  void OnRtpPacket(const RtpPacketReceived& packet) override {
+    rtp_count_++;
+    last_recv_rtp_packet_ = packet.Buffer();
+  }
+
+  // Invoked for RTP packets the transport could not demux to a sink.
+  void OnUndemuxableRtpPacket(const RtpPacketReceived& packet) {
+    un_demuxable_rtp_count_++;
+  }
+
+  // NOTE(review): the subscribing lambda above names this value
+  // `packet_time_ms` while this parameter says `_us` — confirm the intended
+  // unit against RtpTransportInternal's callback contract.
+  void OnRtcpPacketReceived(rtc::CopyOnWriteBuffer* packet,
+                            int64_t packet_time_us) {
+    rtcp_count_++;
+    last_recv_rtcp_packet_ = *packet;
+  }
+
+  int rtp_count() const { return rtp_count_; }
+  int un_demuxable_rtp_count() const { return un_demuxable_rtp_count_; }
+  int rtcp_count() const { return rtcp_count_; }
+
+  rtc::CopyOnWriteBuffer last_recv_rtp_packet() {
+    return last_recv_rtp_packet_;
+  }
+
+  rtc::CopyOnWriteBuffer last_recv_rtcp_packet() {
+    return last_recv_rtcp_packet_;
+  }
+
+  // Runs the optional test hook first, then records the new state; the
+  // signal count increments on every callback, regardless of value change.
+  void OnReadyToSend(bool ready) {
+    if (action_on_ready_to_send_) {
+      action_on_ready_to_send_(ready);
+    }
+    ready_to_send_signal_count_++;
+    ready_to_send_ = ready;
+  }
+
+  bool ready_to_send() { return ready_to_send_; }
+
+  int ready_to_send_signal_count() { return ready_to_send_signal_count_; }
+
+  // Installs a hook invoked at the start of every OnReadyToSend().
+  void SetActionOnReadyToSend(absl::AnyInvocable<void(bool)> action) {
+    action_on_ready_to_send_ = std::move(action);
+  }
+
+ private:
+  bool ready_to_send_ = false;
+  int rtp_count_ = 0;
+  int un_demuxable_rtp_count_ = 0;
+  int rtcp_count_ = 0;
+  int ready_to_send_signal_count_ = 0;
+  rtc::CopyOnWriteBuffer last_recv_rtp_packet_;
+  rtc::CopyOnWriteBuffer last_recv_rtcp_packet_;
+  absl::AnyInvocable<void(bool)> action_on_ready_to_send_;
+};
+
+} // namespace webrtc
+
+#endif // PC_TEST_RTP_TRANSPORT_TEST_UTIL_H_
diff --git a/third_party/libwebrtc/pc/test/simulcast_layer_util.cc b/third_party/libwebrtc/pc/test/simulcast_layer_util.cc
new file mode 100644
index 0000000000..6ce09b5e9b
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/simulcast_layer_util.cc
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2023 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/test/simulcast_layer_util.h"
+
+#include "absl/algorithm/container.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+
+// Builds one SimulcastLayer per rid; `active[i]` gives the i-th layer's
+// active state. A layer is "paused" exactly when it is not active.
+std::vector<cricket::SimulcastLayer> CreateLayers(
+    const std::vector<std::string>& rids,
+    const std::vector<bool>& active) {
+  RTC_DCHECK_EQ(rids.size(), active.size());
+  std::vector<cricket::SimulcastLayer> layers;
+  layers.reserve(rids.size());
+  for (size_t i = 0; i < rids.size(); ++i) {
+    layers.emplace_back(rids[i], /*is_paused=*/!active[i]);
+  }
+  return layers;
+}
+
+// Convenience overload: every rid shares the same `active` state.
+std::vector<cricket::SimulcastLayer> CreateLayers(
+    const std::vector<std::string>& rids,
+    bool active) {
+  std::vector<bool> active_flags(rids.size(), active);
+  return CreateLayers(rids, active_flags);
+}
+
+// Translates simulcast layers into transceiver-init send encodings,
+// mirroring each layer's rid and active (non-paused) state.
+RtpTransceiverInit CreateTransceiverInit(
+    const std::vector<cricket::SimulcastLayer>& layers) {
+  RtpTransceiverInit init;
+  init.send_encodings.reserve(layers.size());
+  for (const cricket::SimulcastLayer& simulcast_layer : layers) {
+    RtpEncodingParameters parameters;
+    parameters.rid = simulcast_layer.rid;
+    parameters.active = !simulcast_layer.is_paused;
+    init.send_encodings.push_back(parameters);
+  }
+  return init;
+}
+
+// Clears the simulcast description of the first m= section in `sd` and
+// returns the value it previously held.
+cricket::SimulcastDescription RemoveSimulcast(SessionDescriptionInterface* sd) {
+  auto* media_desc = sd->description()->contents()[0].media_description();
+  cricket::SimulcastDescription previous = media_desc->simulcast_description();
+  media_desc->set_simulcast_description(cricket::SimulcastDescription());
+  return previous;
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/test/simulcast_layer_util.h b/third_party/libwebrtc/pc/test/simulcast_layer_util.h
new file mode 100644
index 0000000000..6822e3c9fd
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/simulcast_layer_util.h
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2023 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_TEST_SIMULCAST_LAYER_UTIL_H_
+#define PC_TEST_SIMULCAST_LAYER_UTIL_H_
+
+#include <string>
+#include <vector>
+
+#include "api/jsep.h"
+#include "api/rtp_transceiver_interface.h"
+#include "pc/session_description.h"
+#include "pc/simulcast_description.h"
+
+namespace webrtc {
+
+// Builds one SimulcastLayer per rid; `active[i]` gives the i-th layer's
+// active (non-paused) state. `rids` and `active` must be the same size.
+std::vector<cricket::SimulcastLayer> CreateLayers(
+    const std::vector<std::string>& rids,
+    const std::vector<bool>& active);
+
+// Convenience overload: every layer shares the same `active` state.
+std::vector<cricket::SimulcastLayer> CreateLayers(
+    const std::vector<std::string>& rids,
+    bool active);
+
+// Translates simulcast layers into RtpTransceiverInit send encodings.
+RtpTransceiverInit CreateTransceiverInit(
+    const std::vector<cricket::SimulcastLayer>& layers);
+
+// Clears the simulcast description of the first media section in `sd` and
+// returns the value it previously held.
+cricket::SimulcastDescription RemoveSimulcast(SessionDescriptionInterface* sd);
+
+} // namespace webrtc
+
+#endif // PC_TEST_SIMULCAST_LAYER_UTIL_H_
diff --git a/third_party/libwebrtc/pc/test/srtp_test_util.h b/third_party/libwebrtc/pc/test/srtp_test_util.h
new file mode 100644
index 0000000000..ae02310eba
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/srtp_test_util.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_TEST_SRTP_TEST_UTIL_H_
+#define PC_TEST_SRTP_TEST_UTIL_H_
+
+#include <cstdint>
+#include <string>
+
+namespace rtc {
+
+// Cipher-suite name constants; definitions live in the SRTP implementation.
+extern const char kCsAesCm128HmacSha1_32[];
+extern const char kCsAeadAes128Gcm[];
+extern const char kCsAeadAes256Gcm[];
+
+// 30-byte test master key+salt blobs; `kTestKeyLen` excludes the implicit
+// NUL terminator the string literals carry.
+static const uint8_t kTestKey1[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZ1234";
+static const uint8_t kTestKey2[] = "4321ZYXWVUTSRQPONMLKJIHGFEDCBA";
+static const int kTestKeyLen = 30;
+
+// Returns the RTP authentication tag length in bytes for cipher suite `cs`.
+// The checks are mutually exclusive, so their order does not matter.
+static int rtp_auth_tag_len(const std::string& cs) {
+  if (cs == kCsAeadAes128Gcm || cs == kCsAeadAes256Gcm) {
+    return 16;
+  }
+  if (cs == kCsAesCm128HmacSha1_32) {
+    return 4;
+  }
+  return 10;
+}
+// Returns the RTCP authentication tag length in bytes for cipher suite `cs`.
+static int rtcp_auth_tag_len(const std::string& cs) {
+  const bool is_gcm = cs == kCsAeadAes128Gcm || cs == kCsAeadAes256Gcm;
+  return is_gcm ? 16 : 10;
+}
+
+} // namespace rtc
+
+#endif // PC_TEST_SRTP_TEST_UTIL_H_
diff --git a/third_party/libwebrtc/pc/test/svc_e2e_tests.cc b/third_party/libwebrtc/pc/test/svc_e2e_tests.cc
new file mode 100644
index 0000000000..ae35c7f676
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/svc_e2e_tests.cc
@@ -0,0 +1,507 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include <memory>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "api/media_stream_interface.h"
+#include "api/stats/rtcstats_objects.h"
+#include "api/test/create_network_emulation_manager.h"
+#include "api/test/create_peer_connection_quality_test_frame_generator.h"
+#include "api/test/create_peerconnection_quality_test_fixture.h"
+#include "api/test/frame_generator_interface.h"
+#include "api/test/metrics/global_metrics_logger_and_exporter.h"
+#include "api/test/network_emulation_manager.h"
+#include "api/test/pclf/media_configuration.h"
+#include "api/test/pclf/media_quality_test_params.h"
+#include "api/test/pclf/peer_configurer.h"
+#include "api/test/peerconnection_quality_test_fixture.h"
+#include "api/test/simulated_network.h"
+#include "api/test/time_controller.h"
+#include "api/video_codecs/vp9_profile.h"
+#include "call/simulated_network.h"
+#include "modules/video_coding/codecs/vp9/include/vp9.h"
+#include "modules/video_coding/svc/scalability_mode_util.h"
+#include "rtc_base/containers/flat_map.h"
+#include "system_wrappers/include/field_trial.h"
+#include "test/field_trial.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer.h"
+#include "test/pc/e2e/network_quality_metrics_reporter.h"
+#include "test/testsupport/file_utils.h"
+
+namespace webrtc {
+namespace {
+
+using ::cricket::kAv1CodecName;
+using ::cricket::kH264CodecName;
+using ::cricket::kVp8CodecName;
+using ::cricket::kVp9CodecName;
+using ::testing::Combine;
+using ::testing::Optional;
+using ::testing::UnitTest;
+using ::testing::Values;
+using ::testing::ValuesIn;
+using ::webrtc::webrtc_pc_e2e::EmulatedSFUConfig;
+using ::webrtc::webrtc_pc_e2e::PeerConfigurer;
+using ::webrtc::webrtc_pc_e2e::RunParams;
+using ::webrtc::webrtc_pc_e2e::ScreenShareConfig;
+using ::webrtc::webrtc_pc_e2e::VideoCodecConfig;
+using ::webrtc::webrtc_pc_e2e::VideoConfig;
+
+// Builds a two-peer quality-test fixture over the given emulated network
+// links. `alice_configurer`/`bob_configurer` customize each peer before it
+// is added; an optional video quality analyzer may be injected.
+std::unique_ptr<webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture>
+CreateTestFixture(absl::string_view test_case_name,
+                  TimeController& time_controller,
+                  std::pair<EmulatedNetworkManagerInterface*,
+                            EmulatedNetworkManagerInterface*> network_links,
+                  rtc::FunctionView<void(PeerConfigurer*)> alice_configurer,
+                  rtc::FunctionView<void(PeerConfigurer*)> bob_configurer,
+                  std::unique_ptr<VideoQualityAnalyzerInterface>
+                      video_quality_analyzer = nullptr) {
+  auto fixture = webrtc_pc_e2e::CreatePeerConnectionE2EQualityTestFixture(
+      std::string(test_case_name), time_controller, nullptr,
+      std::move(video_quality_analyzer));
+  // One peer per side of the emulated link, in the order given.
+  auto alice = std::make_unique<PeerConfigurer>(
+      network_links.first->network_dependencies());
+  auto bob = std::make_unique<PeerConfigurer>(
+      network_links.second->network_dependencies());
+  alice_configurer(alice.get());
+  bob_configurer(bob.get());
+  fixture->AddPeer(std::move(alice));
+  fixture->AddPeer(std::move(bob));
+  return fixture;
+}
+
+// Takes the currently active field trials string and appends `new_trial_string`
+// to it, returning the combined configuration.
+std::string AppendFieldTrials(std::string new_trial_string) {
+  std::string combined(field_trial::GetFieldTrialString());
+  combined += new_trial_string;
+  return combined;
+}
+
+// Whether the Dependency Descriptor header extension is advertised for the
+// test variant (see the field trial appended in the test body).
+enum class UseDependencyDescriptor {
+  Enabled,
+  Disabled,
+};
+
+// Test parameter: a codec name plus a scalability mode and the spatial/
+// temporal layer counts that mode implies.
+struct SvcTestParameters {
+  // Derives the expected layer counts from `scalability_mode_str`;
+  // RTC_CHECK-fails on an unknown mode string.
+  static SvcTestParameters Create(const std::string& codec_name,
+                                  const std::string& scalability_mode_str) {
+    absl::optional<ScalabilityMode> scalability_mode =
+        ScalabilityModeFromString(scalability_mode_str);
+    RTC_CHECK(scalability_mode.has_value())
+        << "Unsupported scalability mode: " << scalability_mode_str;
+
+    int num_spatial_layers =
+        ScalabilityModeToNumSpatialLayers(*scalability_mode);
+    int num_temporal_layers =
+        ScalabilityModeToNumTemporalLayers(*scalability_mode);
+
+    return SvcTestParameters{codec_name, scalability_mode_str,
+                             num_spatial_layers, num_temporal_layers};
+  }
+
+  std::string codec_name;
+  // Scalability mode string such as "L2T3" or "S3T3h".
+  std::string scalability_mode;
+  int expected_spatial_layers;
+  int expected_temporal_layers;
+};
+
+// Parameterized over (SvcTestParameters, UseDependencyDescriptor).
+class SvcTest : public testing::TestWithParam<
+                    std::tuple<SvcTestParameters, UseDependencyDescriptor>> {
+ public:
+  SvcTest()
+      : video_codec_config(ToVideoCodecConfig(SvcTestParameters().codec_name)) {
+  }
+
+  // Maps a codec name onto a VideoCodecConfig; VP9 additionally pins the
+  // fmtp profile-id to profile 0.
+  static VideoCodecConfig ToVideoCodecConfig(absl::string_view codec) {
+    if (codec == cricket::kVp9CodecName) {
+      return VideoCodecConfig(
+          cricket::kVp9CodecName,
+          {{kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile0)}});
+    }
+
+    return VideoCodecConfig(codec);
+  }
+
+  // Accessor for the first tuple element. NOTE(review): deliberately shares
+  // its name with the struct `SvcTestParameters` declared above.
+  const SvcTestParameters& SvcTestParameters() const {
+    return std::get<0>(GetParam());
+  }
+
+  bool UseDependencyDescriptor() const {
+    return std::get<1>(GetParam()) == UseDependencyDescriptor::Enabled;
+  }
+
+  // S (simulcast) modes are recognized by the leading 'S' in the mode string.
+  bool IsSMode() const {
+    return SvcTestParameters().scalability_mode[0] == 'S';
+  }
+
+ protected:
+  VideoCodecConfig video_codec_config;
+};
+
+// Builds the parameterized-test suffix, e.g. "L2T3_DD" when the dependency
+// descriptor is enabled for scalability mode L2T3.
+std::string SvcTestNameGenerator(
+    const testing::TestParamInfo<SvcTest::ParamType>& info) {
+  const auto& [svc_params, use_dd] = info.param;
+  std::string name = svc_params.scalability_mode;
+  if (use_dd == UseDependencyDescriptor::Enabled) {
+    name += "_DD";
+  }
+  return name;
+}
+
+} // namespace
+
+// Records how many frames are seen for each spatial and temporal index at the
+// encoder and decoder level.
+class SvcVideoQualityAnalyzer : public DefaultVideoQualityAnalyzer {
+ public:
+  // Maps spatial index -> (temporal index -> frame count).
+  using SpatialTemporalLayerCounts =
+      webrtc::flat_map<int, webrtc::flat_map<int, int>>;
+
+  // PSNR/SSIM are disabled: this analyzer only counts layers, and skipping
+  // the pixel comparisons keeps the simulated-time run cheap.
+  explicit SvcVideoQualityAnalyzer(webrtc::Clock* clock)
+      : DefaultVideoQualityAnalyzer(clock,
+                                    test::GetGlobalMetricsLogger(),
+                                    DefaultVideoQualityAnalyzerOptions{
+                                        .compute_psnr = false,
+                                        .compute_ssim = false,
+                                    }) {}
+  ~SvcVideoQualityAnalyzer() override = default;
+
+  // Tallies the encoded frame's (spatial, temporal) indices — absent indices
+  // count as layer 0 — then forwards to the base analyzer.
+  void OnFrameEncoded(absl::string_view peer_name,
+                      uint16_t frame_id,
+                      const EncodedImage& encoded_image,
+                      const EncoderStats& stats,
+                      bool discarded) override {
+    absl::optional<int> spatial_id = encoded_image.SpatialIndex();
+    absl::optional<int> temporal_id = encoded_image.TemporalIndex();
+    encoder_layers_seen_[spatial_id.value_or(0)][temporal_id.value_or(0)]++;
+    DefaultVideoQualityAnalyzer::OnFrameEncoded(
+        peer_name, frame_id, encoded_image, stats, discarded);
+  }
+
+  // Tallies decode-side layers: a frame at spatial id S counts toward every
+  // lower spatial layer that contributed a non-empty frame chunk.
+  void OnFramePreDecode(absl::string_view peer_name,
+                        uint16_t frame_id,
+                        const EncodedImage& input_image) override {
+    absl::optional<int> spatial_id = input_image.SpatialIndex();
+    absl::optional<int> temporal_id = input_image.TemporalIndex();
+    if (!spatial_id) {
+      decoder_layers_seen_[0][temporal_id.value_or(0)]++;
+    } else {
+      for (int i = 0; i <= *spatial_id; ++i) {
+        // If there are no spatial layers (for example VP8), we still want to
+        // record the temporal index for pseudo-layer "0" frames.
+        if (*spatial_id == 0 ||
+            input_image.SpatialLayerFrameSize(i).value_or(0) > 0) {
+          decoder_layers_seen_[i][temporal_id.value_or(0)]++;
+        }
+      }
+    }
+    DefaultVideoQualityAnalyzer::OnFramePreDecode(peer_name, frame_id,
+                                                  input_image);
+  }
+
+  // Records the last scalability mode seen in outbound-rtp stats, so the
+  // test can assert it matches the configured mode.
+  void OnStatsReports(
+      absl::string_view pc_label,
+      const rtc::scoped_refptr<const RTCStatsReport>& report) override {
+    // Extract the scalability mode reported in the stats.
+    auto outbound_stats = report->GetStatsOfType<RTCOutboundRtpStreamStats>();
+    for (const auto& stat : outbound_stats) {
+      if (stat->scalability_mode.is_defined()) {
+        reported_scalability_mode_ = *stat->scalability_mode;
+      }
+    }
+  }
+
+  const SpatialTemporalLayerCounts& encoder_layers_seen() const {
+    return encoder_layers_seen_;
+  }
+  const SpatialTemporalLayerCounts& decoder_layers_seen() const {
+    return decoder_layers_seen_;
+  }
+  const absl::optional<std::string> reported_scalability_mode() const {
+    return reported_scalability_mode_;
+  }
+
+ private:
+  SpatialTemporalLayerCounts encoder_layers_seen_;
+  SpatialTemporalLayerCounts decoder_layers_seen_;
+  absl::optional<std::string> reported_scalability_mode_;
+};
+
+// Matches a SpatialTemporalLayerCounts map containing exactly
+// `expected_spatial_layers` spatial entries, each with exactly
+// `expected_temporal_layers` temporal entries, all indices in range.
+MATCHER_P2(HasSpatialAndTemporalLayers,
+           expected_spatial_layers,
+           expected_temporal_layers,
+           "") {
+  if (arg.size() != static_cast<size_t>(expected_spatial_layers)) {
+    *result_listener << "spatial layer count mismatch expected "
+                     << expected_spatial_layers << " but got " << arg.size();
+    return false;
+  }
+  for (const auto& [spatial_layer_index, temporal_layers] : arg) {
+    if (spatial_layer_index < 0 ||
+        spatial_layer_index >= expected_spatial_layers) {
+      *result_listener << "spatial layer index is not in range [0,"
+                       << expected_spatial_layers << "[.";
+      return false;
+    }
+
+    if (temporal_layers.size() !=
+        static_cast<size_t>(expected_temporal_layers)) {
+      *result_listener << "temporal layer count mismatch on spatial layer "
+                       << spatial_layer_index << ", expected "
+                       << expected_temporal_layers << " but got "
+                       << temporal_layers.size();
+      return false;
+    }
+    // Frame counts themselves are not checked, only index validity.
+    for (const auto& [temporal_layer_index, temporal_layer_frame_count] :
+         temporal_layers) {
+      if (temporal_layer_index < 0 ||
+          temporal_layer_index >= expected_temporal_layers) {
+        *result_listener << "temporal layer index on spatial layer "
+                         << spatial_layer_index << " is not in range [0,"
+                         << expected_temporal_layers << "[.";
+        return false;
+      }
+    }
+  }
+  return true;
+}
+
+// S-mode variant: the decoder only sees the top spatial layer, so exactly
+// one spatial entry is expected, at index `expected_spatial_layers - 1`,
+// holding `expected_temporal_layers` in-range temporal entries.
+MATCHER_P2(HasSpatialAndTemporalLayersSMode,
+           expected_spatial_layers,
+           expected_temporal_layers,
+           "") {
+  if (arg.size() != 1) {
+    *result_listener << "spatial layer count mismatch expected 1 but got "
+                     << arg.size();
+    return false;
+  }
+  for (const auto& [spatial_layer_index, temporal_layers] : arg) {
+    if (spatial_layer_index != expected_spatial_layers - 1) {
+      *result_listener << "spatial layer index is not equal to "
+                       << expected_spatial_layers - 1 << ".";
+      return false;
+    }
+
+    if (temporal_layers.size() !=
+        static_cast<size_t>(expected_temporal_layers)) {
+      *result_listener << "temporal layer count mismatch on spatial layer "
+                       << spatial_layer_index << ", expected "
+                       << expected_temporal_layers << " but got "
+                       << temporal_layers.size();
+      return false;
+    }
+    for (const auto& [temporal_layer_index, temporal_layer_frame_count] :
+         temporal_layers) {
+      if (temporal_layer_index < 0 ||
+          temporal_layer_index >= expected_temporal_layers) {
+        *result_listener << "temporal layer index on spatial layer "
+                         << spatial_layer_index << " is not in range [0,"
+                         << expected_temporal_layers << "[.";
+        return false;
+      }
+    }
+  }
+  return true;
+}
+
+// End-to-end: runs a 10-second simulated call and verifies that the
+// configured (codec, scalability mode) pair yields the expected spatial and
+// temporal layers at both encoder and decoder, and that stats report the
+// configured scalability mode.
+TEST_P(SvcTest, ScalabilityModeSupported) {
+  std::string trials;
+  if (UseDependencyDescriptor()) {
+    trials += "WebRTC-DependencyDescriptorAdvertised/Enabled/";
+  }
+  webrtc::test::ScopedFieldTrials override_trials(AppendFieldTrials(trials));
+  std::unique_ptr<NetworkEmulationManager> network_emulation_manager =
+      CreateNetworkEmulationManager(webrtc::TimeMode::kSimulated);
+  auto analyzer = std::make_unique<SvcVideoQualityAnalyzer>(
+      network_emulation_manager->time_controller()->GetClock());
+  // Raw pointer kept for assertions; ownership transfers into the fixture.
+  SvcVideoQualityAnalyzer* analyzer_ptr = analyzer.get();
+  auto fixture = CreateTestFixture(
+      UnitTest::GetInstance()->current_test_info()->name(),
+      *network_emulation_manager->time_controller(),
+      network_emulation_manager->CreateEndpointPairWithTwoWayRoutes(
+          BuiltInNetworkBehaviorConfig()),
+      [this](PeerConfigurer* alice) {
+        VideoConfig video(/*stream_label=*/"alice-video", /*width=*/1850,
+                          /*height=*/1110, /*fps=*/30);
+        if (IsSMode()) {
+          // In S-modes an emulated SFU forwards only the top layer indices.
+          video.emulated_sfu_config = EmulatedSFUConfig(
+              SvcTestParameters().expected_spatial_layers - 1,
+              SvcTestParameters().expected_temporal_layers - 1);
+        }
+        RtpEncodingParameters parameters;
+        parameters.scalability_mode = SvcTestParameters().scalability_mode;
+        video.encoding_params.push_back(parameters);
+        alice->AddVideoConfig(
+            std::move(video),
+            CreateScreenShareFrameGenerator(
+                video, ScreenShareConfig(TimeDelta::Seconds(5))));
+        alice->SetVideoCodecs({video_codec_config});
+      },
+      [](PeerConfigurer* bob) {}, std::move(analyzer));
+  fixture->Run(RunParams(TimeDelta::Seconds(10)));
+  EXPECT_THAT(analyzer_ptr->encoder_layers_seen(),
+              HasSpatialAndTemporalLayers(
+                  SvcTestParameters().expected_spatial_layers,
+                  SvcTestParameters().expected_temporal_layers));
+  if (IsSMode()) {
+    EXPECT_THAT(analyzer_ptr->decoder_layers_seen(),
+                HasSpatialAndTemporalLayersSMode(
+                    SvcTestParameters().expected_spatial_layers,
+                    SvcTestParameters().expected_temporal_layers));
+  } else {
+    EXPECT_THAT(analyzer_ptr->decoder_layers_seen(),
+                HasSpatialAndTemporalLayers(
+                    SvcTestParameters().expected_spatial_layers,
+                    SvcTestParameters().expected_temporal_layers));
+  }
+  EXPECT_THAT(analyzer_ptr->reported_scalability_mode(),
+              Optional(SvcTestParameters().scalability_mode));
+
+  // Dump per-layer frame counts to aid debugging on failure.
+  RTC_LOG(LS_INFO) << "Encoder layers seen: "
+                   << analyzer_ptr->encoder_layers_seen().size();
+  for (auto& [spatial_index, temporal_layers] :
+       analyzer_ptr->encoder_layers_seen()) {
+    for (auto& [temporal_index, frame_count] : temporal_layers) {
+      RTC_LOG(LS_INFO) << "  Layer: " << spatial_index << "," << temporal_index
+                       << " frames: " << frame_count;
+    }
+  }
+  RTC_LOG(LS_INFO) << "Decoder layers seen: "
+                   << analyzer_ptr->decoder_layers_seen().size();
+  for (auto& [spatial_index, temporal_layers] :
+       analyzer_ptr->decoder_layers_seen()) {
+    for (auto& [temporal_index, frame_count] : temporal_layers) {
+      RTC_LOG(LS_INFO) << "  Layer: " << spatial_index << "," << temporal_index
+                       << " frames: " << frame_count;
+    }
+  }
+}
+
+// VP8: only single-spatial-layer (L1Tx) modes are instantiated.
+INSTANTIATE_TEST_SUITE_P(
+    SvcTestVP8,
+    SvcTest,
+    Combine(Values(SvcTestParameters::Create(kVp8CodecName, "L1T1"),
+                   SvcTestParameters::Create(kVp8CodecName, "L1T2"),
+                   SvcTestParameters::Create(kVp8CodecName, "L1T3")),
+            Values(UseDependencyDescriptor::Disabled,
+                   UseDependencyDescriptor::Enabled)),
+    SvcTestNameGenerator);
+
+#if defined(WEBRTC_USE_H264)
+INSTANTIATE_TEST_SUITE_P(
+    SvcTestH264,
+    SvcTest,
+    Combine(ValuesIn({
+                SvcTestParameters::Create(kH264CodecName, "L1T1"),
+                SvcTestParameters::Create(kH264CodecName, "L1T2"),
+                SvcTestParameters::Create(kH264CodecName, "L1T3"),
+            }),
+            // Like AV1, H.264 RTP format does not include SVC related
+            // information, so always use Dependency Descriptor.
+            Values(UseDependencyDescriptor::Enabled)),
+    SvcTestNameGenerator);
+#endif
+
+#if defined(RTC_ENABLE_VP9)
+INSTANTIATE_TEST_SUITE_P(
+    SvcTestVP9,
+    SvcTest,
+    Combine(
+        // TODO(bugs.webrtc.org/13960): Fix and enable remaining VP9 modes
+        ValuesIn({
+            SvcTestParameters::Create(kVp9CodecName, "L1T1"),
+            SvcTestParameters::Create(kVp9CodecName, "L1T2"),
+            SvcTestParameters::Create(kVp9CodecName, "L1T3"),
+            SvcTestParameters::Create(kVp9CodecName, "L2T1"),
+            SvcTestParameters::Create(kVp9CodecName, "L2T1h"),
+            SvcTestParameters::Create(kVp9CodecName, "L2T1_KEY"),
+            SvcTestParameters::Create(kVp9CodecName, "L2T2"),
+            SvcTestParameters::Create(kVp9CodecName, "L2T2h"),
+            SvcTestParameters::Create(kVp9CodecName, "L2T2_KEY"),
+            SvcTestParameters::Create(kVp9CodecName, "L2T2_KEY_SHIFT"),
+            SvcTestParameters::Create(kVp9CodecName, "L2T3"),
+            SvcTestParameters::Create(kVp9CodecName, "L2T3h"),
+            SvcTestParameters::Create(kVp9CodecName, "L2T3_KEY"),
+            // SvcTestParameters::Create(kVp9CodecName, "L2T3_KEY_SHIFT"),
+            SvcTestParameters::Create(kVp9CodecName, "L3T1"),
+            SvcTestParameters::Create(kVp9CodecName, "L3T1h"),
+            SvcTestParameters::Create(kVp9CodecName, "L3T1_KEY"),
+            SvcTestParameters::Create(kVp9CodecName, "L3T2"),
+            SvcTestParameters::Create(kVp9CodecName, "L3T2h"),
+            SvcTestParameters::Create(kVp9CodecName, "L3T2_KEY"),
+            // SvcTestParameters::Create(kVp9CodecName, "L3T2_KEY_SHIFT"),
+            SvcTestParameters::Create(kVp9CodecName, "L3T3"),
+            SvcTestParameters::Create(kVp9CodecName, "L3T3h"),
+            SvcTestParameters::Create(kVp9CodecName, "L3T3_KEY"),
+            // SvcTestParameters::Create(kVp9CodecName, "L3T3_KEY_SHIFT"),
+            SvcTestParameters::Create(kVp9CodecName, "S2T1"),
+            SvcTestParameters::Create(kVp9CodecName, "S2T1h"),
+            SvcTestParameters::Create(kVp9CodecName, "S2T2"),
+            SvcTestParameters::Create(kVp9CodecName, "S2T2h"),
+            SvcTestParameters::Create(kVp9CodecName, "S2T3"),
+            SvcTestParameters::Create(kVp9CodecName, "S2T3h"),
+            SvcTestParameters::Create(kVp9CodecName, "S3T1"),
+            SvcTestParameters::Create(kVp9CodecName, "S3T1h"),
+            SvcTestParameters::Create(kVp9CodecName, "S3T2"),
+            SvcTestParameters::Create(kVp9CodecName, "S3T2h"),
+            SvcTestParameters::Create(kVp9CodecName, "S3T3"),
+            SvcTestParameters::Create(kVp9CodecName, "S3T3h"),
+        }),
+        Values(UseDependencyDescriptor::Disabled,
+               UseDependencyDescriptor::Enabled)),
+    SvcTestNameGenerator);
+
+// AV1 RTP format carries no SVC information either, so the Dependency
+// Descriptor is always enabled here.
+INSTANTIATE_TEST_SUITE_P(
+    SvcTestAV1,
+    SvcTest,
+    Combine(ValuesIn({
+                SvcTestParameters::Create(kAv1CodecName, "L1T1"),
+                SvcTestParameters::Create(kAv1CodecName, "L1T2"),
+                SvcTestParameters::Create(kAv1CodecName, "L1T3"),
+                SvcTestParameters::Create(kAv1CodecName, "L2T1"),
+                SvcTestParameters::Create(kAv1CodecName, "L2T1h"),
+                SvcTestParameters::Create(kAv1CodecName, "L2T1_KEY"),
+                SvcTestParameters::Create(kAv1CodecName, "L2T2"),
+                SvcTestParameters::Create(kAv1CodecName, "L2T2h"),
+                SvcTestParameters::Create(kAv1CodecName, "L2T2_KEY"),
+                SvcTestParameters::Create(kAv1CodecName, "L2T2_KEY_SHIFT"),
+                SvcTestParameters::Create(kAv1CodecName, "L2T3"),
+                SvcTestParameters::Create(kAv1CodecName, "L2T3h"),
+                SvcTestParameters::Create(kAv1CodecName, "L2T3_KEY"),
+                // SvcTestParameters::Create(kAv1CodecName, "L2T3_KEY_SHIFT"),
+                SvcTestParameters::Create(kAv1CodecName, "L3T1"),
+                SvcTestParameters::Create(kAv1CodecName, "L3T1h"),
+                SvcTestParameters::Create(kAv1CodecName, "L3T1_KEY"),
+                SvcTestParameters::Create(kAv1CodecName, "L3T2"),
+                SvcTestParameters::Create(kAv1CodecName, "L3T2h"),
+                SvcTestParameters::Create(kAv1CodecName, "L3T2_KEY"),
+                // SvcTestParameters::Create(kAv1CodecName, "L3T2_KEY_SHIFT"),
+                SvcTestParameters::Create(kAv1CodecName, "L3T3"),
+                SvcTestParameters::Create(kAv1CodecName, "L3T3h"),
+                SvcTestParameters::Create(kAv1CodecName, "L3T3_KEY"),
+                // SvcTestParameters::Create(kAv1CodecName, "L3T3_KEY_SHIFT"),
+                SvcTestParameters::Create(kAv1CodecName, "S2T1"),
+                SvcTestParameters::Create(kAv1CodecName, "S2T1h"),
+                SvcTestParameters::Create(kAv1CodecName, "S2T2"),
+                SvcTestParameters::Create(kAv1CodecName, "S2T2h"),
+                SvcTestParameters::Create(kAv1CodecName, "S2T3"),
+                SvcTestParameters::Create(kAv1CodecName, "S2T3h"),
+                SvcTestParameters::Create(kAv1CodecName, "S3T1"),
+                SvcTestParameters::Create(kAv1CodecName, "S3T1h"),
+                SvcTestParameters::Create(kAv1CodecName, "S3T2"),
+                SvcTestParameters::Create(kAv1CodecName, "S3T2h"),
+                SvcTestParameters::Create(kAv1CodecName, "S3T3"),
+                SvcTestParameters::Create(kAv1CodecName, "S3T3h"),
+            }),
+            Values(UseDependencyDescriptor::Enabled)),
+    SvcTestNameGenerator);
+
+#endif
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/test/test_sdp_strings.h b/third_party/libwebrtc/pc/test/test_sdp_strings.h
new file mode 100644
index 0000000000..e4ad325d31
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/test_sdp_strings.h
@@ -0,0 +1,184 @@
+/*
+ * Copyright 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file contains SDP strings used for testing.
+
+#ifndef PC_TEST_TEST_SDP_STRINGS_H_
+#define PC_TEST_TEST_SDP_STRINGS_H_
+
+namespace webrtc {
+
+// SDP offer string captured from a Nightly Firefox build: one audio m-section
+// (Opus/PCMU/PCMA/telephone-event) and one VP8 video m-section, each with host
+// and srflx ICE candidates, plus an SCTP data-channel m-section when
+// WEBRTC_HAVE_SCTP is defined at compile time.
+static const char kFireFoxSdpOffer[] =
+ "v=0\r\n"
+ "o=Mozilla-SIPUA 23551 0 IN IP4 0.0.0.0\r\n"
+ "s=SIP Call\r\n"
+ "t=0 0\r\n"
+ "a=ice-ufrag:e5785931\r\n"
+ "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n"
+ "a=fingerprint:sha-256 A7:24:72:CA:6E:02:55:39:BA:66:DF:6E:CC:4C:D8:B0:1A:"
+ "BF:1A:56:65:7D:F4:03:AD:7E:77:43:2A:29:EC:93\r\n"
+ "m=audio 36993 RTP/SAVPF 109 0 8 101\r\n"
+ "c=IN IP4 74.95.2.170\r\n"
+ "a=rtpmap:109 opus/48000/2\r\n"
+ "a=ptime:20\r\n"
+ "a=rtcp-mux\r\n"
+ "a=rtpmap:0 PCMU/8000\r\n"
+ "a=rtpmap:8 PCMA/8000\r\n"
+ "a=rtpmap:101 telephone-event/8000\r\n"
+ "a=fmtp:101 0-15\r\n"
+ "a=sendrecv\r\n"
+ "a=candidate:0 1 UDP 2112946431 172.16.191.1 61725 typ host\r\n"
+ "a=candidate:2 1 UDP 2112487679 172.16.131.1 58798 typ host\r\n"
+ "a=candidate:4 1 UDP 2113667327 10.0.254.2 58122 typ host\r\n"
+ "a=candidate:5 1 UDP 1694302207 74.95.2.170 36993 typ srflx raddr "
+ "10.0.254.2 rport 58122\r\n"
+ "a=candidate:0 2 UDP 2112946430 172.16.191.1 55025 typ host\r\n"
+ "a=candidate:2 2 UDP 2112487678 172.16.131.1 63576 typ host\r\n"
+ "a=candidate:4 2 UDP 2113667326 10.0.254.2 50962 typ host\r\n"
+ "a=candidate:5 2 UDP 1694302206 74.95.2.170 41028 typ srflx raddr"
+ " 10.0.254.2 rport 50962\r\n"
+ "m=video 38826 RTP/SAVPF 120\r\n"
+ "c=IN IP4 74.95.2.170\r\n"
+ "a=rtcp-mux\r\n"
+ "a=rtpmap:120 VP8/90000\r\n"
+ "a=sendrecv\r\n"
+ "a=candidate:0 1 UDP 2112946431 172.16.191.1 62017 typ host\r\n"
+ "a=candidate:2 1 UDP 2112487679 172.16.131.1 59741 typ host\r\n"
+ "a=candidate:4 1 UDP 2113667327 10.0.254.2 62652 typ host\r\n"
+ "a=candidate:5 1 UDP 1694302207 74.95.2.170 38826 typ srflx raddr"
+ " 10.0.254.2 rport 62652\r\n"
+ "a=candidate:0 2 UDP 2112946430 172.16.191.1 63440 typ host\r\n"
+ "a=candidate:2 2 UDP 2112487678 172.16.131.1 51847 typ host\r\n"
+ "a=candidate:4 2 UDP 2113667326 10.0.254.2 58890 typ host\r\n"
+ "a=candidate:5 2 UDP 1694302206 74.95.2.170 33611 typ srflx raddr"
+ " 10.0.254.2 rport 58890\r\n"
+#ifdef WEBRTC_HAVE_SCTP
+ "m=application 45536 DTLS/SCTP 5000\r\n"
+ "c=IN IP4 74.95.2.170\r\n"
+ "a=fmtp:5000 protocol=webrtc-datachannel;streams=16\r\n"
+ "a=sendrecv\r\n"
+ "a=candidate:0 1 UDP 2112946431 172.16.191.1 60248 typ host\r\n"
+ "a=candidate:2 1 UDP 2112487679 172.16.131.1 55925 typ host\r\n"
+ "a=candidate:4 1 UDP 2113667327 10.0.254.2 65268 typ host\r\n"
+ "a=candidate:5 1 UDP 1694302207 74.95.2.170 45536 typ srflx raddr"
+ " 10.0.254.2 rport 65268\r\n"
+ "a=candidate:0 2 UDP 2112946430 172.16.191.1 49162 typ host\r\n"
+ "a=candidate:2 2 UDP 2112487678 172.16.131.1 59635 typ host\r\n"
+ "a=candidate:4 2 UDP 2113667326 10.0.254.2 61232 typ host\r\n"
+ "a=candidate:5 2 UDP 1694302206 74.95.2.170 45468 typ srflx raddr"
+ " 10.0.254.2 rport 61232\r\n"
+#endif
+ ; // NOLINT(whitespace/semicolon)
+
+// Audio SDP with a limited set of audio codecs.
+static const char kAudioSdpPlanB[] =
+ "v=0\r\n"
+ "o=- 7859371131 2 IN IP4 192.168.30.208\r\n"
+ "s=-\r\n"
+ "c=IN IP4 192.168.30.208\r\n"
+ "t=0 0\r\n"
+ "m=audio 16000 RTP/SAVPF 0 8 126\r\n"
+ "a=rtpmap:0 PCMU/8000\r\n"
+ "a=rtpmap:8 PCMA/8000\r\n"
+ "a=rtpmap:126 telephone-event/8000\r\n"
+ "a=sendrecv\r\n"
+ "a=rtcp:16000 IN IP4 192.168.30.208\r\n"
+ "a=rtcp-mux\r\n"
+ "a=crypto:0 AES_CM_128_HMAC_SHA1_80 "
+ "inline:tvKIFjbMQ7W0/C2RzhwN0oQglj/7GJg+frdsNRxt\r\n"
+ "a=ice-ufrag:AI2sRT3r\r\n"
+ "a=ice-pwd:lByS9z2RSQlSE9XurlvjYmEm\r\n"
+ "a=ssrc:4227871655 cname:GeAAgb6XCPNLVMX5\r\n"
+ "a=ssrc:4227871655 msid:1NFAV3iD08ioO2339rQS9pfOI9mDf6GeG9F4 a0\r\n"
+ "a=ssrc:4227871655 mslabel:1NFAV3iD08ioO2339rQS9pfOI9mDf6GeG9F4\r\n"
+ "a=ssrc:4227871655 label:1NFAV3iD08ioO2339rQS9pfOI9mDf6GeG9F4a0\r\n"
+ "a=mid:audio\r\n";
+// Same string as above but with the MID changed to the Unified Plan default.
+// This is needed so that this SDP can be used as an answer for a Unified Plan
+// offer.
+static const char kAudioSdpUnifiedPlan[] =
+ "v=0\r\n"
+ "o=- 7859371131 2 IN IP4 192.168.30.208\r\n"
+ "s=-\r\n"
+ "c=IN IP4 192.168.30.208\r\n"
+ "t=0 0\r\n"
+ "m=audio 16000 RTP/SAVPF 0 8 126\r\n"
+ "a=rtpmap:0 PCMU/8000\r\n"
+ "a=rtpmap:8 PCMA/8000\r\n"
+ "a=rtpmap:126 telephone-event/8000\r\n"
+ "a=sendrecv\r\n"
+ "a=rtcp:16000 IN IP4 192.168.30.208\r\n"
+ "a=rtcp-mux\r\n"
+ "a=crypto:0 AES_CM_128_HMAC_SHA1_80 "
+ "inline:tvKIFjbMQ7W0/C2RzhwN0oQglj/7GJg+frdsNRxt\r\n"
+ "a=ice-ufrag:AI2sRT3r\r\n"
+ "a=ice-pwd:lByS9z2RSQlSE9XurlvjYmEm\r\n"
+ "a=ssrc:4227871655 cname:GeAAgb6XCPNLVMX5\r\n"
+ "a=ssrc:4227871655 msid:1NFAV3iD08ioO2339rQS9pfOI9mDf6GeG9F4 a0\r\n"
+ "a=ssrc:4227871655 mslabel:1NFAV3iD08ioO2339rQS9pfOI9mDf6GeG9F4\r\n"
+ "a=ssrc:4227871655 label:1NFAV3iD08ioO2339rQS9pfOI9mDf6GeG9F4a0\r\n"
+ "a=mid:0\r\n";
+
+static const char kAudioSdpWithUnsupportedCodecsPlanB[] =
+ "v=0\r\n"
+ "o=- 6858750541 2 IN IP4 192.168.30.208\r\n"
+ "s=-\r\n"
+ "c=IN IP4 192.168.30.208\r\n"
+ "t=0 0\r\n"
+ "m=audio 16000 RTP/SAVPF 0 8 109 110 126\r\n"
+ "a=rtpmap:0 PCMU/8000\r\n"
+ "a=rtpmap:8 PCMA/8000\r\n"
+ "a=rtpmap:109 WeirdCodec1/8000\r\n"
+ "a=rtpmap:110 WeirdCodec2/8000\r\n"
+ "a=rtpmap:126 telephone-event/8000\r\n"
+ "a=sendonly\r\n"
+ "a=rtcp:16000 IN IP4 192.168.30.208\r\n"
+ "a=rtcp-mux\r\n"
+ "a=crypto:0 AES_CM_128_HMAC_SHA1_80 "
+ "inline:tvKIFjbMQ7W0/C2RzhwN0oQglj/7GJg+frdsNRxt\r\n"
+ "a=ice-ufrag:AI2sRT3r\r\n"
+ "a=ice-pwd:lByS9z2RSQlSE9XurlvjYmEm\r\n"
+ "a=ssrc:4227871655 cname:TsmD02HRfhkJBm4m\r\n"
+ "a=ssrc:4227871655 msid:7nU0TApbB-n4dfPlCplWT9QTEsbBDS1IlpW3 a0\r\n"
+ "a=ssrc:4227871655 mslabel:7nU0TApbB-n4dfPlCplWT9QTEsbBDS1IlpW3\r\n"
+ "a=ssrc:4227871655 label:7nU0TApbB-n4dfPlCplWT9QTEsbBDS1IlpW3a0\r\n"
+ "a=mid:audio\r\n";
+// Same string as above but with the MID changed to the Unified Plan default.
+// This is needed so that this SDP can be used as an answer for a Unified Plan
+// offer.
+static const char kAudioSdpWithUnsupportedCodecsUnifiedPlan[] =
+ "v=0\r\n"
+ "o=- 6858750541 2 IN IP4 192.168.30.208\r\n"
+ "s=-\r\n"
+ "c=IN IP4 192.168.30.208\r\n"
+ "t=0 0\r\n"
+ "m=audio 16000 RTP/SAVPF 0 8 109 110 126\r\n"
+ "a=rtpmap:0 PCMU/8000\r\n"
+ "a=rtpmap:8 PCMA/8000\r\n"
+ "a=rtpmap:109 WeirdCodec1/8000\r\n"
+ "a=rtpmap:110 WeirdCodec2/8000\r\n"
+ "a=rtpmap:126 telephone-event/8000\r\n"
+ "a=sendonly\r\n"
+ "a=rtcp:16000 IN IP4 192.168.30.208\r\n"
+ "a=rtcp-mux\r\n"
+ "a=crypto:0 AES_CM_128_HMAC_SHA1_80 "
+ "inline:tvKIFjbMQ7W0/C2RzhwN0oQglj/7GJg+frdsNRxt\r\n"
+ "a=ice-ufrag:AI2sRT3r\r\n"
+ "a=ice-pwd:lByS9z2RSQlSE9XurlvjYmEm\r\n"
+ "a=ssrc:4227871655 cname:TsmD02HRfhkJBm4m\r\n"
+ "a=ssrc:4227871655 msid:7nU0TApbB-n4dfPlCplWT9QTEsbBDS1IlpW3 a0\r\n"
+ "a=ssrc:4227871655 mslabel:7nU0TApbB-n4dfPlCplWT9QTEsbBDS1IlpW3\r\n"
+ "a=ssrc:4227871655 label:7nU0TApbB-n4dfPlCplWT9QTEsbBDS1IlpW3a0\r\n"
+ "a=mid:0\r\n";
+
+} // namespace webrtc
+
+#endif // PC_TEST_TEST_SDP_STRINGS_H_
diff --git a/third_party/libwebrtc/pc/track_media_info_map.cc b/third_party/libwebrtc/pc/track_media_info_map.cc
new file mode 100644
index 0000000000..ac24d07f61
--- /dev/null
+++ b/third_party/libwebrtc/pc/track_media_info_map.cc
@@ -0,0 +1,273 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/track_media_info_map.h"
+
+#include <cstdint>
+#include <set>
+#include <type_traits>
+#include <utility>
+
+#include "api/media_types.h"
+#include "api/rtp_parameters.h"
+#include "media/base/stream_params.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/thread.h"
+
+namespace webrtc {
+
+namespace {
+
+// Returns map[key], or a null/default-constructed V when `key` is absent.
+// V must be pointer-like (raw pointer or smart pointer) so that nullptr is a
+// meaningful "not found" result.
+template <typename K, typename V>
+V FindValueOrNull(const std::map<K, V>& map, const K& key) {
+  auto it = map.find(key);
+  return (it != map.end()) ? it->second : nullptr;
+}
+
+// Returns the address of the mapped value for `key`, or nullptr when absent.
+// Unlike FindValueOrNull, V itself need not be pointer-like.
+// NOTE(review): not referenced anywhere in this file; presumably retained for
+// future use — confirm before removing.
+template <typename K, typename V>
+const V* FindAddressOrNull(const std::map<K, V>& map, const K& key) {
+  auto it = map.find(key);
+  return (it != map.end()) ? &it->second : nullptr;
+}
+
+// Builds SSRC -> track lookup tables from the given senders and receivers.
+// Local (sender) tracks and remote (receiver) tracks go into separate maps,
+// split by media type. A receiver encoding with no SSRC marks its track as
+// the "unsignaled" track for that media type instead of being indexed.
+void GetAudioAndVideoTrackBySsrc(
+    rtc::ArrayView<rtc::scoped_refptr<RtpSenderInternal>> rtp_senders,
+    rtc::ArrayView<rtc::scoped_refptr<RtpReceiverInternal>> rtp_receivers,
+    std::map<uint32_t, AudioTrackInterface*>* local_audio_track_by_ssrc,
+    std::map<uint32_t, VideoTrackInterface*>* local_video_track_by_ssrc,
+    std::map<uint32_t, AudioTrackInterface*>* remote_audio_track_by_ssrc,
+    std::map<uint32_t, VideoTrackInterface*>* remote_video_track_by_ssrc,
+    AudioTrackInterface** unsignaled_audio_track,
+    VideoTrackInterface** unsignaled_video_track) {
+  RTC_DCHECK(local_audio_track_by_ssrc->empty());
+  RTC_DCHECK(local_video_track_by_ssrc->empty());
+  RTC_DCHECK(remote_audio_track_by_ssrc->empty());
+  RTC_DCHECK(remote_video_track_by_ssrc->empty());
+  // Senders: key each local track by the sender's primary SSRC. Senders with
+  // no track or with ssrc() == 0 (not yet connected) are skipped.
+  for (const auto& rtp_sender : rtp_senders) {
+    cricket::MediaType media_type = rtp_sender->media_type();
+    MediaStreamTrackInterface* track = rtp_sender->track().get();
+    if (!track) {
+      continue;
+    }
+    // TODO(deadbeef): `ssrc` should be removed in favor of `GetParameters`.
+    uint32_t ssrc = rtp_sender->ssrc();
+    if (ssrc != 0) {
+      if (media_type == cricket::MEDIA_TYPE_AUDIO) {
+        RTC_DCHECK(local_audio_track_by_ssrc->find(ssrc) ==
+                   local_audio_track_by_ssrc->end());
+        (*local_audio_track_by_ssrc)[ssrc] =
+            static_cast<AudioTrackInterface*>(track);
+      } else {
+        RTC_DCHECK(local_video_track_by_ssrc->find(ssrc) ==
+                   local_video_track_by_ssrc->end());
+        (*local_video_track_by_ssrc)[ssrc] =
+            static_cast<VideoTrackInterface*>(track);
+      }
+    }
+  }
+  // Receivers: key each remote track by every signaled encoding SSRC; an
+  // encoding without an SSRC designates the unsignaled track instead.
+  for (const auto& rtp_receiver : rtp_receivers) {
+    cricket::MediaType media_type = rtp_receiver->media_type();
+    MediaStreamTrackInterface* track = rtp_receiver->track().get();
+    RTC_DCHECK(track);
+    RtpParameters params = rtp_receiver->GetParameters();
+    for (const RtpEncodingParameters& encoding : params.encodings) {
+      if (!encoding.ssrc) {
+        if (media_type == cricket::MEDIA_TYPE_AUDIO) {
+          *unsignaled_audio_track = static_cast<AudioTrackInterface*>(track);
+        } else {
+          RTC_DCHECK(media_type == cricket::MEDIA_TYPE_VIDEO);
+          *unsignaled_video_track = static_cast<VideoTrackInterface*>(track);
+        }
+        continue;
+      }
+      if (media_type == cricket::MEDIA_TYPE_AUDIO) {
+        RTC_DCHECK(remote_audio_track_by_ssrc->find(*encoding.ssrc) ==
+                   remote_audio_track_by_ssrc->end());
+        (*remote_audio_track_by_ssrc)[*encoding.ssrc] =
+            static_cast<AudioTrackInterface*>(track);
+      } else {
+        RTC_DCHECK(remote_video_track_by_ssrc->find(*encoding.ssrc) ==
+                   remote_video_track_by_ssrc->end());
+        (*remote_video_track_by_ssrc)[*encoding.ssrc] =
+            static_cast<VideoTrackInterface*>(track);
+      }
+    }
+  }
+}
+
+} // namespace
+
+TrackMediaInfoMap::TrackMediaInfoMap() = default;
+
+// Builds all lookup tables from a single snapshot of media "infos" plus the
+// current senders/receivers. Takes ownership of the infos; info pointers
+// stored in the maps point into the moved-in optionals. May be called at most
+// once (DCHECKed); every accessor requires it to have been called.
+void TrackMediaInfoMap::Initialize(
+    absl::optional<cricket::VoiceMediaInfo> voice_media_info,
+    absl::optional<cricket::VideoMediaInfo> video_media_info,
+    rtc::ArrayView<rtc::scoped_refptr<RtpSenderInternal>> rtp_senders,
+    rtc::ArrayView<rtc::scoped_refptr<RtpReceiverInternal>> rtp_receivers) {
+  // Guard: nothing below may make blocking calls on this thread.
+  rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+  RTC_DCHECK(!is_initialized_);
+  is_initialized_ = true;
+  voice_media_info_ = std::move(voice_media_info);
+  video_media_info_ = std::move(video_media_info);
+
+  // Temporary SSRC -> track lookups, split local/remote and audio/video.
+  std::map<uint32_t, AudioTrackInterface*> local_audio_track_by_ssrc;
+  std::map<uint32_t, VideoTrackInterface*> local_video_track_by_ssrc;
+  std::map<uint32_t, AudioTrackInterface*> remote_audio_track_by_ssrc;
+  std::map<uint32_t, VideoTrackInterface*> remote_video_track_by_ssrc;
+  AudioTrackInterface* unsignaled_audio_track = nullptr;
+  VideoTrackInterface* unsignaled_video_track = nullptr;
+  GetAudioAndVideoTrackBySsrc(
+      rtp_senders, rtp_receivers, &local_audio_track_by_ssrc,
+      &local_video_track_by_ssrc, &remote_audio_track_by_ssrc,
+      &remote_video_track_by_ssrc, &unsignaled_audio_track,
+      &unsignaled_video_track);
+
+  // Record attachment IDs for all sender/receiver tracks, keyed by raw track
+  // pointer, so stats code can look them up without thread jumping.
+  for (const auto& sender : rtp_senders) {
+    attachment_id_by_track_[sender->track().get()] = sender->AttachmentId();
+  }
+  for (const auto& receiver : rtp_receivers) {
+    attachment_id_by_track_[receiver->track().get()] = receiver->AttachmentId();
+  }
+
+  // Voice: associate sender/receiver infos with tracks and index by SSRC.
+  if (voice_media_info_.has_value()) {
+    for (auto& sender_info : voice_media_info_->senders) {
+      AudioTrackInterface* associated_track =
+          FindValueOrNull(local_audio_track_by_ssrc, sender_info.ssrc());
+      if (associated_track) {
+        // One sender is associated with at most one track.
+        // One track may be associated with multiple senders.
+        audio_track_by_sender_info_[&sender_info] = associated_track;
+      }
+      if (sender_info.ssrc() == 0)
+        continue;  // Unconnected SSRC. bugs.webrtc.org/8673
+      // NOTE(review): voice duplicates are RTC_CHECKed (fatal in release)
+      // while the video paths below only RTC_DCHECK — confirm the asymmetry
+      // is intentional.
+      RTC_CHECK(voice_info_by_sender_ssrc_.count(sender_info.ssrc()) == 0)
+          << "Duplicate voice sender SSRC: " << sender_info.ssrc();
+      voice_info_by_sender_ssrc_[sender_info.ssrc()] = &sender_info;
+    }
+    for (auto& receiver_info : voice_media_info_->receivers) {
+      AudioTrackInterface* associated_track =
+          FindValueOrNull(remote_audio_track_by_ssrc, receiver_info.ssrc());
+      if (associated_track) {
+        // One receiver is associated with at most one track, which is uniquely
+        // associated with that receiver.
+        audio_track_by_receiver_info_[&receiver_info] = associated_track;
+      } else if (unsignaled_audio_track) {
+        // Fall back to the unsignaled track when no SSRC match exists.
+        audio_track_by_receiver_info_[&receiver_info] = unsignaled_audio_track;
+      }
+      RTC_CHECK(voice_info_by_receiver_ssrc_.count(receiver_info.ssrc()) == 0)
+          << "Duplicate voice receiver SSRC: " << receiver_info.ssrc();
+      voice_info_by_receiver_ssrc_[receiver_info.ssrc()] = &receiver_info;
+    }
+  }
+  // Video: per-layer `senders` are used for track association (any SSRC in
+  // the sender's ssrc-groups may match), while `aggregated_senders` are what
+  // get indexed by SSRC.
+  if (video_media_info_.has_value()) {
+    for (auto& sender_info : video_media_info_->senders) {
+      std::set<uint32_t> ssrcs;
+      ssrcs.insert(sender_info.ssrc());
+      for (auto& ssrc_group : sender_info.ssrc_groups) {
+        for (auto ssrc : ssrc_group.ssrcs) {
+          ssrcs.insert(ssrc);
+        }
+      }
+      for (auto ssrc : ssrcs) {
+        VideoTrackInterface* associated_track =
+            FindValueOrNull(local_video_track_by_ssrc, ssrc);
+        if (associated_track) {
+          // One sender is associated with at most one track.
+          // One track may be associated with multiple senders.
+          video_track_by_sender_info_[&sender_info] = associated_track;
+          break;
+        }
+      }
+    }
+    for (auto& sender_info : video_media_info_->aggregated_senders) {
+      if (sender_info.ssrc() == 0)
+        continue;  // Unconnected SSRC. bugs.webrtc.org/8673
+      RTC_DCHECK(video_info_by_sender_ssrc_.count(sender_info.ssrc()) == 0)
+          << "Duplicate video sender SSRC: " << sender_info.ssrc();
+      video_info_by_sender_ssrc_[sender_info.ssrc()] = &sender_info;
+      VideoTrackInterface* associated_track =
+          FindValueOrNull(local_video_track_by_ssrc, sender_info.ssrc());
+      if (associated_track) {
+        video_track_by_sender_info_[&sender_info] = associated_track;
+      }
+    }
+    for (auto& receiver_info : video_media_info_->receivers) {
+      VideoTrackInterface* associated_track =
+          FindValueOrNull(remote_video_track_by_ssrc, receiver_info.ssrc());
+      if (associated_track) {
+        // One receiver is associated with at most one track, which is uniquely
+        // associated with that receiver.
+        video_track_by_receiver_info_[&receiver_info] = associated_track;
+      } else if (unsignaled_video_track) {
+        // Fall back to the unsignaled track when no SSRC match exists.
+        video_track_by_receiver_info_[&receiver_info] = unsignaled_video_track;
+      }
+      RTC_DCHECK(video_info_by_receiver_ssrc_.count(receiver_info.ssrc()) == 0)
+          << "Duplicate video receiver SSRC: " << receiver_info.ssrc();
+      video_info_by_receiver_ssrc_[receiver_info.ssrc()] = &receiver_info;
+    }
+  }
+}
+
+// Returns the voice sender info indexed under `ssrc`, or nullptr if unknown.
+const cricket::VoiceSenderInfo* TrackMediaInfoMap::GetVoiceSenderInfoBySsrc(
+    uint32_t ssrc) const {
+  RTC_DCHECK(is_initialized_);
+  return FindValueOrNull(voice_info_by_sender_ssrc_, ssrc);
+}
+// Returns the voice receiver info indexed under `ssrc`, or nullptr if unknown.
+const cricket::VoiceReceiverInfo* TrackMediaInfoMap::GetVoiceReceiverInfoBySsrc(
+    uint32_t ssrc) const {
+  RTC_DCHECK(is_initialized_);
+  return FindValueOrNull(voice_info_by_receiver_ssrc_, ssrc);
+}
+
+// Returns the video sender info indexed under `ssrc`, or nullptr if unknown.
+const cricket::VideoSenderInfo* TrackMediaInfoMap::GetVideoSenderInfoBySsrc(
+    uint32_t ssrc) const {
+  RTC_DCHECK(is_initialized_);
+  return FindValueOrNull(video_info_by_sender_ssrc_, ssrc);
+}
+
+// Returns the video receiver info indexed under `ssrc`, or nullptr if unknown.
+const cricket::VideoReceiverInfo* TrackMediaInfoMap::GetVideoReceiverInfoBySsrc(
+    uint32_t ssrc) const {
+  RTC_DCHECK(is_initialized_);
+  return FindValueOrNull(video_info_by_receiver_ssrc_, ssrc);
+}
+
+// Returns the audio track associated with `voice_sender_info`, or null.
+rtc::scoped_refptr<AudioTrackInterface> TrackMediaInfoMap::GetAudioTrack(
+    const cricket::VoiceSenderInfo& voice_sender_info) const {
+  RTC_DCHECK(is_initialized_);
+  return FindValueOrNull(audio_track_by_sender_info_, &voice_sender_info);
+}
+
+// Returns the audio track associated with `voice_receiver_info`, or null.
+rtc::scoped_refptr<AudioTrackInterface> TrackMediaInfoMap::GetAudioTrack(
+    const cricket::VoiceReceiverInfo& voice_receiver_info) const {
+  RTC_DCHECK(is_initialized_);
+  return FindValueOrNull(audio_track_by_receiver_info_, &voice_receiver_info);
+}
+
+// Returns the video track associated with `video_sender_info`, or null.
+rtc::scoped_refptr<VideoTrackInterface> TrackMediaInfoMap::GetVideoTrack(
+    const cricket::VideoSenderInfo& video_sender_info) const {
+  RTC_DCHECK(is_initialized_);
+  return FindValueOrNull(video_track_by_sender_info_, &video_sender_info);
+}
+
+// Returns the video track associated with `video_receiver_info`, or null.
+rtc::scoped_refptr<VideoTrackInterface> TrackMediaInfoMap::GetVideoTrack(
+    const cricket::VideoReceiverInfo& video_receiver_info) const {
+  RTC_DCHECK(is_initialized_);
+  return FindValueOrNull(video_track_by_receiver_info_, &video_receiver_info);
+}
+
+// Returns the attachment ID recorded for `track` during Initialize(), or
+// nullopt if the track was not attached to any sender/receiver at that time.
+absl::optional<int> TrackMediaInfoMap::GetAttachmentIdByTrack(
+    const MediaStreamTrackInterface* track) const {
+  RTC_DCHECK(is_initialized_);
+  auto it = attachment_id_by_track_.find(track);
+  return it != attachment_id_by_track_.end() ? absl::optional<int>(it->second)
+                                             : absl::nullopt;
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/track_media_info_map.h b/third_party/libwebrtc/pc/track_media_info_map.h
new file mode 100644
index 0000000000..98f8548a10
--- /dev/null
+++ b/third_party/libwebrtc/pc/track_media_info_map.h
@@ -0,0 +1,116 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_TRACK_MEDIA_INFO_MAP_H_
+#define PC_TRACK_MEDIA_INFO_MAP_H_
+
+#include <stdint.h>
+
+#include <map>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/array_view.h"
+#include "api/media_stream_interface.h"
+#include "api/scoped_refptr.h"
+#include "media/base/media_channel.h"
+#include "pc/rtp_receiver.h"
+#include "pc/rtp_sender.h"
+#include "rtc_base/ref_count.h"
+
+namespace webrtc {
+
+// Audio/video tracks and sender/receiver statistical information are associated
+// with each other based on attachments to RTP senders/receivers. This class
+// maps that relationship so that "infos" can be obtained from SSRCs and tracks
+// can be obtained from "infos".
+class TrackMediaInfoMap {
+ public:
+  // Constructs an empty, uninitialized map. Initialize() must be called
+  // exactly once before any accessor (enforced by DCHECKs).
+  TrackMediaInfoMap();
+
+  // Takes ownership of the "infos". Does not affect the lifetime of the senders
+  // or receivers, but TrackMediaInfoMap will keep their associated tracks alive
+  // through reference counting until the map is destroyed.
+  void Initialize(
+      absl::optional<cricket::VoiceMediaInfo> voice_media_info,
+      absl::optional<cricket::VideoMediaInfo> video_media_info,
+      rtc::ArrayView<rtc::scoped_refptr<RtpSenderInternal>> rtp_senders,
+      rtc::ArrayView<rtc::scoped_refptr<RtpReceiverInternal>> rtp_receivers);
+
+  const absl::optional<cricket::VoiceMediaInfo>& voice_media_info() const {
+    RTC_DCHECK(is_initialized_);
+    return voice_media_info_;
+  }
+  const absl::optional<cricket::VideoMediaInfo>& video_media_info() const {
+    RTC_DCHECK(is_initialized_);
+    return video_media_info_;
+  }
+
+  // SSRC -> info lookups; all return nullptr when `ssrc` is unknown.
+  const cricket::VoiceSenderInfo* GetVoiceSenderInfoBySsrc(uint32_t ssrc) const;
+  const cricket::VoiceReceiverInfo* GetVoiceReceiverInfoBySsrc(
+      uint32_t ssrc) const;
+  const cricket::VideoSenderInfo* GetVideoSenderInfoBySsrc(uint32_t ssrc) const;
+  const cricket::VideoReceiverInfo* GetVideoReceiverInfoBySsrc(
+      uint32_t ssrc) const;
+
+  // Info -> track lookups; all return null when no track is associated.
+  rtc::scoped_refptr<AudioTrackInterface> GetAudioTrack(
+      const cricket::VoiceSenderInfo& voice_sender_info) const;
+  rtc::scoped_refptr<AudioTrackInterface> GetAudioTrack(
+      const cricket::VoiceReceiverInfo& voice_receiver_info) const;
+  rtc::scoped_refptr<VideoTrackInterface> GetVideoTrack(
+      const cricket::VideoSenderInfo& video_sender_info) const;
+  rtc::scoped_refptr<VideoTrackInterface> GetVideoTrack(
+      const cricket::VideoReceiverInfo& video_receiver_info) const;
+
+  // TODO(hta): Remove this function, and redesign the callers not to need it.
+  // It is not going to work if a track is attached multiple times, and
+  // it is not going to work if a received track is attached as a sending
+  // track (loopback).
+  absl::optional<int> GetAttachmentIdByTrack(
+      const MediaStreamTrackInterface* track) const;
+
+ private:
+  bool is_initialized_ = false;
+  // Owned snapshots of the media infos; the raw info pointers in the maps
+  // below point into these, so they stay valid for this object's lifetime.
+  absl::optional<cricket::VoiceMediaInfo> voice_media_info_;
+  absl::optional<cricket::VideoMediaInfo> video_media_info_;
+  // These maps map info objects to their corresponding tracks. They are always
+  // the inverse of the maps above. One info object always maps to only one
+  // track. The use of scoped_refptr<> here ensures the tracks outlive
+  // TrackMediaInfoMap.
+  std::map<const cricket::VoiceSenderInfo*,
+           rtc::scoped_refptr<AudioTrackInterface>>
+      audio_track_by_sender_info_;
+  std::map<const cricket::VoiceReceiverInfo*,
+           rtc::scoped_refptr<AudioTrackInterface>>
+      audio_track_by_receiver_info_;
+  std::map<const cricket::VideoSenderInfo*,
+           rtc::scoped_refptr<VideoTrackInterface>>
+      video_track_by_sender_info_;
+  std::map<const cricket::VideoReceiverInfo*,
+           rtc::scoped_refptr<VideoTrackInterface>>
+      video_track_by_receiver_info_;
+  // Map of tracks to attachment IDs.
+  // Necessary because senders and receivers live on the signaling thread,
+  // but the attachment IDs are needed while building stats on the networking
+  // thread, so we can't look them up in the senders/receivers without
+  // thread jumping.
+  std::map<const MediaStreamTrackInterface*, int> attachment_id_by_track_;
+  // These maps map SSRCs to the corresponding voice or video info objects.
+  std::map<uint32_t, cricket::VoiceSenderInfo*> voice_info_by_sender_ssrc_;
+  std::map<uint32_t, cricket::VoiceReceiverInfo*> voice_info_by_receiver_ssrc_;
+  std::map<uint32_t, cricket::VideoSenderInfo*> video_info_by_sender_ssrc_;
+  std::map<uint32_t, cricket::VideoReceiverInfo*> video_info_by_receiver_ssrc_;
+};
+
+} // namespace webrtc
+
+#endif // PC_TRACK_MEDIA_INFO_MAP_H_
diff --git a/third_party/libwebrtc/pc/track_media_info_map_unittest.cc b/third_party/libwebrtc/pc/track_media_info_map_unittest.cc
new file mode 100644
index 0000000000..bffa3eb866
--- /dev/null
+++ b/third_party/libwebrtc/pc/track_media_info_map_unittest.cc
@@ -0,0 +1,332 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/track_media_info_map.h"
+
+#include <stddef.h>
+
+#include <cstdint>
+#include <initializer_list>
+#include <string>
+#include <type_traits>
+#include <utility>
+#include <vector>
+
+#include "api/media_types.h"
+#include "api/rtp_parameters.h"
+#include "api/test/mock_video_track.h"
+#include "media/base/media_channel.h"
+#include "pc/audio_track.h"
+#include "pc/test/fake_video_track_source.h"
+#include "pc/test/mock_rtp_receiver_internal.h"
+#include "pc/test/mock_rtp_sender_internal.h"
+#include "pc/video_track.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/thread.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+using ::testing::ElementsAre;
+
+namespace webrtc {
+
+namespace {
+
+// Builds RtpParameters with one encoding per SSRC, preserving order.
+RtpParameters CreateRtpParametersWithSsrcs(
+    std::initializer_list<uint32_t> ssrcs) {
+  RtpParameters params;
+  for (uint32_t ssrc : ssrcs) {
+    RtpEncodingParameters encoding_params;
+    encoding_params.ssrc = ssrc;
+    params.encodings.push_back(encoding_params);
+  }
+  return params;
+}
+
+// Creates a mock sender whose ssrc() is the first element of `ssrcs` (0 when
+// empty), whose GetParameters() reports every SSRC, and whose AttachmentId()
+// is always 1.
+rtc::scoped_refptr<MockRtpSenderInternal> CreateMockRtpSender(
+    cricket::MediaType media_type,
+    std::initializer_list<uint32_t> ssrcs,
+    rtc::scoped_refptr<MediaStreamTrackInterface> track) {
+  uint32_t first_ssrc;
+  if (ssrcs.size()) {
+    first_ssrc = *ssrcs.begin();
+  } else {
+    first_ssrc = 0;
+  }
+  auto sender = rtc::make_ref_counted<MockRtpSenderInternal>();
+  EXPECT_CALL(*sender, track())
+      .WillRepeatedly(::testing::Return(std::move(track)));
+  EXPECT_CALL(*sender, ssrc()).WillRepeatedly(::testing::Return(first_ssrc));
+  EXPECT_CALL(*sender, media_type())
+      .WillRepeatedly(::testing::Return(media_type));
+  EXPECT_CALL(*sender, GetParameters())
+      .WillRepeatedly(::testing::Return(CreateRtpParametersWithSsrcs(ssrcs)));
+  EXPECT_CALL(*sender, AttachmentId()).WillRepeatedly(::testing::Return(1));
+  return sender;
+}
+
+// Creates a mock receiver whose GetParameters() reports every SSRC and whose
+// AttachmentId() is always 1.
+rtc::scoped_refptr<MockRtpReceiverInternal> CreateMockRtpReceiver(
+    cricket::MediaType media_type,
+    std::initializer_list<uint32_t> ssrcs,
+    rtc::scoped_refptr<MediaStreamTrackInterface> track) {
+  auto receiver = rtc::make_ref_counted<MockRtpReceiverInternal>();
+  EXPECT_CALL(*receiver, track())
+      .WillRepeatedly(::testing::Return(std::move(track)));
+  EXPECT_CALL(*receiver, media_type())
+      .WillRepeatedly(::testing::Return(media_type));
+  EXPECT_CALL(*receiver, GetParameters())
+      .WillRepeatedly(::testing::Return(CreateRtpParametersWithSsrcs(ssrcs)));
+  EXPECT_CALL(*receiver, AttachmentId()).WillRepeatedly(::testing::Return(1));
+  return receiver;
+}
+
+// Creates a real VideoTrack backed by a fake (non-screencast) source on the
+// current thread.
+rtc::scoped_refptr<VideoTrackInterface> CreateVideoTrack(
+    const std::string& id) {
+  return VideoTrack::Create(id, FakeVideoTrackSource::Create(false),
+                            rtc::Thread::Current());
+}
+
+// Creates a mock video track that only stubs kind().
+// NOTE(review): `id` is ignored by the mock — confirm no caller relies on the
+// returned track's id.
+rtc::scoped_refptr<VideoTrackInterface> CreateMockVideoTrack(
+    const std::string& id) {
+  auto track = MockVideoTrack::Create();
+  EXPECT_CALL(*track, kind())
+      .WillRepeatedly(::testing::Return(VideoTrack::kVideoKind));
+  return track;
+}
+
+// Fixture that assembles mock senders/receivers plus matching voice/video
+// media infos, then snapshots them into `map_` via InitializeMap().
+class TrackMediaInfoMapTest : public ::testing::Test {
+ public:
+  TrackMediaInfoMapTest() : TrackMediaInfoMapTest(true) {}
+
+  explicit TrackMediaInfoMapTest(bool use_real_video_track)
+      : local_audio_track_(AudioTrack::Create("LocalAudioTrack", nullptr)),
+        remote_audio_track_(AudioTrack::Create("RemoteAudioTrack", nullptr)),
+        local_video_track_(use_real_video_track
+                               ? CreateVideoTrack("LocalVideoTrack")
+                               : CreateMockVideoTrack("LocalVideoTrack")),
+        // NOTE(review): the mock branch reuses the "LocalVideoTrack" id for
+        // the remote track (the mock ignores its id) — looks like a
+        // copy-paste slip; confirm.
+        remote_video_track_(use_real_video_track
+                                ? CreateVideoTrack("RemoteVideoTrack")
+                                : CreateMockVideoTrack("LocalVideoTrack")) {}
+
+  // Adds a mock sender for `local_track` and a matching sender info (one
+  // local_stats entry per SSRC) to the voice/video media info.
+  void AddRtpSenderWithSsrcs(std::initializer_list<uint32_t> ssrcs,
+                             MediaStreamTrackInterface* local_track) {
+    rtc::scoped_refptr<MockRtpSenderInternal> rtp_sender = CreateMockRtpSender(
+        local_track->kind() == MediaStreamTrackInterface::kAudioKind
+            ? cricket::MEDIA_TYPE_AUDIO
+            : cricket::MEDIA_TYPE_VIDEO,
+        ssrcs, rtc::scoped_refptr<MediaStreamTrackInterface>(local_track));
+    rtp_senders_.push_back(rtp_sender);
+
+    if (local_track->kind() == MediaStreamTrackInterface::kAudioKind) {
+      cricket::VoiceSenderInfo voice_sender_info;
+      size_t i = 0;
+      for (uint32_t ssrc : ssrcs) {
+        voice_sender_info.local_stats.push_back(cricket::SsrcSenderInfo());
+        voice_sender_info.local_stats[i++].ssrc = ssrc;
+      }
+      voice_media_info_.senders.push_back(voice_sender_info);
+    } else {
+      cricket::VideoSenderInfo video_sender_info;
+      size_t i = 0;
+      for (uint32_t ssrc : ssrcs) {
+        video_sender_info.local_stats.push_back(cricket::SsrcSenderInfo());
+        video_sender_info.local_stats[i++].ssrc = ssrc;
+      }
+      video_media_info_.senders.push_back(video_sender_info);
+      video_media_info_.aggregated_senders.push_back(video_sender_info);
+    }
+  }
+
+  // Adds a mock receiver for `remote_track` and a matching receiver info (one
+  // local_stats entry per SSRC) to the voice/video media info.
+  void AddRtpReceiverWithSsrcs(std::initializer_list<uint32_t> ssrcs,
+                               MediaStreamTrackInterface* remote_track) {
+    auto rtp_receiver = CreateMockRtpReceiver(
+        remote_track->kind() == MediaStreamTrackInterface::kAudioKind
+            ? cricket::MEDIA_TYPE_AUDIO
+            : cricket::MEDIA_TYPE_VIDEO,
+        ssrcs, rtc::scoped_refptr<MediaStreamTrackInterface>(remote_track));
+    rtp_receivers_.push_back(rtp_receiver);
+
+    if (remote_track->kind() == MediaStreamTrackInterface::kAudioKind) {
+      cricket::VoiceReceiverInfo voice_receiver_info;
+      size_t i = 0;
+      for (uint32_t ssrc : ssrcs) {
+        voice_receiver_info.local_stats.push_back(cricket::SsrcReceiverInfo());
+        voice_receiver_info.local_stats[i++].ssrc = ssrc;
+      }
+      voice_media_info_.receivers.push_back(voice_receiver_info);
+    } else {
+      cricket::VideoReceiverInfo video_receiver_info;
+      size_t i = 0;
+      for (uint32_t ssrc : ssrcs) {
+        video_receiver_info.local_stats.push_back(cricket::SsrcReceiverInfo());
+        video_receiver_info.local_stats[i++].ssrc = ssrc;
+      }
+      video_media_info_.receivers.push_back(video_receiver_info);
+    }
+  }
+
+  // Copies the current state of `voice_media_info_` and `video_media_info_`
+  // into the map.
+  void InitializeMap() {
+    map_.Initialize(voice_media_info_, video_media_info_, rtp_senders_,
+                    rtp_receivers_);
+  }
+
+ private:
+  rtc::AutoThread main_thread_;
+  cricket::VoiceMediaInfo voice_media_info_;
+  cricket::VideoMediaInfo video_media_info_;
+
+ protected:
+  std::vector<rtc::scoped_refptr<RtpSenderInternal>> rtp_senders_;
+  std::vector<rtc::scoped_refptr<RtpReceiverInternal>> rtp_receivers_;
+  TrackMediaInfoMap map_;
+  rtc::scoped_refptr<AudioTrack> local_audio_track_;
+  rtc::scoped_refptr<AudioTrack> remote_audio_track_;
+  rtc::scoped_refptr<VideoTrackInterface> local_video_track_;
+  rtc::scoped_refptr<VideoTrackInterface> remote_video_track_;
+};
+
+} // namespace
+
+// One sender/receiver per track, one SSRC each: every info maps to its track.
+TEST_F(TrackMediaInfoMapTest, SingleSenderReceiverPerTrackWithOneSsrc) {
+  AddRtpSenderWithSsrcs({1}, local_audio_track_.get());
+  AddRtpReceiverWithSsrcs({2}, remote_audio_track_.get());
+  AddRtpSenderWithSsrcs({3}, local_video_track_.get());
+  AddRtpReceiverWithSsrcs({4}, remote_video_track_.get());
+  InitializeMap();
+  // RTP audio sender -> local audio track
+  EXPECT_EQ(map_.GetAudioTrack(map_.voice_media_info()->senders[0]),
+            local_audio_track_.get());
+  // RTP audio receiver -> remote audio track
+  EXPECT_EQ(map_.GetAudioTrack(map_.voice_media_info()->receivers[0]),
+            remote_audio_track_.get());
+  // RTP video sender -> local video track
+  EXPECT_EQ(map_.GetVideoTrack(map_.video_media_info()->senders[0]),
+            local_video_track_.get());
+  // RTP video receiver -> remote video track
+  EXPECT_EQ(map_.GetVideoTrack(map_.video_media_info()->receivers[0]),
+            remote_video_track_.get());
+}
+
+// Audio and video may reuse the same SSRC values: the lookups are split by
+// media type, so there is no collision.
+TEST_F(TrackMediaInfoMapTest,
+       SingleSenderReceiverPerTrackWithAudioAndVideoUseSameSsrc) {
+  AddRtpSenderWithSsrcs({1}, local_audio_track_.get());
+  AddRtpReceiverWithSsrcs({2}, remote_audio_track_.get());
+  AddRtpSenderWithSsrcs({1}, local_video_track_.get());
+  AddRtpReceiverWithSsrcs({2}, remote_video_track_.get());
+  InitializeMap();
+  // RTP audio sender -> local audio track
+  EXPECT_EQ(map_.GetAudioTrack(map_.voice_media_info()->senders[0]),
+            local_audio_track_.get());
+  // RTP audio receiver -> remote audio track
+  EXPECT_EQ(map_.GetAudioTrack(map_.voice_media_info()->receivers[0]),
+            remote_audio_track_.get());
+  // RTP video sender -> local video track
+  EXPECT_EQ(map_.GetVideoTrack(map_.video_media_info()->senders[0]),
+            local_video_track_.get());
+  // RTP video receiver -> remote video track
+  EXPECT_EQ(map_.GetVideoTrack(map_.video_media_info()->receivers[0]),
+            remote_video_track_.get());
+}
+
+// A single sender carrying multiple SSRCs still maps to exactly one track.
+TEST_F(TrackMediaInfoMapTest, SingleMultiSsrcSenderPerTrack) {
+  AddRtpSenderWithSsrcs({1, 2}, local_audio_track_.get());
+  AddRtpSenderWithSsrcs({3, 4}, local_video_track_.get());
+  InitializeMap();
+  // RTP audio senders -> local audio track
+  EXPECT_EQ(map_.GetAudioTrack(map_.voice_media_info()->senders[0]),
+            local_audio_track_.get());
+  // RTP video senders -> local video track
+  EXPECT_EQ(map_.GetVideoTrack(map_.video_media_info()->senders[0]),
+            local_video_track_.get());
+}
+
+// Several single-SSRC senders may share one track; each sender info resolves
+// to that same track.
+TEST_F(TrackMediaInfoMapTest, MultipleOneSsrcSendersPerTrack) {
+  AddRtpSenderWithSsrcs({1}, local_audio_track_.get());
+  AddRtpSenderWithSsrcs({2}, local_audio_track_.get());
+  AddRtpSenderWithSsrcs({3}, local_video_track_.get());
+  AddRtpSenderWithSsrcs({4}, local_video_track_.get());
+  InitializeMap();
+  // RTP audio senders -> local audio track
+  EXPECT_EQ(map_.GetAudioTrack(map_.voice_media_info()->senders[0]),
+            local_audio_track_.get());
+  EXPECT_EQ(map_.GetAudioTrack(map_.voice_media_info()->senders[1]),
+            local_audio_track_.get());
+  // RTP video senders -> local video track
+  EXPECT_EQ(map_.GetVideoTrack(map_.video_media_info()->senders[0]),
+            local_video_track_.get());
+  EXPECT_EQ(map_.GetVideoTrack(map_.video_media_info()->senders[1]),
+            local_video_track_.get());
+}
+
+TEST_F(TrackMediaInfoMapTest, MultipleMultiSsrcSendersPerTrack) {
+ AddRtpSenderWithSsrcs({1, 2}, local_audio_track_.get());
+ AddRtpSenderWithSsrcs({3, 4}, local_audio_track_.get());
+ AddRtpSenderWithSsrcs({5, 6}, local_video_track_.get());
+ AddRtpSenderWithSsrcs({7, 8}, local_video_track_.get());
+ InitializeMap();
+ // RTP audio senders -> local audio track
+ EXPECT_EQ(map_.GetAudioTrack(map_.voice_media_info()->senders[0]),
+ local_audio_track_.get());
+ EXPECT_EQ(map_.GetAudioTrack(map_.voice_media_info()->senders[1]),
+ local_audio_track_.get());
+ // RTP video senders -> local video track
+ EXPECT_EQ(map_.GetVideoTrack(map_.video_media_info()->senders[0]),
+ local_video_track_.get());
+ EXPECT_EQ(map_.GetVideoTrack(map_.video_media_info()->senders[1]),
+ local_video_track_.get());
+}
+
+// SSRCs can be reused for send and receive in loopback.
+TEST_F(TrackMediaInfoMapTest, SingleSenderReceiverPerTrackWithSsrcNotUnique) {
+ AddRtpSenderWithSsrcs({1}, local_audio_track_.get());
+ AddRtpReceiverWithSsrcs({1}, remote_audio_track_.get());
+ AddRtpSenderWithSsrcs({2}, local_video_track_.get());
+ AddRtpReceiverWithSsrcs({2}, remote_video_track_.get());
+ InitializeMap();
+ // RTP audio senders -> local audio track
+ EXPECT_EQ(map_.GetAudioTrack(map_.voice_media_info()->senders[0]),
+ local_audio_track_.get());
+ // RTP audio receiver -> remote audio track
+ EXPECT_EQ(map_.GetAudioTrack(map_.voice_media_info()->receivers[0]),
+ remote_audio_track_.get());
+ // RTP video senders -> local video track
+ EXPECT_EQ(map_.GetVideoTrack(map_.video_media_info()->senders[0]),
+ local_video_track_.get());
+ // RTP video receiver -> remote video track
+ EXPECT_EQ(map_.GetVideoTrack(map_.video_media_info()->receivers[0]),
+ remote_video_track_.get());
+}
+
+TEST_F(TrackMediaInfoMapTest, SsrcLookupFunction) {
+ AddRtpSenderWithSsrcs({1}, local_audio_track_.get());
+ AddRtpReceiverWithSsrcs({2}, remote_audio_track_.get());
+ AddRtpSenderWithSsrcs({3}, local_video_track_.get());
+ AddRtpReceiverWithSsrcs({4}, remote_video_track_.get());
+ InitializeMap();
+ EXPECT_TRUE(map_.GetVoiceSenderInfoBySsrc(1));
+ EXPECT_TRUE(map_.GetVoiceReceiverInfoBySsrc(2));
+ EXPECT_TRUE(map_.GetVideoSenderInfoBySsrc(3));
+ EXPECT_TRUE(map_.GetVideoReceiverInfoBySsrc(4));
+ EXPECT_FALSE(map_.GetVoiceSenderInfoBySsrc(2));
+ EXPECT_FALSE(map_.GetVoiceSenderInfoBySsrc(1024));
+}
+
+TEST_F(TrackMediaInfoMapTest, GetAttachmentIdByTrack) {
+ AddRtpSenderWithSsrcs({1}, local_audio_track_.get());
+ InitializeMap();
+ EXPECT_EQ(rtp_senders_[0]->AttachmentId(),
+ map_.GetAttachmentIdByTrack(local_audio_track_.get()));
+ EXPECT_EQ(absl::nullopt,
+ map_.GetAttachmentIdByTrack(local_video_track_.get()));
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/transceiver_list.cc b/third_party/libwebrtc/pc/transceiver_list.cc
new file mode 100644
index 0000000000..250dfbc9e2
--- /dev/null
+++ b/third_party/libwebrtc/pc/transceiver_list.cc
@@ -0,0 +1,88 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/transceiver_list.h"
+
+#include <string>
+
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+
+// Flags this state as belonging to a transceiver created while applying a
+// description, so rollback knows it may need to remove the transceiver.
+// Must be called before any m= section is captured (DCHECK enforced).
+void TransceiverStableState::set_newly_created() {
+  RTC_DCHECK(!has_m_section_);
+  newly_created_ = true;
+}
+
+// Captures the m= section association (mid and m-line index) the first time
+// it is called. Once captured, later calls leave the stored values untouched,
+// protecting the stable-state snapshot from subsequent overwrites.
+void TransceiverStableState::SetMSectionIfUnset(
+    absl::optional<std::string> mid,
+    absl::optional<size_t> mline_index) {
+  if (has_m_section_) {
+    return;
+  }
+  mid_ = mid;
+  mline_index_ = mline_index;
+  has_m_section_ = true;
+}
+
+// Snapshots the remote stream ids for rollback. Only the first snapshot is
+// kept; once a value is present, further calls are ignored.
+void TransceiverStableState::SetRemoteStreamIds(
+    const std::vector<std::string>& ids) {
+  if (remote_stream_ids_.has_value()) {
+    return;
+  }
+  remote_stream_ids_ = ids;
+}
+
+// Stores `encodings` as the send encodings to restore on rollback. Note that
+// unlike the other setters in this class, this one overwrites any previously
+// stored value on every call.
+void TransceiverStableState::SetInitSendEncodings(
+    const std::vector<RtpEncodingParameters>& encodings) {
+  init_send_encodings_ = encodings;
+}
+
+// Returns raw pointers to the internal transceiver objects. These pointers
+// are not thread-safe and must be consumed on this same thread.
+std::vector<RtpTransceiver*> TransceiverList::ListInternal() const {
+  RTC_DCHECK_RUN_ON(&sequence_checker_);
+  std::vector<RtpTransceiver*> internals;
+  // Reserve up front and iterate by const reference: the original copied a
+  // scoped_refptr per element (refcount churn) and reallocated while growing.
+  internals.reserve(transceivers_.size());
+  for (const auto& transceiver : transceivers_) {
+    internals.push_back(transceiver->internal());
+  }
+  return internals;
+}
+
+// Returns the transceiver whose sender equals `sender`, or nullptr if none.
+RtpTransceiverProxyRefPtr TransceiverList::FindBySender(
+    rtc::scoped_refptr<RtpSenderInterface> sender) const {
+  RTC_DCHECK_RUN_ON(&sequence_checker_);
+  // Iterate by const reference; copying a scoped_refptr per element would
+  // cost an unnecessary refcount increment/decrement each iteration.
+  for (const auto& transceiver : transceivers_) {
+    if (transceiver->sender() == sender) {
+      return transceiver;
+    }
+  }
+  return nullptr;
+}
+
+// Returns the transceiver associated with `mid`, or nullptr if none matches.
+RtpTransceiverProxyRefPtr TransceiverList::FindByMid(
+    const std::string& mid) const {
+  RTC_DCHECK_RUN_ON(&sequence_checker_);
+  // Const-reference iteration avoids per-element scoped_refptr copies.
+  for (const auto& transceiver : transceivers_) {
+    if (transceiver->mid() == mid) {
+      return transceiver;
+    }
+  }
+  return nullptr;
+}
+
+// Returns the transceiver bound to m-line `mline_index`, or nullptr if none.
+RtpTransceiverProxyRefPtr TransceiverList::FindByMLineIndex(
+    size_t mline_index) const {
+  RTC_DCHECK_RUN_ON(&sequence_checker_);
+  // Const-reference iteration avoids per-element scoped_refptr copies.
+  for (const auto& transceiver : transceivers_) {
+    if (transceiver->internal()->mline_index() == mline_index) {
+      return transceiver;
+    }
+  }
+  return nullptr;
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/transceiver_list.h b/third_party/libwebrtc/pc/transceiver_list.h
new file mode 100644
index 0000000000..848ccc2c3b
--- /dev/null
+++ b/third_party/libwebrtc/pc/transceiver_list.h
@@ -0,0 +1,162 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_TRANSCEIVER_LIST_H_
+#define PC_TRANSCEIVER_LIST_H_
+
+#include <stddef.h>
+
+#include <algorithm>
+#include <map>
+#include <string>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/media_types.h"
+#include "api/rtc_error.h"
+#include "api/rtp_parameters.h"
+#include "api/rtp_sender_interface.h"
+#include "api/scoped_refptr.h"
+#include "api/sequence_checker.h"
+#include "pc/rtp_transceiver.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/system/no_unique_address.h"
+#include "rtc_base/thread_annotations.h"
+
+namespace webrtc {
+
+typedef rtc::scoped_refptr<RtpTransceiverProxyWithInternal<RtpTransceiver>>
+ RtpTransceiverProxyRefPtr;
+
+// Captures partial state to be used for rollback. Applicable only in
+// Unified Plan.
+class TransceiverStableState {
+ public:
+  TransceiverStableState() {}
+  // Marks the transceiver as created while applying a description; rollback
+  // may then need to remove it (see .cc - DCHECKs no m= section is set yet).
+  void set_newly_created();
+  // One-shot: captures mid/mline on the first call, no-op afterwards.
+  void SetMSectionIfUnset(absl::optional<std::string> mid,
+                          absl::optional<size_t> mline_index);
+  // One-shot: captures the remote stream ids on the first call only.
+  void SetRemoteStreamIds(const std::vector<std::string>& ids);
+  // Overwrites (not one-shot) the stored initial send encodings.
+  void SetInitSendEncodings(
+      const std::vector<RtpEncodingParameters>& encodings);
+  void SetFiredDirection(
+      absl::optional<RtpTransceiverDirection> fired_direction) {
+    fired_direction_ = fired_direction;
+  }
+  absl::optional<std::string> mid() const { return mid_; }
+  absl::optional<size_t> mline_index() const { return mline_index_; }
+  absl::optional<std::vector<std::string>> remote_stream_ids() const {
+    return remote_stream_ids_;
+  }
+  absl::optional<std::vector<RtpEncodingParameters>> init_send_encodings()
+      const {
+    return init_send_encodings_;
+  }
+  bool has_m_section() const { return has_m_section_; }
+  bool newly_created() const { return newly_created_; }
+  bool did_set_fired_direction() const { return fired_direction_.has_value(); }
+  // Because fired_direction() is nullable, did_set_fired_direction() is used
+  // to distinguish between "no value" and "null value". Calling this without
+  // a prior SetFiredDirection() trips the DCHECK.
+  absl::optional<RtpTransceiverDirection> fired_direction() const {
+    RTC_DCHECK(did_set_fired_direction());
+    return fired_direction_.value();
+  }
+
+ private:
+  absl::optional<std::string> mid_;
+  absl::optional<size_t> mline_index_;
+  absl::optional<std::vector<std::string>> remote_stream_ids_;
+  absl::optional<std::vector<RtpEncodingParameters>> init_send_encodings_;
+  // Indicates that mid value from stable state has been captured and
+  // that rollback has to restore the transceiver. Also protects against
+  // subsequent overwrites.
+  bool has_m_section_ = false;
+  // Indicates that the transceiver was created as part of applying a
+  // description to track potential need for removing transceiver during
+  // rollback.
+  bool newly_created_ = false;
+  // `fired_direction_` is nullable, so an optional of an optional is used to
+  // distinguish between null and not set (sorry if this hurts your eyes).
+  absl::optional<absl::optional<RtpTransceiverDirection>> fired_direction_;
+};
+
+// This class encapsulates the active list of transceivers on a
+// PeerConnection, and offers convenient functions on that list.
+// It is a single-thread class; all operations must be performed
+// on the same thread.
+class TransceiverList {
+ public:
+  // Returns a copy of the currently active list of transceivers. The
+  // list consists of rtc::scoped_refptrs, which will keep the transceivers
+  // from being deallocated, even if they are removed from the TransceiverList.
+  std::vector<RtpTransceiverProxyRefPtr> List() const {
+    RTC_DCHECK_RUN_ON(&sequence_checker_);
+    return transceivers_;
+  }
+  // As above, but does not check thread ownership. Unsafe.
+  // TODO(bugs.webrtc.org/12692): Refactor and remove
+  std::vector<RtpTransceiverProxyRefPtr> UnsafeList() const {
+    return transceivers_;
+  }
+
+  // Returns a list of the internal() pointers of the currently active list
+  // of transceivers. These raw pointers are not thread-safe, so need to
+  // be consumed on the same thread.
+  std::vector<RtpTransceiver*> ListInternal() const;
+
+  // Appends `transceiver` to the active list.
+  void Add(RtpTransceiverProxyRefPtr transceiver) {
+    RTC_DCHECK_RUN_ON(&sequence_checker_);
+    transceivers_.push_back(transceiver);
+  }
+  // Removes all occurrences of `transceiver` (erase-remove idiom).
+  void Remove(RtpTransceiverProxyRefPtr transceiver) {
+    RTC_DCHECK_RUN_ON(&sequence_checker_);
+    transceivers_.erase(
+        std::remove(transceivers_.begin(), transceivers_.end(), transceiver),
+        transceivers_.end());
+  }
+  // Linear lookups; each returns nullptr when no match is found (see .cc).
+  RtpTransceiverProxyRefPtr FindBySender(
+      rtc::scoped_refptr<RtpSenderInterface> sender) const;
+  RtpTransceiverProxyRefPtr FindByMid(const std::string& mid) const;
+  RtpTransceiverProxyRefPtr FindByMLineIndex(size_t mline_index) const;
+
+  // Find or create the stable state for a transceiver.
+  TransceiverStableState* StableState(RtpTransceiverProxyRefPtr transceiver) {
+    RTC_DCHECK_RUN_ON(&sequence_checker_);
+    return &(transceiver_stable_states_by_transceivers_[transceiver]);
+  }
+
+  // Drops all captured rollback state.
+  void DiscardStableStates() {
+    RTC_DCHECK_RUN_ON(&sequence_checker_);
+    transceiver_stable_states_by_transceivers_.clear();
+  }
+
+  // Mutable access to the full stable-state map (caller stays on this thread).
+  std::map<RtpTransceiverProxyRefPtr, TransceiverStableState>& StableStates() {
+    RTC_DCHECK_RUN_ON(&sequence_checker_);
+    return transceiver_stable_states_by_transceivers_;
+  }
+
+ private:
+  RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_;
+  std::vector<RtpTransceiverProxyRefPtr> transceivers_;
+  // TODO(bugs.webrtc.org/12692): Add RTC_GUARDED_BY(sequence_checker_);
+
+  // Holds changes made to transceivers during applying descriptors for
+  // potential rollback. Gets cleared once signaling state goes to stable.
+  std::map<RtpTransceiverProxyRefPtr, TransceiverStableState>
+      transceiver_stable_states_by_transceivers_
+          RTC_GUARDED_BY(sequence_checker_);
+  // Holds remote stream ids for transceivers from stable state.
+  // NOTE(review): not referenced by any member function in this header -
+  // possibly dead; verify against the .cc before removing.
+  std::map<RtpTransceiverProxyRefPtr, std::vector<std::string>>
+      remote_stream_ids_by_transceivers_ RTC_GUARDED_BY(sequence_checker_);
+};
+
+} // namespace webrtc
+
+#endif // PC_TRANSCEIVER_LIST_H_
diff --git a/third_party/libwebrtc/pc/transport_stats.cc b/third_party/libwebrtc/pc/transport_stats.cc
new file mode 100644
index 0000000000..8049c07a77
--- /dev/null
+++ b/third_party/libwebrtc/pc/transport_stats.cc
@@ -0,0 +1,21 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "pc/transport_stats.h"
+
+namespace cricket {
+
+// Special members are declared in transport_stats.h and defaulted out of
+// line here so their definitions live in this translation unit.
+TransportChannelStats::TransportChannelStats() = default;
+
+TransportChannelStats::TransportChannelStats(const TransportChannelStats&) =
+    default;
+
+TransportChannelStats::~TransportChannelStats() = default;
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/pc/transport_stats.h b/third_party/libwebrtc/pc/transport_stats.h
new file mode 100644
index 0000000000..46dccc97f8
--- /dev/null
+++ b/third_party/libwebrtc/pc/transport_stats.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_TRANSPORT_STATS_H_
+#define PC_TRANSPORT_STATS_H_
+
+#include <string>
+#include <vector>
+
+#include "api/dtls_transport_interface.h"
+#include "p2p/base/dtls_transport_internal.h"
+#include "p2p/base/ice_transport_internal.h"
+#include "p2p/base/port.h"
+#include "rtc_base/ssl_stream_adapter.h"
+
+namespace cricket {
+
+// Per-channel (per-ICE-component) transport statistics snapshot.
+struct TransportChannelStats {
+  TransportChannelStats();
+  TransportChannelStats(const TransportChannelStats&);
+  ~TransportChannelStats();
+
+  // ICE component id this channel belongs to; 0 when unset.
+  int component = 0;
+  int ssl_version_bytes = 0;
+  int srtp_crypto_suite = rtc::kSrtpInvalidCryptoSuite;
+  int ssl_cipher_suite = rtc::kTlsNullWithNullNull;
+  // NOTE(review): absl::optional is used here but "absl/types/optional.h" is
+  // not included directly - relies on a transitive include; consider adding.
+  absl::optional<rtc::SSLRole> dtls_role;
+  webrtc::DtlsTransportState dtls_state = webrtc::DtlsTransportState::kNew;
+  IceTransportStats ice_transport_stats;
+  uint16_t ssl_peer_signature_algorithm = rtc::kSslSignatureAlgorithmUnknown;
+};
+
+// Information about all the channels of a transport.
+// TODO(hta): Consider if a simple vector is as good as a map.
+typedef std::vector<TransportChannelStats> TransportChannelStatsList;
+
+// Information about the stats of a transport.
+struct TransportStats {
+  std::string transport_name;
+  TransportChannelStatsList channel_stats;
+};
+
+} // namespace cricket
+
+#endif // PC_TRANSPORT_STATS_H_
diff --git a/third_party/libwebrtc/pc/usage_pattern.cc b/third_party/libwebrtc/pc/usage_pattern.cc
new file mode 100644
index 0000000000..848472148f
--- /dev/null
+++ b/third_party/libwebrtc/pc/usage_pattern.cc
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/usage_pattern.h"
+
+#include "api/peer_connection_interface.h"
+#include "rtc_base/logging.h"
+#include "system_wrappers/include/metrics.h"
+
+namespace webrtc {
+
+// Sets the bit corresponding to `event` in the accumulator. Bits only
+// accumulate; there is no way to clear one.
+void UsagePattern::NoteUsageEvent(UsageEvent event) {
+  usage_event_accumulator_ |= static_cast<int>(event);
+}
+
+// Records the accumulated usage bit pattern to a sparse histogram and, for
+// "interesting" failure patterns, notifies `observer` (which may be null if
+// reporting happens after close).
+void UsagePattern::ReportUsagePattern(PeerConnectionObserver* observer) const {
+  RTC_DLOG(LS_INFO) << "Usage signature is " << usage_event_accumulator_;
+  RTC_HISTOGRAM_ENUMERATION_SPARSE("WebRTC.PeerConnection.UsagePattern",
+                                   usage_event_accumulator_,
+                                   static_cast<int>(UsageEvent::MAX_VALUE));
+  // "Interesting" means: a local description succeeded and local candidates
+  // were collected (all bad_bits set), yet nothing ever arrived from the
+  // remote side (no good_bits set) - a connection attempt that went nowhere.
+  const int bad_bits =
+      static_cast<int>(UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED) |
+      static_cast<int>(UsageEvent::CANDIDATE_COLLECTED);
+  const int good_bits =
+      static_cast<int>(UsageEvent::SET_REMOTE_DESCRIPTION_SUCCEEDED) |
+      static_cast<int>(UsageEvent::REMOTE_CANDIDATE_ADDED) |
+      static_cast<int>(UsageEvent::ICE_STATE_CONNECTED);
+  if ((usage_event_accumulator_ & bad_bits) == bad_bits &&
+      (usage_event_accumulator_ & good_bits) == 0) {
+    // If called after close(), we can't report, because observer may have
+    // been deallocated, and therefore pointer is null. Write to log instead.
+    if (observer) {
+      observer->OnInterestingUsage(usage_event_accumulator_);
+    } else {
+      RTC_LOG(LS_INFO) << "Interesting usage signature "
+                       << usage_event_accumulator_
+                       << " observed after observer shutdown";
+    }
+  }
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/usage_pattern.h b/third_party/libwebrtc/pc/usage_pattern.h
new file mode 100644
index 0000000000..14373303ed
--- /dev/null
+++ b/third_party/libwebrtc/pc/usage_pattern.h
@@ -0,0 +1,77 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_USAGE_PATTERN_H_
+#define PC_USAGE_PATTERN_H_
+
+#include "api/peer_connection_interface.h"
+
+namespace webrtc {
+
+class PeerConnectionObserver;
+
+// A bit in the usage pattern is registered when its defining event occurs
+// at least once.
+enum class UsageEvent : int {
+  // Each enumerator is a distinct bit; they are OR'ed together into
+  // UsagePattern's accumulator.
+  TURN_SERVER_ADDED = 0x01,
+  STUN_SERVER_ADDED = 0x02,
+  DATA_ADDED = 0x04,
+  AUDIO_ADDED = 0x08,
+  VIDEO_ADDED = 0x10,
+  // `SetLocalDescription` returns successfully.
+  SET_LOCAL_DESCRIPTION_SUCCEEDED = 0x20,
+  // `SetRemoteDescription` returns successfully.
+  SET_REMOTE_DESCRIPTION_SUCCEEDED = 0x40,
+  // A local candidate (with type host, server-reflexive, or relay) is
+  // collected.
+  CANDIDATE_COLLECTED = 0x80,
+  // A remote candidate is successfully added via `AddIceCandidate`.
+  ADD_ICE_CANDIDATE_SUCCEEDED = 0x100,
+  ICE_STATE_CONNECTED = 0x200,
+  CLOSE_CALLED = 0x400,
+  // A local candidate with private IP is collected.
+  PRIVATE_CANDIDATE_COLLECTED = 0x800,
+  // A remote candidate with private IP is added, either via AddIceCandidate
+  // or from the remote description.
+  REMOTE_PRIVATE_CANDIDATE_ADDED = 0x1000,
+  // A local mDNS candidate is collected.
+  MDNS_CANDIDATE_COLLECTED = 0x2000,
+  // A remote mDNS candidate is added, either via AddIceCandidate or from the
+  // remote description.
+  REMOTE_MDNS_CANDIDATE_ADDED = 0x4000,
+  // A local candidate with IPv6 address is collected.
+  IPV6_CANDIDATE_COLLECTED = 0x8000,
+  // A remote candidate with IPv6 address is added, either via AddIceCandidate
+  // or from the remote description.
+  REMOTE_IPV6_CANDIDATE_ADDED = 0x10000,
+  // A remote candidate (with type host, server-reflexive, or relay) is
+  // successfully added, either via AddIceCandidate or from the remote
+  // description.
+  REMOTE_CANDIDATE_ADDED = 0x20000,
+  // An explicit host-host candidate pair is selected, i.e. both the local and
+  // the remote candidates have the host type. This does not include candidate
+  // pairs formed with equivalent prflx remote candidates, e.g. a host-prflx
+  // pair where the prflx candidate has the same base as a host candidate of
+  // the remote peer.
+  DIRECT_CONNECTION_SELECTED = 0x40000,
+  MAX_VALUE = 0x80000,
+};
+
+class UsagePattern {
+ public:
+  // Sets the bit for `event`; bits accumulate and are never cleared.
+  void NoteUsageEvent(UsageEvent event);
+  // Logs the accumulated pattern to a histogram and notifies `observer` of
+  // "interesting" (failed-looking) patterns; `observer` may be null.
+  void ReportUsagePattern(PeerConnectionObserver* observer) const;
+
+ private:
+  // Bitwise OR of all UsageEvent values observed so far.
+  int usage_event_accumulator_ = 0;
+};
+
+} // namespace webrtc
+#endif // PC_USAGE_PATTERN_H_
diff --git a/third_party/libwebrtc/pc/used_ids.h b/third_party/libwebrtc/pc/used_ids.h
new file mode 100644
index 0000000000..6b342cbea8
--- /dev/null
+++ b/third_party/libwebrtc/pc/used_ids.h
@@ -0,0 +1,192 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef PC_USED_IDS_H_
+#define PC_USED_IDS_H_
+
+#include <set>
+#include <vector>
+
+#include "api/rtp_parameters.h"
+#include "media/base/codec.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+
+namespace cricket {
+// Tracks integer ids in [min_allowed_id, max_allowed_id] and reassigns
+// duplicates to unused values, searching downward from the max. IdStruct
+// must expose a public, mutable `int id` member.
+template <typename IdStruct>
+class UsedIds {
+ public:
+  UsedIds(int min_allowed_id, int max_allowed_id)
+      : min_allowed_id_(min_allowed_id),
+        max_allowed_id_(max_allowed_id),
+        next_id_(max_allowed_id) {}
+  virtual ~UsedIds() {}
+
+  // Loops through all Id in `ids` and changes its id if it is
+  // already in use by another IdStruct. Call this method with all Id
+  // in a session description to make sure no duplicate ids exist.
+  // Note that typename Id must be a type of IdStruct.
+  template <typename Id>
+  void FindAndSetIdUsed(std::vector<Id>* ids) {
+    // Iterate by mutable reference: FindAndSetIdUsed(IdStruct*) rewrites
+    // `id.id` on collision, so the elements must not be const (taking the
+    // address of a const reference would not convert to IdStruct*).
+    for (Id& id : *ids) {
+      FindAndSetIdUsed(&id);
+    }
+  }
+
+  // Finds and sets an unused id if the `idstruct` id is already in use.
+  void FindAndSetIdUsed(IdStruct* idstruct) {
+    const int original_id = idstruct->id;
+    int new_id = idstruct->id;
+
+    if (original_id > max_allowed_id_ || original_id < min_allowed_id_) {
+      // If the original id is not in range - this is an id that can't be
+      // dynamically changed.
+      return;
+    }
+
+    if (IsIdUsed(original_id)) {
+      new_id = FindUnusedId();
+      // Duplicate id found. Reassign from the original id to the new.
+      idstruct->id = new_id;
+    }
+    SetIdUsed(new_id);
+  }
+
+ protected:
+  virtual bool IsIdUsed(int new_id) {
+    return id_set_.find(new_id) != id_set_.end();
+  }
+  const int min_allowed_id_;
+  const int max_allowed_id_;
+
+ private:
+  // Returns the first unused id in reverse order.
+  // This hopefully reduces the risk of more collisions. We want to change the
+  // default ids as little as possible. This function is virtual and can be
+  // overridden if the search for unused IDs should follow a specific pattern.
+  // DCHECK-fails when every id in range is already occupied.
+  virtual int FindUnusedId() {
+    while (IsIdUsed(next_id_) && next_id_ >= min_allowed_id_) {
+      --next_id_;
+    }
+    RTC_DCHECK(next_id_ >= min_allowed_id_);
+    return next_id_;
+  }
+
+  // Records `new_id` as taken; DCHECKs it is in range and not already used.
+  void SetIdUsed(int new_id) {
+    RTC_DCHECK(new_id >= min_allowed_id_);
+    RTC_DCHECK(new_id <= max_allowed_id_);
+    RTC_DCHECK(!IsIdUsed(new_id));
+    id_set_.insert(new_id);
+  }
+  int next_id_;
+  std::set<int> id_set_;
+};
+
+// Helper class used for finding duplicate RTP payload types among audio, video
+// and data codecs. When bundle is used the payload types may not collide.
+class UsedPayloadTypes : public UsedIds<Codec> {
+ public:
+  UsedPayloadTypes()
+      : UsedIds<Codec>(kFirstDynamicPayloadTypeLowerRange,
+                       kLastDynamicPayloadTypeUpperRange) {}
+
+  // Check if a payload type is valid. The range [64-95] is forbidden
+  // when rtcp-mux is used.
+  // Takes `codec` by const reference: only `id` is read, and the previous
+  // by-value signature copied the (likely non-trivial) Codec on every call.
+  static bool IsIdValid(const Codec& codec, bool rtcp_mux) {
+    if (rtcp_mux && (codec.id > kLastDynamicPayloadTypeLowerRange &&
+                     codec.id < kFirstDynamicPayloadTypeUpperRange)) {
+      return false;
+    }
+    return codec.id >= 0 && codec.id <= kLastDynamicPayloadTypeUpperRange;
+  }
+
+ protected:
+  // Treats the RTCP-conflicting range (63, 96) as permanently occupied so
+  // codecs are never reassigned into it.
+  bool IsIdUsed(int new_id) override {
+    // Range marked for RTCP avoidance is "used".
+    if (new_id > kLastDynamicPayloadTypeLowerRange &&
+        new_id < kFirstDynamicPayloadTypeUpperRange)
+      return true;
+    return UsedIds<Codec>::IsIdUsed(new_id);
+  }
+
+ private:
+  static const int kFirstDynamicPayloadTypeLowerRange = 35;
+  static const int kLastDynamicPayloadTypeLowerRange = 63;
+
+  static const int kFirstDynamicPayloadTypeUpperRange = 96;
+  static const int kLastDynamicPayloadTypeUpperRange = 127;
+};
+
+// Helper class used for finding duplicate RTP Header extension ids among
+// audio and video extensions.
+class UsedRtpHeaderExtensionIds : public UsedIds<webrtc::RtpExtension> {
+ public:
+  enum class IdDomain {
+    // Only allocate IDs that fit in one-byte header extensions.
+    kOneByteOnly,
+    // Prefer to allocate one-byte header extension IDs, but overflow to
+    // two-byte if none are left.
+    kTwoByteAllowed,
+  };
+
+  // The allowed range is [kMinId, kOneByteHeaderExtensionMaxId] for
+  // kOneByteOnly and [kMinId, kMaxId] for kTwoByteAllowed.
+  explicit UsedRtpHeaderExtensionIds(IdDomain id_domain)
+      : UsedIds<webrtc::RtpExtension>(
+            webrtc::RtpExtension::kMinId,
+            id_domain == IdDomain::kTwoByteAllowed
+                ? webrtc::RtpExtension::kMaxId
+                : webrtc::RtpExtension::kOneByteHeaderExtensionMaxId),
+        id_domain_(id_domain),
+        next_extension_id_(webrtc::RtpExtension::kOneByteHeaderExtensionMaxId) {
+  }
+
+ private:
+  // Returns the first unused id in reverse order from the max id of one byte
+  // header extensions. This hopefully reduce the risk of more collisions. We
+  // want to change the default ids as little as possible. If no unused id is
+  // found and two byte header extensions are enabled (i.e.,
+  // `extmap_allow_mixed_` is true), search for unused ids from 15 to 255.
+  int FindUnusedId() override {
+    // Phase 1: search one-byte ids downward from the one-byte max.
+    if (next_extension_id_ <=
+        webrtc::RtpExtension::kOneByteHeaderExtensionMaxId) {
+      // First search in reverse order from the max id of one byte header
+      // extensions.
+      while (IsIdUsed(next_extension_id_) &&
+             next_extension_id_ >= min_allowed_id_) {
+        --next_extension_id_;
+      }
+    }
+
+    // Phase 2 (two-byte domain only): once one-byte ids are exhausted,
+    // search two-byte ids upward instead.
+    if (id_domain_ == IdDomain::kTwoByteAllowed) {
+      if (next_extension_id_ < min_allowed_id_) {
+        // We have searched among all one-byte IDs without finding an unused ID,
+        // continue at the first two-byte ID.
+        next_extension_id_ =
+            webrtc::RtpExtension::kOneByteHeaderExtensionMaxId + 1;
+      }
+
+      if (next_extension_id_ >
+          webrtc::RtpExtension::kOneByteHeaderExtensionMaxId) {
+        while (IsIdUsed(next_extension_id_) &&
+               next_extension_id_ <= max_allowed_id_) {
+          ++next_extension_id_;
+        }
+      }
+    }
+    // DCHECK-fails when the whole allowed range is occupied.
+    RTC_DCHECK(next_extension_id_ >= min_allowed_id_);
+    RTC_DCHECK(next_extension_id_ <= max_allowed_id_);
+    return next_extension_id_;
+  }
+
+  const IdDomain id_domain_;
+  // Cursor of the id search; persists across calls so searches resume where
+  // the previous one stopped.
+  int next_extension_id_;
+};
+
+} // namespace cricket
+
+#endif // PC_USED_IDS_H_
diff --git a/third_party/libwebrtc/pc/used_ids_unittest.cc b/third_party/libwebrtc/pc/used_ids_unittest.cc
new file mode 100644
index 0000000000..6362f2773a
--- /dev/null
+++ b/third_party/libwebrtc/pc/used_ids_unittest.cc
@@ -0,0 +1,178 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/used_ids.h"
+
+#include "absl/strings/string_view.h"
+#include "test/gtest.h"
+
+using cricket::UsedIds;
+using cricket::UsedRtpHeaderExtensionIds;
+
+// Minimal IdStruct for exercising UsedIds: just the required `id` member.
+struct Foo {
+  int id;
+};
+
+// Ids already unique within the range stay exactly as given.
+TEST(UsedIdsTest, UniqueIdsAreUnchanged) {
+  UsedIds<Foo> used_ids(1, 5);
+  for (int i = 1; i <= 5; ++i) {
+    Foo id = {i};
+    used_ids.FindAndSetIdUsed(&id);
+    EXPECT_EQ(id.id, i);
+  }
+}
+
+// Ids outside [min, max] are never reassigned, even on collision.
+TEST(UsedIdsTest, IdsOutsideRangeAreUnchanged) {
+  UsedIds<Foo> used_ids(1, 5);
+
+  Foo id_11 = {11};
+  Foo id_12 = {12};
+  Foo id_12_collision = {12};
+  Foo id_13 = {13};
+  Foo id_13_collision = {13};
+
+  used_ids.FindAndSetIdUsed(&id_11);
+  EXPECT_EQ(id_11.id, 11);
+  used_ids.FindAndSetIdUsed(&id_12);
+  EXPECT_EQ(id_12.id, 12);
+  used_ids.FindAndSetIdUsed(&id_12_collision);
+  EXPECT_EQ(id_12_collision.id, 12);
+  used_ids.FindAndSetIdUsed(&id_13);
+  EXPECT_EQ(id_13.id, 13);
+  used_ids.FindAndSetIdUsed(&id_13_collision);
+  EXPECT_EQ(id_13_collision.id, 13);
+}
+
+// Colliding ids are reassigned starting from the max (10), counting down.
+TEST(UsedIdsTest, CollisionsAreReassignedIdsInReverseOrder) {
+  UsedIds<Foo> used_ids(1, 10);
+  Foo id_1 = {1};
+  Foo id_2 = {2};
+  Foo id_2_collision = {2};
+  Foo id_3 = {3};
+  Foo id_3_collision = {3};
+
+  used_ids.FindAndSetIdUsed(&id_1);
+  used_ids.FindAndSetIdUsed(&id_2);
+  used_ids.FindAndSetIdUsed(&id_2_collision);
+  EXPECT_EQ(id_2_collision.id, 10);
+  used_ids.FindAndSetIdUsed(&id_3);
+  used_ids.FindAndSetIdUsed(&id_3_collision);
+  EXPECT_EQ(id_3_collision.id, 9);
+}
+
+// Parameterizes the extension-id tests over the two id domains, together
+// with each domain's largest allocatable id.
+struct TestParams {
+  UsedRtpHeaderExtensionIds::IdDomain id_domain;
+  int max_id;
+};
+
+class UsedRtpHeaderExtensionIdsTest
+    : public ::testing::TestWithParam<TestParams> {};
+
+constexpr TestParams kOneByteTestParams = {
+    UsedRtpHeaderExtensionIds::IdDomain::kOneByteOnly, 14};
+constexpr TestParams kTwoByteTestParams = {
+    UsedRtpHeaderExtensionIds::IdDomain::kTwoByteAllowed, 255};
+
+INSTANTIATE_TEST_SUITE_P(All,
+                         UsedRtpHeaderExtensionIdsTest,
+                         ::testing::Values(kOneByteTestParams,
+                                           kTwoByteTestParams));
+
+// Filling the whole domain with already-unique ids changes none of them.
+TEST_P(UsedRtpHeaderExtensionIdsTest, UniqueIdsAreUnchanged) {
+  UsedRtpHeaderExtensionIds used_ids(GetParam().id_domain);
+
+  // Fill all IDs.
+  for (int j = 1; j <= GetParam().max_id; ++j) {
+    webrtc::RtpExtension extension("", j);
+    used_ids.FindAndSetIdUsed(&extension);
+    EXPECT_EQ(extension.id, j);
+  }
+}
+
+// Colliding ids are reassigned downward from 14 (the one-byte max) in both
+// domains - one-byte ids are preferred even when two-byte ids are allowed.
+TEST_P(UsedRtpHeaderExtensionIdsTest, PrioritizeReassignmentToOneByteIds) {
+  UsedRtpHeaderExtensionIds used_ids(GetParam().id_domain);
+  webrtc::RtpExtension id_1("", 1);
+  webrtc::RtpExtension id_2("", 2);
+  webrtc::RtpExtension id_2_collision("", 2);
+  webrtc::RtpExtension id_3("", 3);
+  webrtc::RtpExtension id_3_collision("", 3);
+
+  // Expect that colliding IDs are reassigned to one-byte IDs.
+  used_ids.FindAndSetIdUsed(&id_1);
+  used_ids.FindAndSetIdUsed(&id_2);
+  used_ids.FindAndSetIdUsed(&id_2_collision);
+  EXPECT_EQ(id_2_collision.id, 14);
+  used_ids.FindAndSetIdUsed(&id_3);
+  used_ids.FindAndSetIdUsed(&id_3_collision);
+  EXPECT_EQ(id_3_collision.id, 13);
+}
+
+// NOTE(review): TEST_F on a TestWithParam fixture - compiles and works here
+// because GetParam() is never called, but a plain TEST (or TEST_P) would be
+// clearer; confirm before changing.
+TEST_F(UsedRtpHeaderExtensionIdsTest, TwoByteIdsAllowed) {
+  UsedRtpHeaderExtensionIds used_ids(
+      UsedRtpHeaderExtensionIds::IdDomain::kTwoByteAllowed);
+
+  // Fill all one byte IDs.
+  for (int i = 1; i < 15; ++i) {
+    webrtc::RtpExtension id("", i);
+    used_ids.FindAndSetIdUsed(&id);
+  }
+
+  // Add new extensions with colliding IDs.
+  webrtc::RtpExtension id1_collision("", 1);
+  webrtc::RtpExtension id2_collision("", 2);
+  webrtc::RtpExtension id3_collision("", 3);
+
+  // Expect to reassign to two-byte header extension IDs, ascending from 15.
+  used_ids.FindAndSetIdUsed(&id1_collision);
+  EXPECT_EQ(id1_collision.id, 15);
+  used_ids.FindAndSetIdUsed(&id2_collision);
+  EXPECT_EQ(id2_collision.id, 16);
+  used_ids.FindAndSetIdUsed(&id3_collision);
+  EXPECT_EQ(id3_collision.id, 17);
+}
+
+// Death tests.
+// Disabled on Android because death tests misbehave on Android, see
+// base/test/gtest_util.h.
+#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
+// A collision with no unused id left must DCHECK-fail (see UsedIds::
+// FindUnusedId).
+TEST(UsedIdsDeathTest, DieWhenAllIdsAreOccupied) {
+  UsedIds<Foo> used_ids(1, 5);
+  for (int i = 1; i <= 5; ++i) {
+    Foo id = {i};
+    used_ids.FindAndSetIdUsed(&id);
+  }
+  Foo id_collision = {3};
+  EXPECT_DEATH(used_ids.FindAndSetIdUsed(&id_collision), "");
+}
+
+using UsedRtpHeaderExtensionIdsDeathTest = UsedRtpHeaderExtensionIdsTest;
+INSTANTIATE_TEST_SUITE_P(All,
+                         UsedRtpHeaderExtensionIdsDeathTest,
+                         ::testing::Values(kOneByteTestParams,
+                                           kTwoByteTestParams));
+
+// Same exhaustion check for extension ids, in both id domains.
+TEST_P(UsedRtpHeaderExtensionIdsDeathTest, DieWhenAllIdsAreOccupied) {
+  UsedRtpHeaderExtensionIds used_ids(GetParam().id_domain);
+
+  // Fill all IDs.
+  for (int j = 1; j <= GetParam().max_id; ++j) {
+    webrtc::RtpExtension id("", j);
+    used_ids.FindAndSetIdUsed(&id);
+  }
+
+  webrtc::RtpExtension id1_collision("", 1);
+  webrtc::RtpExtension id2_collision("", 2);
+  webrtc::RtpExtension id3_collision("", GetParam().max_id);
+
+  EXPECT_DEATH(used_ids.FindAndSetIdUsed(&id1_collision), "");
+  EXPECT_DEATH(used_ids.FindAndSetIdUsed(&id2_collision), "");
+  EXPECT_DEATH(used_ids.FindAndSetIdUsed(&id3_collision), "");
+}
+#endif // RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
diff --git a/third_party/libwebrtc/pc/video_rtp_receiver.cc b/third_party/libwebrtc/pc/video_rtp_receiver.cc
new file mode 100644
index 0000000000..4432982027
--- /dev/null
+++ b/third_party/libwebrtc/pc/video_rtp_receiver.cc
@@ -0,0 +1,383 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/video_rtp_receiver.h"
+
+#include <stddef.h>
+
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "api/video/recordable_encoded_frame.h"
+#include "pc/video_track.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+
+VideoRtpReceiver::VideoRtpReceiver(rtc::Thread* worker_thread,
+ std::string receiver_id,
+ std::vector<std::string> stream_ids)
+ : VideoRtpReceiver(worker_thread,
+ receiver_id,
+ CreateStreamsFromIds(std::move(stream_ids))) {}
+
+VideoRtpReceiver::VideoRtpReceiver(
+ rtc::Thread* worker_thread,
+ const std::string& receiver_id,
+ const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& streams)
+ : worker_thread_(worker_thread),
+ id_(receiver_id),
+ source_(rtc::make_ref_counted<VideoRtpTrackSource>(&source_callback_)),
+ track_(VideoTrackProxyWithInternal<VideoTrack>::Create(
+ rtc::Thread::Current(),
+ worker_thread,
+ VideoTrack::Create(receiver_id, source_, worker_thread))),
+ attachment_id_(GenerateUniqueId()) {
+ RTC_DCHECK(worker_thread_);
+ SetStreams(streams);
+ RTC_DCHECK_EQ(source_->state(), MediaSourceInterface::kInitializing);
+}
+
+VideoRtpReceiver::~VideoRtpReceiver() {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
+ RTC_DCHECK(!media_channel_);
+}
+
+std::vector<std::string> VideoRtpReceiver::stream_ids() const {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
+ std::vector<std::string> stream_ids(streams_.size());
+ for (size_t i = 0; i < streams_.size(); ++i)
+ stream_ids[i] = streams_[i]->id();
+ return stream_ids;
+}
+
+rtc::scoped_refptr<DtlsTransportInterface> VideoRtpReceiver::dtls_transport()
+ const {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
+ return dtls_transport_;
+}
+
+std::vector<rtc::scoped_refptr<MediaStreamInterface>>
+VideoRtpReceiver::streams() const {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
+ return streams_;
+}
+
+RtpParameters VideoRtpReceiver::GetParameters() const {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ if (!media_channel_)
+ return RtpParameters();
+ auto current_ssrc = ssrc();
+ return current_ssrc.has_value()
+ ? media_channel_->GetRtpReceiverParameters(current_ssrc.value())
+ : media_channel_->GetDefaultRtpReceiveParameters();
+}
+
+void VideoRtpReceiver::SetFrameDecryptor(
+ rtc::scoped_refptr<FrameDecryptorInterface> frame_decryptor) {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ frame_decryptor_ = std::move(frame_decryptor);
+ // Special Case: Set the frame decryptor to any value on any existing channel.
+ if (media_channel_ && signaled_ssrc_) {
+ media_channel_->SetFrameDecryptor(*signaled_ssrc_, frame_decryptor_);
+ }
+}
+
+rtc::scoped_refptr<FrameDecryptorInterface>
+VideoRtpReceiver::GetFrameDecryptor() const {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ return frame_decryptor_;
+}
+
+void VideoRtpReceiver::SetDepacketizerToDecoderFrameTransformer(
+ rtc::scoped_refptr<FrameTransformerInterface> frame_transformer) {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ frame_transformer_ = std::move(frame_transformer);
+ if (media_channel_) {
+ media_channel_->SetDepacketizerToDecoderFrameTransformer(
+ signaled_ssrc_.value_or(0), frame_transformer_);
+ }
+}
+
+void VideoRtpReceiver::Stop() {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
+ source_->SetState(MediaSourceInterface::kEnded);
+ track_->internal()->set_ended();
+}
+
+void VideoRtpReceiver::RestartMediaChannel(absl::optional<uint32_t> ssrc) {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
+ MediaSourceInterface::SourceState state = source_->state();
+ // TODO(tommi): Can we restart the media channel without blocking?
+ worker_thread_->BlockingCall([&] {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ RestartMediaChannel_w(std::move(ssrc), state);
+ });
+ source_->SetState(MediaSourceInterface::kLive);
+}
+
+void VideoRtpReceiver::RestartMediaChannel_w(
+ absl::optional<uint32_t> ssrc,
+ MediaSourceInterface::SourceState state) {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ if (!media_channel_) {
+ return; // Can't restart.
+ }
+
+ const bool encoded_sink_enabled = saved_encoded_sink_enabled_;
+
+ if (state != MediaSourceInterface::kInitializing) {
+ if (ssrc == signaled_ssrc_)
+ return;
+
+ // Disconnect from a previous ssrc.
+ SetSink(nullptr);
+
+ if (encoded_sink_enabled)
+ SetEncodedSinkEnabled(false);
+ }
+
+ // Set up the new ssrc.
+ signaled_ssrc_ = std::move(ssrc);
+ SetSink(source_->sink());
+ if (encoded_sink_enabled) {
+ SetEncodedSinkEnabled(true);
+ }
+
+ if (frame_transformer_ && media_channel_) {
+ media_channel_->SetDepacketizerToDecoderFrameTransformer(
+ signaled_ssrc_.value_or(0), frame_transformer_);
+ }
+
+ if (media_channel_ && signaled_ssrc_) {
+ if (frame_decryptor_) {
+ media_channel_->SetFrameDecryptor(*signaled_ssrc_, frame_decryptor_);
+ }
+
+ media_channel_->SetBaseMinimumPlayoutDelayMs(*signaled_ssrc_,
+ delay_.GetMs());
+ }
+}
+
+void VideoRtpReceiver::SetSink(rtc::VideoSinkInterface<VideoFrame>* sink) {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ if (signaled_ssrc_) {
+ media_channel_->SetSink(*signaled_ssrc_, sink);
+ } else {
+ media_channel_->SetDefaultSink(sink);
+ }
+}
+
+void VideoRtpReceiver::SetupMediaChannel(uint32_t ssrc) {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
+ RestartMediaChannel(ssrc);
+}
+
+void VideoRtpReceiver::SetupUnsignaledMediaChannel() {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
+ RestartMediaChannel(absl::nullopt);
+}
+
+absl::optional<uint32_t> VideoRtpReceiver::ssrc() const {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ if (!signaled_ssrc_.has_value() && media_channel_) {
+ return media_channel_->GetUnsignaledSsrc();
+ }
+ return signaled_ssrc_;
+}
+
+void VideoRtpReceiver::set_stream_ids(std::vector<std::string> stream_ids) {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
+ SetStreams(CreateStreamsFromIds(std::move(stream_ids)));
+}
+
+void VideoRtpReceiver::set_transport(
+ rtc::scoped_refptr<DtlsTransportInterface> dtls_transport) {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
+ dtls_transport_ = std::move(dtls_transport);
+}
+
+void VideoRtpReceiver::SetStreams(
+ const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& streams) {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
+ // Remove remote track from any streams that are going away.
+ for (const auto& existing_stream : streams_) {
+ bool removed = true;
+ for (const auto& stream : streams) {
+ if (existing_stream->id() == stream->id()) {
+ RTC_DCHECK_EQ(existing_stream.get(), stream.get());
+ removed = false;
+ break;
+ }
+ }
+ if (removed) {
+ existing_stream->RemoveTrack(video_track());
+ }
+ }
+ // Add remote track to any streams that are new.
+ for (const auto& stream : streams) {
+ bool added = true;
+ for (const auto& existing_stream : streams_) {
+ if (stream->id() == existing_stream->id()) {
+ RTC_DCHECK_EQ(stream.get(), existing_stream.get());
+ added = false;
+ break;
+ }
+ }
+ if (added) {
+ stream->AddTrack(video_track());
+ }
+ }
+ streams_ = streams;
+}
+
+void VideoRtpReceiver::SetObserver(RtpReceiverObserverInterface* observer) {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
+ observer_ = observer;
+ // Deliver any notifications the observer may have missed by being set late.
+ if (received_first_packet_ && observer_) {
+ observer_->OnFirstPacketReceived(media_type());
+ }
+}
+
+void VideoRtpReceiver::SetJitterBufferMinimumDelay(
+ absl::optional<double> delay_seconds) {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ delay_.Set(delay_seconds);
+ if (media_channel_ && signaled_ssrc_)
+ media_channel_->SetBaseMinimumPlayoutDelayMs(*signaled_ssrc_,
+ delay_.GetMs());
+}
+
+void VideoRtpReceiver::SetMediaChannel(
+ cricket::MediaReceiveChannelInterface* media_channel) {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ RTC_DCHECK(media_channel == nullptr ||
+ media_channel->media_type() == media_type());
+
+ SetMediaChannel_w(media_channel);
+}
+
+void VideoRtpReceiver::SetMediaChannel_w(
+ cricket::MediaReceiveChannelInterface* media_channel) {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ if (media_channel == media_channel_)
+ return;
+
+ if (!media_channel) {
+ SetSink(nullptr);
+ }
+
+ bool encoded_sink_enabled = saved_encoded_sink_enabled_;
+ if (encoded_sink_enabled && media_channel_) {
+ // Turn off the old sink, if any.
+ SetEncodedSinkEnabled(false);
+ }
+
+ if (media_channel) {
+ media_channel_ = media_channel->AsVideoReceiveChannel();
+ } else {
+ media_channel_ = nullptr;
+ }
+
+ if (media_channel_) {
+ if (saved_generate_keyframe_) {
+ // TODO(bugs.webrtc.org/8694): Stop using 0 to mean unsignalled SSRC
+ media_channel_->RequestRecvKeyFrame(signaled_ssrc_.value_or(0));
+ saved_generate_keyframe_ = false;
+ }
+ if (encoded_sink_enabled) {
+ SetEncodedSinkEnabled(true);
+ }
+ if (frame_transformer_) {
+ media_channel_->SetDepacketizerToDecoderFrameTransformer(
+ signaled_ssrc_.value_or(0), frame_transformer_);
+ }
+ }
+
+ if (!media_channel)
+ source_->ClearCallback();
+}
+
+void VideoRtpReceiver::NotifyFirstPacketReceived() {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
+ if (observer_) {
+ observer_->OnFirstPacketReceived(media_type());
+ }
+ received_first_packet_ = true;
+}
+
+std::vector<RtpSource> VideoRtpReceiver::GetSources() const {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ auto current_ssrc = ssrc();
+ if (!media_channel_ || !current_ssrc.has_value()) {
+ return {};
+ }
+ return media_channel_->GetSources(current_ssrc.value());
+}
+
+void VideoRtpReceiver::SetupMediaChannel(
+ absl::optional<uint32_t> ssrc,
+ cricket::MediaReceiveChannelInterface* media_channel) {
+ RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
+ RTC_DCHECK(media_channel);
+ MediaSourceInterface::SourceState state = source_->state();
+ worker_thread_->BlockingCall([&] {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ SetMediaChannel_w(media_channel);
+ RestartMediaChannel_w(std::move(ssrc), state);
+ });
+ source_->SetState(MediaSourceInterface::kLive);
+}
+
+void VideoRtpReceiver::OnGenerateKeyFrame() {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ if (!media_channel_) {
+ RTC_LOG(LS_ERROR)
+ << "VideoRtpReceiver::OnGenerateKeyFrame: No video channel exists.";
+ return;
+ }
+ // TODO(bugs.webrtc.org/8694): Stop using 0 to mean unsignalled SSRC
+ media_channel_->RequestRecvKeyFrame(signaled_ssrc_.value_or(0));
+ // We need to remember to request generation of a new key frame if the media
+ // channel changes, because there's no feedback whether the keyframe
+ // generation has completed on the channel.
+ saved_generate_keyframe_ = true;
+}
+
+void VideoRtpReceiver::OnEncodedSinkEnabled(bool enable) {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ SetEncodedSinkEnabled(enable);
+ // Always save the latest state of the callback in case the media_channel_
+ // changes.
+ saved_encoded_sink_enabled_ = enable;
+}
+
+void VideoRtpReceiver::SetEncodedSinkEnabled(bool enable) {
+ RTC_DCHECK_RUN_ON(worker_thread_);
+ if (!media_channel_)
+ return;
+
+ // TODO(bugs.webrtc.org/8694): Stop using 0 to mean unsignalled SSRC
+ const auto ssrc = signaled_ssrc_.value_or(0);
+
+ if (enable) {
+ media_channel_->SetRecordableEncodedFrameCallback(
+ ssrc, [source = source_](const RecordableEncodedFrame& frame) {
+ source->BroadcastRecordableEncodedFrame(frame);
+ });
+ } else {
+ media_channel_->ClearRecordableEncodedFrameCallback(ssrc);
+ }
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/video_rtp_receiver.h b/third_party/libwebrtc/pc/video_rtp_receiver.h
new file mode 100644
index 0000000000..ef88016052
--- /dev/null
+++ b/third_party/libwebrtc/pc/video_rtp_receiver.h
@@ -0,0 +1,184 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_VIDEO_RTP_RECEIVER_H_
+#define PC_VIDEO_RTP_RECEIVER_H_
+
+#include <stdint.h>
+
+#include <string>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/crypto/frame_decryptor_interface.h"
+#include "api/dtls_transport_interface.h"
+#include "api/frame_transformer_interface.h"
+#include "api/media_stream_interface.h"
+#include "api/media_types.h"
+#include "api/rtp_parameters.h"
+#include "api/rtp_receiver_interface.h"
+#include "api/scoped_refptr.h"
+#include "api/sequence_checker.h"
+#include "api/transport/rtp/rtp_source.h"
+#include "api/video/video_frame.h"
+#include "api/video/video_sink_interface.h"
+#include "api/video/video_source_interface.h"
+#include "media/base/media_channel.h"
+#include "pc/jitter_buffer_delay.h"
+#include "pc/media_stream_track_proxy.h"
+#include "pc/rtp_receiver.h"
+#include "pc/video_rtp_track_source.h"
+#include "pc/video_track.h"
+#include "rtc_base/system/no_unique_address.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/thread_annotations.h"
+
+namespace webrtc {
+
+class VideoRtpReceiver : public RtpReceiverInternal {
+ public:
+ // An SSRC of 0 will create a receiver that will match the first SSRC it
+ // sees. Must be called on signaling thread.
+ VideoRtpReceiver(rtc::Thread* worker_thread,
+ std::string receiver_id,
+ std::vector<std::string> streams_ids);
+ // TODO(hbos): Remove this when streams() is removed.
+ // https://crbug.com/webrtc/9480
+ VideoRtpReceiver(
+ rtc::Thread* worker_thread,
+ const std::string& receiver_id,
+ const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& streams);
+
+ virtual ~VideoRtpReceiver();
+
+ rtc::scoped_refptr<VideoTrackInterface> video_track() const { return track_; }
+
+ // RtpReceiverInterface implementation
+ rtc::scoped_refptr<MediaStreamTrackInterface> track() const override {
+ return track_;
+ }
+ rtc::scoped_refptr<DtlsTransportInterface> dtls_transport() const override;
+ std::vector<std::string> stream_ids() const override;
+ std::vector<rtc::scoped_refptr<MediaStreamInterface>> streams()
+ const override;
+ cricket::MediaType media_type() const override {
+ return cricket::MEDIA_TYPE_VIDEO;
+ }
+
+ std::string id() const override { return id_; }
+
+ RtpParameters GetParameters() const override;
+
+ void SetFrameDecryptor(
+ rtc::scoped_refptr<FrameDecryptorInterface> frame_decryptor) override;
+
+ rtc::scoped_refptr<FrameDecryptorInterface> GetFrameDecryptor()
+ const override;
+
+ void SetDepacketizerToDecoderFrameTransformer(
+ rtc::scoped_refptr<FrameTransformerInterface> frame_transformer) override;
+
+ // RtpReceiverInternal implementation.
+ void Stop() override;
+ void SetupMediaChannel(uint32_t ssrc) override;
+ void SetupUnsignaledMediaChannel() override;
+ absl::optional<uint32_t> ssrc() const override;
+ void NotifyFirstPacketReceived() override;
+ void set_stream_ids(std::vector<std::string> stream_ids) override;
+ void set_transport(
+ rtc::scoped_refptr<DtlsTransportInterface> dtls_transport) override;
+ void SetStreams(const std::vector<rtc::scoped_refptr<MediaStreamInterface>>&
+ streams) override;
+
+ void SetObserver(RtpReceiverObserverInterface* observer) override;
+
+ void SetJitterBufferMinimumDelay(
+ absl::optional<double> delay_seconds) override;
+
+ void SetMediaChannel(
+ cricket::MediaReceiveChannelInterface* media_channel) override;
+
+ int AttachmentId() const override { return attachment_id_; }
+
+ std::vector<RtpSource> GetSources() const override;
+
+ // Combines SetMediaChannel, SetupMediaChannel and
+ // SetupUnsignaledMediaChannel.
+ void SetupMediaChannel(absl::optional<uint32_t> ssrc,
+ cricket::MediaReceiveChannelInterface* media_channel);
+
+ private:
+ void RestartMediaChannel(absl::optional<uint32_t> ssrc)
+ RTC_RUN_ON(&signaling_thread_checker_);
+ void RestartMediaChannel_w(absl::optional<uint32_t> ssrc,
+ MediaSourceInterface::SourceState state)
+ RTC_RUN_ON(worker_thread_);
+ void SetSink(rtc::VideoSinkInterface<VideoFrame>* sink)
+ RTC_RUN_ON(worker_thread_);
+ void SetMediaChannel_w(cricket::MediaReceiveChannelInterface* media_channel)
+ RTC_RUN_ON(worker_thread_);
+
+ // VideoRtpTrackSource::Callback
+ void OnGenerateKeyFrame();
+ void OnEncodedSinkEnabled(bool enable);
+
+ void SetEncodedSinkEnabled(bool enable) RTC_RUN_ON(worker_thread_);
+
+ class SourceCallback : public VideoRtpTrackSource::Callback {
+ public:
+ explicit SourceCallback(VideoRtpReceiver* receiver) : receiver_(receiver) {}
+ ~SourceCallback() override = default;
+
+ private:
+ void OnGenerateKeyFrame() override { receiver_->OnGenerateKeyFrame(); }
+ void OnEncodedSinkEnabled(bool enable) override {
+ receiver_->OnEncodedSinkEnabled(enable);
+ }
+
+ VideoRtpReceiver* const receiver_;
+ } source_callback_{this};
+
+ RTC_NO_UNIQUE_ADDRESS SequenceChecker signaling_thread_checker_;
+ rtc::Thread* const worker_thread_;
+
+ const std::string id_;
+ cricket::VideoMediaReceiveChannelInterface* media_channel_
+ RTC_GUARDED_BY(worker_thread_) = nullptr;
+ absl::optional<uint32_t> signaled_ssrc_ RTC_GUARDED_BY(worker_thread_);
+ // `source_` is held here to be able to change the state of the source when
+ // the VideoRtpReceiver is stopped.
+ const rtc::scoped_refptr<VideoRtpTrackSource> source_;
+ const rtc::scoped_refptr<VideoTrackProxyWithInternal<VideoTrack>> track_;
+ std::vector<rtc::scoped_refptr<MediaStreamInterface>> streams_
+ RTC_GUARDED_BY(&signaling_thread_checker_);
+ RtpReceiverObserverInterface* observer_
+ RTC_GUARDED_BY(&signaling_thread_checker_) = nullptr;
+ bool received_first_packet_ RTC_GUARDED_BY(&signaling_thread_checker_) =
+ false;
+ const int attachment_id_;
+ rtc::scoped_refptr<FrameDecryptorInterface> frame_decryptor_
+ RTC_GUARDED_BY(worker_thread_);
+ rtc::scoped_refptr<DtlsTransportInterface> dtls_transport_
+ RTC_GUARDED_BY(&signaling_thread_checker_);
+ rtc::scoped_refptr<FrameTransformerInterface> frame_transformer_
+ RTC_GUARDED_BY(worker_thread_);
+ // Stores the minimum jitter buffer delay. Handles caching cases
+ // if `SetJitterBufferMinimumDelay` is called before start.
+ JitterBufferDelay delay_ RTC_GUARDED_BY(worker_thread_);
+
+ // Records if we should generate a keyframe when `media_channel_` gets set up
+ // or switched.
+ bool saved_generate_keyframe_ RTC_GUARDED_BY(worker_thread_) = false;
+ bool saved_encoded_sink_enabled_ RTC_GUARDED_BY(worker_thread_) = false;
+};
+
+} // namespace webrtc
+
+#endif // PC_VIDEO_RTP_RECEIVER_H_
diff --git a/third_party/libwebrtc/pc/video_rtp_receiver_unittest.cc b/third_party/libwebrtc/pc/video_rtp_receiver_unittest.cc
new file mode 100644
index 0000000000..5ff736084f
--- /dev/null
+++ b/third_party/libwebrtc/pc/video_rtp_receiver_unittest.cc
@@ -0,0 +1,209 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/video_rtp_receiver.h"
+
+#include <functional>
+#include <memory>
+
+#include "api/task_queue/task_queue_base.h"
+#include "api/video/recordable_encoded_frame.h"
+#include "api/video/test/mock_recordable_encoded_frame.h"
+#include "media/base/fake_media_engine.h"
+#include "media/base/media_channel.h"
+#include "rtc_base/task_queue_for_test.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+using ::testing::_;
+using ::testing::AnyNumber;
+using ::testing::InSequence;
+using ::testing::Mock;
+using ::testing::NiceMock;
+using ::testing::SaveArg;
+using ::testing::StrictMock;
+
+namespace webrtc {
+namespace {
+
+class VideoRtpReceiverTest : public testing::Test {
+ protected:
+ class MockVideoMediaSendChannel : public cricket::FakeVideoMediaSendChannel {
+ public:
+ MockVideoMediaSendChannel(
+ const cricket::VideoOptions& options,
+ TaskQueueBase* network_thread = rtc::Thread::Current())
+ : FakeVideoMediaSendChannel(options, network_thread) {}
+ MOCK_METHOD(void,
+ GenerateSendKeyFrame,
+ (uint32_t, const std::vector<std::string>&),
+ (override));
+ };
+
+ class MockVideoMediaReceiveChannel
+ : public cricket::FakeVideoMediaReceiveChannel {
+ public:
+ MockVideoMediaReceiveChannel(
+ const cricket::VideoOptions& options,
+ TaskQueueBase* network_thread = rtc::Thread::Current())
+ : FakeVideoMediaReceiveChannel(options, network_thread) {}
+ MOCK_METHOD(void,
+ SetRecordableEncodedFrameCallback,
+ (uint32_t, std::function<void(const RecordableEncodedFrame&)>),
+ (override));
+ MOCK_METHOD(void,
+ ClearRecordableEncodedFrameCallback,
+ (uint32_t),
+ (override));
+ MOCK_METHOD(void, RequestRecvKeyFrame, (uint32_t), (override));
+ };
+
+ class MockVideoSink : public rtc::VideoSinkInterface<RecordableEncodedFrame> {
+ public:
+ MOCK_METHOD(void, OnFrame, (const RecordableEncodedFrame&), (override));
+ };
+
+ VideoRtpReceiverTest()
+ : worker_thread_(rtc::Thread::Create()),
+ channel_(cricket::VideoOptions()),
+ receiver_(rtc::make_ref_counted<VideoRtpReceiver>(
+ worker_thread_.get(),
+ std::string("receiver"),
+ std::vector<std::string>({"stream"}))) {
+ worker_thread_->Start();
+ SetMediaChannel(&channel_);
+ }
+
+ ~VideoRtpReceiverTest() override {
+ // Clear expectations that tests may have set up before calling
+ // SetMediaChannel(nullptr).
+ Mock::VerifyAndClearExpectations(&channel_);
+ receiver_->Stop();
+ SetMediaChannel(nullptr);
+ }
+
+ void SetMediaChannel(cricket::MediaReceiveChannelInterface* media_channel) {
+ SendTask(worker_thread_.get(),
+ [&]() { receiver_->SetMediaChannel(media_channel); });
+ }
+
+ webrtc::VideoTrackSourceInterface* Source() {
+ return receiver_->streams()[0]->FindVideoTrack("receiver")->GetSource();
+ }
+
+ rtc::AutoThread main_thread_;
+ std::unique_ptr<rtc::Thread> worker_thread_;
+ NiceMock<MockVideoMediaReceiveChannel> channel_;
+ rtc::scoped_refptr<VideoRtpReceiver> receiver_;
+};
+
+TEST_F(VideoRtpReceiverTest, SupportsEncodedOutput) {
+ EXPECT_TRUE(Source()->SupportsEncodedOutput());
+}
+
+TEST_F(VideoRtpReceiverTest, GeneratesKeyFrame) {
+ EXPECT_CALL(channel_, RequestRecvKeyFrame(0));
+ Source()->GenerateKeyFrame();
+}
+
+TEST_F(VideoRtpReceiverTest,
+ GenerateKeyFrameOnChannelSwitchUnlessGenerateKeyframeCalled) {
+ // A channel switch without previous call to GenerateKeyFrame shouldn't
+ // cause a call to happen on the new channel.
+ MockVideoMediaReceiveChannel channel2{cricket::VideoOptions()};
+ EXPECT_CALL(channel_, RequestRecvKeyFrame).Times(0);
+ EXPECT_CALL(channel2, RequestRecvKeyFrame).Times(0);
+ SetMediaChannel(&channel2);
+ Mock::VerifyAndClearExpectations(&channel2);
+
+ // Generate a key frame. When we switch channel next time, we will have to
+ // re-generate it as we don't know if it was eventually received
+ EXPECT_CALL(channel2, RequestRecvKeyFrame).Times(1);
+ Source()->GenerateKeyFrame();
+ MockVideoMediaReceiveChannel channel3{cricket::VideoOptions()};
+ EXPECT_CALL(channel3, RequestRecvKeyFrame);
+ SetMediaChannel(&channel3);
+
+ // Switching to a new channel should now not cause calls to GenerateKeyFrame.
+ StrictMock<MockVideoMediaReceiveChannel> channel4{cricket::VideoOptions()};
+ SetMediaChannel(&channel4);
+
+ // We must call SetMediaChannel(nullptr) here since the mock media channels
+ // live on the stack and `receiver_` still has a pointer to those objects.
+ SetMediaChannel(nullptr);
+}
+
+TEST_F(VideoRtpReceiverTest, EnablesEncodedOutput) {
+ EXPECT_CALL(channel_, SetRecordableEncodedFrameCallback(/*ssrc=*/0, _));
+ EXPECT_CALL(channel_, ClearRecordableEncodedFrameCallback).Times(0);
+ MockVideoSink sink;
+ Source()->AddEncodedSink(&sink);
+}
+
+TEST_F(VideoRtpReceiverTest, DisablesEncodedOutput) {
+ EXPECT_CALL(channel_, ClearRecordableEncodedFrameCallback(/*ssrc=*/0));
+ MockVideoSink sink;
+ Source()->AddEncodedSink(&sink);
+ Source()->RemoveEncodedSink(&sink);
+}
+
+TEST_F(VideoRtpReceiverTest, DisablesEnablesEncodedOutputOnChannelSwitch) {
+ InSequence s;
+ EXPECT_CALL(channel_, SetRecordableEncodedFrameCallback);
+ EXPECT_CALL(channel_, ClearRecordableEncodedFrameCallback);
+ MockVideoSink sink;
+ Source()->AddEncodedSink(&sink);
+ MockVideoMediaReceiveChannel channel2{cricket::VideoOptions()};
+ EXPECT_CALL(channel2, SetRecordableEncodedFrameCallback);
+ SetMediaChannel(&channel2);
+ Mock::VerifyAndClearExpectations(&channel2);
+
+ // When clearing encoded frame buffer function, we need channel switches
+ // to NOT set the callback again.
+ EXPECT_CALL(channel2, ClearRecordableEncodedFrameCallback);
+ Source()->RemoveEncodedSink(&sink);
+ StrictMock<MockVideoMediaReceiveChannel> channel3{cricket::VideoOptions()};
+ SetMediaChannel(&channel3);
+
+ // We must call SetMediaChannel(nullptr) here since the mock media channels
+ // live on the stack and `receiver_` still has a pointer to those objects.
+ SetMediaChannel(nullptr);
+}
+
+TEST_F(VideoRtpReceiverTest, BroadcastsEncodedFramesWhenEnabled) {
+ std::function<void(const RecordableEncodedFrame&)> broadcast;
+ EXPECT_CALL(channel_, SetRecordableEncodedFrameCallback(_, _))
+ .WillRepeatedly(SaveArg<1>(&broadcast));
+ MockVideoSink sink;
+ Source()->AddEncodedSink(&sink);
+
+ // Make sure SetEncodedFrameBufferFunction completes.
+ Mock::VerifyAndClearExpectations(&channel_);
+
+ // Pass two frames on different contexts.
+ EXPECT_CALL(sink, OnFrame).Times(2);
+ MockRecordableEncodedFrame frame;
+ broadcast(frame);
+ SendTask(worker_thread_.get(), [&] { broadcast(frame); });
+}
+
+TEST_F(VideoRtpReceiverTest, EnablesEncodedOutputOnChannelRestart) {
+ InSequence s;
+ MockVideoSink sink;
+ Source()->AddEncodedSink(&sink);
+ EXPECT_CALL(channel_, SetRecordableEncodedFrameCallback(4711, _));
+ receiver_->SetupMediaChannel(4711);
+ EXPECT_CALL(channel_, ClearRecordableEncodedFrameCallback(4711));
+ EXPECT_CALL(channel_, SetRecordableEncodedFrameCallback(0, _));
+ receiver_->SetupUnsignaledMediaChannel();
+}
+
+} // namespace
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/video_rtp_track_source.cc b/third_party/libwebrtc/pc/video_rtp_track_source.cc
new file mode 100644
index 0000000000..e4b333c7c2
--- /dev/null
+++ b/third_party/libwebrtc/pc/video_rtp_track_source.cc
@@ -0,0 +1,89 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/video_rtp_track_source.h"
+
+#include <stddef.h>
+
+#include <algorithm>
+
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+
+VideoRtpTrackSource::VideoRtpTrackSource(Callback* callback)
+ : VideoTrackSource(true /* remote */), callback_(callback) {}
+
+void VideoRtpTrackSource::ClearCallback() {
+ RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
+ callback_ = nullptr;
+}
+
+rtc::VideoSourceInterface<VideoFrame>* VideoRtpTrackSource::source() {
+ return &broadcaster_;
+}
+rtc::VideoSinkInterface<VideoFrame>* VideoRtpTrackSource::sink() {
+ return &broadcaster_;
+}
+
+void VideoRtpTrackSource::BroadcastRecordableEncodedFrame(
+ const RecordableEncodedFrame& frame) const {
+ MutexLock lock(&mu_);
+ for (rtc::VideoSinkInterface<RecordableEncodedFrame>* sink : encoded_sinks_) {
+ sink->OnFrame(frame);
+ }
+}
+
+bool VideoRtpTrackSource::SupportsEncodedOutput() const {
+ return true;
+}
+
+void VideoRtpTrackSource::GenerateKeyFrame() {
+ RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
+ if (callback_) {
+ callback_->OnGenerateKeyFrame();
+ }
+}
+
+void VideoRtpTrackSource::AddEncodedSink(
+ rtc::VideoSinkInterface<RecordableEncodedFrame>* sink) {
+ RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
+ RTC_DCHECK(sink);
+ size_t size = 0;
+ {
+ MutexLock lock(&mu_);
+ RTC_DCHECK(std::find(encoded_sinks_.begin(), encoded_sinks_.end(), sink) ==
+ encoded_sinks_.end());
+ encoded_sinks_.push_back(sink);
+ size = encoded_sinks_.size();
+ }
+ if (size == 1 && callback_) {
+ callback_->OnEncodedSinkEnabled(true);
+ }
+}
+
+void VideoRtpTrackSource::RemoveEncodedSink(
+ rtc::VideoSinkInterface<RecordableEncodedFrame>* sink) {
+ RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
+ size_t size = 0;
+ {
+ MutexLock lock(&mu_);
+ auto it = std::find(encoded_sinks_.begin(), encoded_sinks_.end(), sink);
+ if (it != encoded_sinks_.end()) {
+ encoded_sinks_.erase(it);
+ }
+ size = encoded_sinks_.size();
+ }
+ if (size == 0 && callback_) {
+ callback_->OnEncodedSinkEnabled(false);
+ }
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/video_rtp_track_source.h b/third_party/libwebrtc/pc/video_rtp_track_source.h
new file mode 100644
index 0000000000..bf7da99f98
--- /dev/null
+++ b/third_party/libwebrtc/pc/video_rtp_track_source.h
@@ -0,0 +1,93 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_VIDEO_RTP_TRACK_SOURCE_H_
+#define PC_VIDEO_RTP_TRACK_SOURCE_H_
+
+#include <vector>
+
+#include "api/sequence_checker.h"
+#include "api/video/recordable_encoded_frame.h"
+#include "api/video/video_frame.h"
+#include "api/video/video_sink_interface.h"
+#include "api/video/video_source_interface.h"
+#include "media/base/video_broadcaster.h"
+#include "pc/video_track_source.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/system/no_unique_address.h"
+#include "rtc_base/thread_annotations.h"
+
+namespace webrtc {
+
+// Video track source in use by VideoRtpReceiver
+class VideoRtpTrackSource : public VideoTrackSource {
+ public:
+ class Callback {
+ public:
+ virtual ~Callback() = default;
+
+ // Called when a keyframe should be generated
+ virtual void OnGenerateKeyFrame() = 0;
+
+ // Called when the implementor should eventually start to serve encoded
+ // frames using BroadcastRecordableEncodedFrame.
+ // The implementor should cause a keyframe to be eventually generated.
+ virtual void OnEncodedSinkEnabled(bool enable) = 0;
+ };
+
+ explicit VideoRtpTrackSource(Callback* callback);
+
+ VideoRtpTrackSource(const VideoRtpTrackSource&) = delete;
+ VideoRtpTrackSource& operator=(const VideoRtpTrackSource&) = delete;
+
+ // Call before the object implementing Callback finishes its destructor. No
+ // more callbacks will be fired after completion. Must be called on the
+ // worker thread.
+ void ClearCallback();
+
+ // Call to broadcast an encoded frame to registered sinks.
+ // This method can be called on any thread or queue.
+ void BroadcastRecordableEncodedFrame(
+ const RecordableEncodedFrame& frame) const;
+
+ // VideoTrackSource
+ rtc::VideoSourceInterface<VideoFrame>* source() override;
+ rtc::VideoSinkInterface<VideoFrame>* sink();
+
+ // Returns true. This method can be called on any thread.
+ bool SupportsEncodedOutput() const override;
+
+ // Generates a key frame. Must be called on the worker thread.
+ void GenerateKeyFrame() override;
+
+ // Adds an encoded sink. Must be called on the worker thread.
+ void AddEncodedSink(
+ rtc::VideoSinkInterface<RecordableEncodedFrame>* sink) override;
+
+ // Removes an encoded sink. Must be called on the worker thread.
+ void RemoveEncodedSink(
+ rtc::VideoSinkInterface<RecordableEncodedFrame>* sink) override;
+
+ private:
+ RTC_NO_UNIQUE_ADDRESS SequenceChecker worker_sequence_checker_{
+ SequenceChecker::kDetached};
+ // `broadcaster_` is needed since the decoder can only handle one sink.
+ // It might be better if the decoder can handle multiple sinks and consider
+ // the VideoSinkWants.
+ rtc::VideoBroadcaster broadcaster_;
+ mutable Mutex mu_;
+ std::vector<rtc::VideoSinkInterface<RecordableEncodedFrame>*> encoded_sinks_
+ RTC_GUARDED_BY(mu_);
+ Callback* callback_ RTC_GUARDED_BY(worker_sequence_checker_);
+};
+
+} // namespace webrtc
+
+#endif // PC_VIDEO_RTP_TRACK_SOURCE_H_
diff --git a/third_party/libwebrtc/pc/video_rtp_track_source_unittest.cc b/third_party/libwebrtc/pc/video_rtp_track_source_unittest.cc
new file mode 100644
index 0000000000..13728c7eff
--- /dev/null
+++ b/third_party/libwebrtc/pc/video_rtp_track_source_unittest.cc
@@ -0,0 +1,140 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/video_rtp_track_source.h"
+
+#include "absl/types/optional.h"
+#include "api/scoped_refptr.h"
+#include "api/units/timestamp.h"
+#include "api/video/color_space.h"
+#include "api/video/encoded_image.h"
+#include "api/video/video_codec_type.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace {
+
+// Mock of VideoRtpTrackSource::Callback used to verify key-frame requests and
+// encoded-sink enable/disable notifications coming from the source under test.
+class MockCallback : public VideoRtpTrackSource::Callback {
+ public:
+  MOCK_METHOD(void, OnGenerateKeyFrame, (), (override));
+  MOCK_METHOD(void, OnEncodedSinkEnabled, (bool), (override));
+};
+
+// Mock encoded-frame sink used to verify frame broadcasting.
+class MockSink : public rtc::VideoSinkInterface<RecordableEncodedFrame> {
+ public:
+  MOCK_METHOD(void, OnFrame, (const RecordableEncodedFrame&), (override));
+};
+
+// Creates the source under test. `callback` may be nullptr for tests that do
+// not exercise callback notifications.
+rtc::scoped_refptr<VideoRtpTrackSource> MakeSource(
+    VideoRtpTrackSource::Callback* callback) {
+  return rtc::make_ref_counted<VideoRtpTrackSource>(callback);
+}
+
+TEST(VideoRtpTrackSourceTest, CreatesWithRemoteAtttributeSet) {
+  EXPECT_TRUE(MakeSource(nullptr)->remote());
+}
+
+TEST(VideoRtpTrackSourceTest, EnablesEncodingOutputOnAddingSink) {
+  MockCallback mock_callback;
+  EXPECT_CALL(mock_callback, OnGenerateKeyFrame).Times(0);
+  auto source = MakeSource(&mock_callback);
+  MockSink sink;
+  EXPECT_CALL(mock_callback, OnEncodedSinkEnabled(true));
+  source->AddEncodedSink(&sink);
+}
+
+// The enabled notification must fire only for the first added sink.
+TEST(VideoRtpTrackSourceTest, EnablesEncodingOutputOnceOnAddingTwoSinks) {
+  MockCallback mock_callback;
+  EXPECT_CALL(mock_callback, OnGenerateKeyFrame).Times(0);
+  auto source = MakeSource(&mock_callback);
+  MockSink sink;
+  EXPECT_CALL(mock_callback, OnEncodedSinkEnabled(true)).Times(1);
+  source->AddEncodedSink(&sink);
+  MockSink sink2;
+  source->AddEncodedSink(&sink2);
+}
+
+TEST(VideoRtpTrackSourceTest, DisablesEncodingOutputOnOneSinkRemoved) {
+  MockCallback mock_callback;
+  EXPECT_CALL(mock_callback, OnGenerateKeyFrame).Times(0);
+  EXPECT_CALL(mock_callback, OnEncodedSinkEnabled(true));
+  EXPECT_CALL(mock_callback, OnEncodedSinkEnabled(false)).Times(0);
+  auto source = MakeSource(&mock_callback);
+  MockSink sink;
+  source->AddEncodedSink(&sink);
+  // Reset expectations so the disable notification below is asserted in
+  // isolation from the enable notification above.
+  testing::Mock::VerifyAndClearExpectations(&mock_callback);
+  EXPECT_CALL(mock_callback, OnEncodedSinkEnabled(false));
+  source->RemoveEncodedSink(&sink);
+}
+
+// The disabled notification must fire only when the last sink is removed.
+TEST(VideoRtpTrackSourceTest, DisablesEncodingOutputOnLastSinkRemoved) {
+  MockCallback mock_callback;
+  EXPECT_CALL(mock_callback, OnGenerateKeyFrame).Times(0);
+  EXPECT_CALL(mock_callback, OnEncodedSinkEnabled(true));
+  auto source = MakeSource(&mock_callback);
+  MockSink sink;
+  source->AddEncodedSink(&sink);
+  MockSink sink2;
+  source->AddEncodedSink(&sink2);
+  source->RemoveEncodedSink(&sink);
+  testing::Mock::VerifyAndClearExpectations(&mock_callback);
+  EXPECT_CALL(mock_callback, OnEncodedSinkEnabled(false));
+  source->RemoveEncodedSink(&sink2);
+}
+
+TEST(VideoRtpTrackSourceTest, GeneratesKeyFrameWhenRequested) {
+  MockCallback mock_callback;
+  auto source = MakeSource(&mock_callback);
+  EXPECT_CALL(mock_callback, OnGenerateKeyFrame);
+  source->GenerateKeyFrame();
+}
+
+// After ClearCallback(), the StrictMock would fail the test on any callback.
+TEST(VideoRtpTrackSourceTest, NoCallbacksAfterClearedCallback) {
+  testing::StrictMock<MockCallback> mock_callback;
+  auto source = MakeSource(&mock_callback);
+  source->ClearCallback();
+  MockSink sink;
+  source->AddEncodedSink(&sink);
+  source->GenerateKeyFrame();
+  source->RemoveEncodedSink(&sink);
+}
+
+// Minimal concrete RecordableEncodedFrame; the field values are irrelevant,
+// it only serves as a payload for BroadcastRecordableEncodedFrame().
+class TestFrame : public RecordableEncodedFrame {
+ public:
+  rtc::scoped_refptr<const webrtc::EncodedImageBufferInterface> encoded_buffer()
+      const override {
+    return nullptr;
+  }
+  absl::optional<webrtc::ColorSpace> color_space() const override {
+    return absl::nullopt;
+  }
+  VideoCodecType codec() const override { return kVideoCodecGeneric; }
+  bool is_key_frame() const override { return false; }
+  EncodedResolution resolution() const override {
+    return EncodedResolution{0, 0};
+  }
+  Timestamp render_time() const override { return Timestamp::Zero(); }
+};
+
+// Every registered encoded sink must receive each broadcast frame.
+TEST(VideoRtpTrackSourceTest, BroadcastsFrames) {
+  auto source = MakeSource(nullptr);
+  MockSink sink;
+  source->AddEncodedSink(&sink);
+  MockSink sink2;
+  source->AddEncodedSink(&sink2);
+  TestFrame frame;
+  EXPECT_CALL(sink, OnFrame);
+  EXPECT_CALL(sink2, OnFrame);
+  source->BroadcastRecordableEncodedFrame(frame);
+}
+
+}  // namespace
+}  // namespace webrtc
diff --git a/third_party/libwebrtc/pc/video_track.cc b/third_party/libwebrtc/pc/video_track.cc
new file mode 100644
index 0000000000..0bf8687af3
--- /dev/null
+++ b/third_party/libwebrtc/pc/video_track.cc
@@ -0,0 +1,144 @@
+/*
+ * Copyright 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/video_track.h"
+
+#include <utility>
+#include <vector>
+
+#include "api/notifier.h"
+#include "api/sequence_checker.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+
+// Constructed on the signaling thread. The proxied source is retained for the
+// lifetime of the track and observed for live/ended state changes.
+VideoTrack::VideoTrack(
+    absl::string_view label,
+    rtc::scoped_refptr<
+        VideoTrackSourceProxyWithInternal<VideoTrackSourceInterface>> source,
+    rtc::Thread* worker_thread)
+    : MediaStreamTrack<VideoTrackInterface>(label),
+      worker_thread_(worker_thread),
+      video_source_(std::move(source)),
+      content_hint_(ContentHint::kNone) {
+  RTC_DCHECK_RUN_ON(&signaling_thread_);
+  // Detach the thread checker for VideoSourceBaseGuarded since we'll make calls
+  // to VideoSourceBaseGuarded on the worker thread, but we're currently on the
+  // signaling thread.
+  source_sequence_.Detach();
+  video_source_->RegisterObserver(this);
+}
+
+VideoTrack::~VideoTrack() {
+  RTC_DCHECK_RUN_ON(&signaling_thread_);
+  video_source_->UnregisterObserver(this);
+}
+
+std::string VideoTrack::kind() const {
+  return kVideoKind;
+}
+
+// AddOrUpdateSink and RemoveSink should be called on the worker
+// thread.
+void VideoTrack::AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink,
+                                 const rtc::VideoSinkWants& wants) {
+  RTC_DCHECK_RUN_ON(worker_thread_);
+  VideoSourceBaseGuarded::AddOrUpdateSink(sink, wants);
+  rtc::VideoSinkWants modified_wants = wants;
+  // While the track is disabled, request black frames from the source instead
+  // of real content.
+  modified_wants.black_frames = !enabled_w_;
+  video_source_->internal()->AddOrUpdateSink(sink, modified_wants);
+}
+
+void VideoTrack::RemoveSink(rtc::VideoSinkInterface<VideoFrame>* sink) {
+  RTC_DCHECK_RUN_ON(worker_thread_);
+  VideoSourceBaseGuarded::RemoveSink(sink);
+  video_source_->internal()->RemoveSink(sink);
+}
+
+void VideoTrack::RequestRefreshFrame() {
+  RTC_DCHECK_RUN_ON(worker_thread_);
+  video_source_->internal()->RequestRefreshFrame();
+}
+
+// Returns the proxy-wrapped source, suitable for handing out via the API.
+VideoTrackSourceInterface* VideoTrack::GetSource() const {
+  // Callable from any thread.
+  return video_source_.get();
+}
+
+// Returns the source object without the thread-marshalling proxy wrapper.
+VideoTrackSourceInterface* VideoTrack::GetSourceInternal() const {
+  return video_source_->internal();
+}
+
+VideoTrackInterface::ContentHint VideoTrack::content_hint() const {
+  RTC_DCHECK_RUN_ON(&signaling_thread_);
+  return content_hint_;
+}
+
+void VideoTrack::set_content_hint(ContentHint hint) {
+  RTC_DCHECK_RUN_ON(&signaling_thread_);
+  if (content_hint_ == hint)
+    return;
+  content_hint_ = hint;
+  Notifier<VideoTrackInterface>::FireOnChanged();
+}
+
+bool VideoTrack::set_enabled(bool enable) {
+  RTC_DCHECK_RUN_ON(&signaling_thread_);
+
+  bool ret = MediaStreamTrack<VideoTrackInterface>::set_enabled(enable);
+
+  // Synchronously update the worker-side cached state and flip black-frame
+  // delivery for every registered sink.
+  worker_thread_->BlockingCall([&]() {
+    RTC_DCHECK_RUN_ON(worker_thread_);
+    enabled_w_ = enable;
+    for (auto& sink_pair : sink_pairs()) {
+      rtc::VideoSinkWants modified_wants = sink_pair.wants;
+      modified_wants.black_frames = !enable;
+      video_source_->AddOrUpdateSink(sink_pair.sink, modified_wants);
+    }
+  });
+
+  return ret;
+}
+
+bool VideoTrack::enabled() const {
+  // Callable from either the worker or the signaling thread; each thread
+  // reads its own copy of the enabled state.
+  if (worker_thread_->IsCurrent()) {
+    RTC_DCHECK_RUN_ON(worker_thread_);
+    return enabled_w_;
+  }
+  RTC_DCHECK_RUN_ON(&signaling_thread_);
+  return MediaStreamTrack<VideoTrackInterface>::enabled();
+}
+
+MediaStreamTrackInterface::TrackState VideoTrack::state() const {
+  RTC_DCHECK_RUN_ON(worker_thread_);
+  return MediaStreamTrack<VideoTrackInterface>::state();
+}
+
+// Implements ObserverInterface. Mirrors the source's live/ended state into
+// the track state.
+void VideoTrack::OnChanged() {
+  RTC_DCHECK_RUN_ON(&signaling_thread_);
+  rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+  MediaSourceInterface::SourceState state = video_source_->state();
+  set_state(state == MediaSourceInterface::kEnded ? kEnded : kLive);
+}
+
+// Creates a track together with a source proxy whose primary thread is the
+// current (signaling) thread and whose secondary thread is `worker_thread`.
+rtc::scoped_refptr<VideoTrack> VideoTrack::Create(
+    absl::string_view id,
+    rtc::scoped_refptr<VideoTrackSourceInterface> source,
+    rtc::Thread* worker_thread) {
+  rtc::scoped_refptr<
+      VideoTrackSourceProxyWithInternal<VideoTrackSourceInterface>>
+      source_proxy = VideoTrackSourceProxy::Create(
+          rtc::Thread::Current(), worker_thread, std::move(source));
+
+  return rtc::make_ref_counted<VideoTrack>(id, std::move(source_proxy),
+                                           worker_thread);
+}
+
+}  // namespace webrtc
diff --git a/third_party/libwebrtc/pc/video_track.h b/third_party/libwebrtc/pc/video_track.h
new file mode 100644
index 0000000000..13a51c454b
--- /dev/null
+++ b/third_party/libwebrtc/pc/video_track.h
@@ -0,0 +1,88 @@
+/*
+ * Copyright 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_VIDEO_TRACK_H_
+#define PC_VIDEO_TRACK_H_
+
+#include <string>
+
+#include "absl/types/optional.h"
+#include "api/media_stream_interface.h"
+#include "api/media_stream_track.h"
+#include "api/scoped_refptr.h"
+#include "api/sequence_checker.h"
+#include "api/video/video_frame.h"
+#include "api/video/video_sink_interface.h"
+#include "api/video/video_source_interface.h"
+#include "media/base/video_source_base.h"
+#include "pc/video_track_source_proxy.h"
+#include "rtc_base/system/no_unique_address.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/thread_annotations.h"
+
+namespace webrtc {
+
+// TODO(tommi): Instead of inheriting from `MediaStreamTrack<>`, implement the
+// properties directly in this class. `MediaStreamTrack` doesn't guard against
+// conflicting access, so we'd need to override those methods anyway in this
+// class in order to make sure things are correctly checked.
+class VideoTrack : public MediaStreamTrack<VideoTrackInterface>,
+                   public rtc::VideoSourceBaseGuarded,
+                   public ObserverInterface {
+ public:
+  // Creates a track and wraps `source` in an API proxy whose primary thread
+  // is the current (signaling) thread and whose secondary thread is
+  // `worker_thread`.
+  static rtc::scoped_refptr<VideoTrack> Create(
+      absl::string_view label,
+      rtc::scoped_refptr<VideoTrackSourceInterface> source,
+      rtc::Thread* worker_thread);
+
+  // Sink management; must be called on the worker thread.
+  void AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink,
+                       const rtc::VideoSinkWants& wants) override;
+  void RemoveSink(rtc::VideoSinkInterface<VideoFrame>* sink) override;
+  void RequestRefreshFrame() override;
+  // Returns the proxied source; callable from any thread.
+  VideoTrackSourceInterface* GetSource() const override;
+
+  ContentHint content_hint() const override;
+  void set_content_hint(ContentHint hint) override;
+  bool set_enabled(bool enable) override;
+  bool enabled() const override;
+  MediaStreamTrackInterface::TrackState state() const override;
+  std::string kind() const override;
+
+  // Direct access to the non-proxied source object for internal implementation.
+  VideoTrackSourceInterface* GetSourceInternal() const;
+
+ protected:
+  VideoTrack(
+      absl::string_view id,
+      rtc::scoped_refptr<
+          VideoTrackSourceProxyWithInternal<VideoTrackSourceInterface>> source,
+      rtc::Thread* worker_thread);
+  ~VideoTrack();
+
+ private:
+  // Implements ObserverInterface. Observes `video_source_` state.
+  void OnChanged() override;
+
+  RTC_NO_UNIQUE_ADDRESS webrtc::SequenceChecker signaling_thread_;
+  rtc::Thread* const worker_thread_;
+  const rtc::scoped_refptr<
+      VideoTrackSourceProxyWithInternal<VideoTrackSourceInterface>>
+      video_source_;
+  ContentHint content_hint_ RTC_GUARDED_BY(&signaling_thread_);
+  // Cached `enabled` state for the worker thread. This is kept in sync with
+  // the state maintained on the signaling thread via set_enabled() but can
+  // be queried without blocking on the worker thread by callers that don't
+  // use an api proxy to call the `enabled()` method.
+  bool enabled_w_ RTC_GUARDED_BY(worker_thread_) = true;
+};
+
+}  // namespace webrtc
+
+#endif // PC_VIDEO_TRACK_H_
diff --git a/third_party/libwebrtc/pc/video_track_source.cc b/third_party/libwebrtc/pc/video_track_source.cc
new file mode 100644
index 0000000000..d4b7f55055
--- /dev/null
+++ b/third_party/libwebrtc/pc/video_track_source.cc
@@ -0,0 +1,40 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/video_track_source.h"
+
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+
+// State starts out as kInitializing; subclasses advance it to kLive/kEnded
+// via SetState().
+VideoTrackSource::VideoTrackSource(bool remote)
+    : state_(kInitializing), remote_(remote) {}
+
+void VideoTrackSource::SetState(SourceState new_state) {
+  RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
+  if (state_ != new_state) {
+    state_ = new_state;
+    // Notify registered observers (e.g. tracks) of the state change.
+    FireOnChanged();
+  }
+}
+
+// Sink management is forwarded to the subclass-provided source(); must be
+// called on the worker thread.
+void VideoTrackSource::AddOrUpdateSink(
+    rtc::VideoSinkInterface<VideoFrame>* sink,
+    const rtc::VideoSinkWants& wants) {
+  RTC_DCHECK(worker_thread_checker_.IsCurrent());
+  source()->AddOrUpdateSink(sink, wants);
+}
+
+void VideoTrackSource::RemoveSink(rtc::VideoSinkInterface<VideoFrame>* sink) {
+  RTC_DCHECK(worker_thread_checker_.IsCurrent());
+  source()->RemoveSink(sink);
+}
+
+}  // namespace webrtc
diff --git a/third_party/libwebrtc/pc/video_track_source.h b/third_party/libwebrtc/pc/video_track_source.h
new file mode 100644
index 0000000000..6aae178f37
--- /dev/null
+++ b/third_party/libwebrtc/pc/video_track_source.h
@@ -0,0 +1,74 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_VIDEO_TRACK_SOURCE_H_
+#define PC_VIDEO_TRACK_SOURCE_H_
+
+#include "absl/types/optional.h"
+#include "api/media_stream_interface.h"
+#include "api/notifier.h"
+#include "api/sequence_checker.h"
+#include "api/video/recordable_encoded_frame.h"
+#include "api/video/video_frame.h"
+#include "api/video/video_sink_interface.h"
+#include "api/video/video_source_interface.h"
+#include "media/base/media_channel.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/system/no_unique_address.h"
+#include "rtc_base/system/rtc_export.h"
+#include "rtc_base/thread_annotations.h"
+
+namespace webrtc {
+
+// VideoTrackSource is a convenience base class for implementations of
+// VideoTrackSourceInterface.
+class RTC_EXPORT VideoTrackSource : public Notifier<VideoTrackSourceInterface> {
+ public:
+  explicit VideoTrackSource(bool remote);
+  // Must be called on the signaling thread; fires OnChanged() on observers
+  // when the state actually changes.
+  void SetState(SourceState new_state);
+
+  SourceState state() const override {
+    RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
+    return state_;
+  }
+  bool remote() const override { return remote_; }
+
+  bool is_screencast() const override { return false; }
+  absl::optional<bool> needs_denoising() const override {
+    return absl::nullopt;
+  }
+
+  bool GetStats(Stats* stats) override { return false; }
+
+  // Forwarded to the subclass-provided source(); must be called on the
+  // worker thread.
+  void AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink,
+                       const rtc::VideoSinkWants& wants) override;
+  void RemoveSink(rtc::VideoSinkInterface<VideoFrame>* sink) override;
+
+  // Encoded output is not supported by this base class; the overrides below
+  // are intentionally no-ops.
+  bool SupportsEncodedOutput() const override { return false; }
+  void GenerateKeyFrame() override {}
+  void AddEncodedSink(
+      rtc::VideoSinkInterface<RecordableEncodedFrame>* sink) override {}
+  void RemoveEncodedSink(
+      rtc::VideoSinkInterface<RecordableEncodedFrame>* sink) override {}
+
+ protected:
+  // The underlying frame source that sinks are attached to.
+  virtual rtc::VideoSourceInterface<VideoFrame>* source() = 0;
+
+ private:
+  RTC_NO_UNIQUE_ADDRESS SequenceChecker worker_thread_checker_{
+      SequenceChecker::kDetached};
+  RTC_NO_UNIQUE_ADDRESS SequenceChecker signaling_thread_checker_;
+  SourceState state_ RTC_GUARDED_BY(&signaling_thread_checker_);
+  const bool remote_;
+};
+
+}  // namespace webrtc
+
+#endif // PC_VIDEO_TRACK_SOURCE_H_
diff --git a/third_party/libwebrtc/pc/video_track_source_proxy.cc b/third_party/libwebrtc/pc/video_track_source_proxy.cc
new file mode 100644
index 0000000000..c3e95e23cc
--- /dev/null
+++ b/third_party/libwebrtc/pc/video_track_source_proxy.cc
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/video_track_source_proxy.h"
+
+#include "api/media_stream_interface.h"
+#include "api/scoped_refptr.h"
+#include "api/video_track_source_proxy_factory.h"
+#include "rtc_base/thread.h"
+
+namespace webrtc {
+
+// Factory declared in api/video_track_source_proxy_factory.h. Wraps `source`
+// in a proxy that marshals API calls onto the given threads.
+rtc::scoped_refptr<VideoTrackSourceInterface> CreateVideoTrackSourceProxy(
+    rtc::Thread* signaling_thread,
+    rtc::Thread* worker_thread,
+    VideoTrackSourceInterface* source) {
+  return VideoTrackSourceProxy::Create(
+      signaling_thread, worker_thread,
+      rtc::scoped_refptr<VideoTrackSourceInterface>(source));
+}
+
+}  // namespace webrtc
diff --git a/third_party/libwebrtc/pc/video_track_source_proxy.h b/third_party/libwebrtc/pc/video_track_source_proxy.h
new file mode 100644
index 0000000000..8500a98766
--- /dev/null
+++ b/third_party/libwebrtc/pc/video_track_source_proxy.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_VIDEO_TRACK_SOURCE_PROXY_H_
+#define PC_VIDEO_TRACK_SOURCE_PROXY_H_
+
+#include "absl/types/optional.h"
+#include "api/media_stream_interface.h"
+#include "api/video/recordable_encoded_frame.h"
+#include "api/video/video_frame.h"
+#include "api/video/video_sink_interface.h"
+#include "api/video/video_source_interface.h"
+#include "api/video_track_source_constraints.h"
+#include "pc/proxy.h"
+
+namespace webrtc {
+
+// Makes sure the real VideoTrackSourceInterface implementation is destroyed on
+// the signaling thread and marshals all method calls to the signaling thread.
+// The SECONDARY entries below are marshalled to the worker thread instead.
+// TODO(deadbeef): Move this to .cc file. What threads methods are called on is
+// an implementation detail.
+BEGIN_PROXY_MAP(VideoTrackSource)
+
+PROXY_PRIMARY_THREAD_DESTRUCTOR()
+PROXY_CONSTMETHOD0(SourceState, state)
+BYPASS_PROXY_CONSTMETHOD0(bool, remote)
+BYPASS_PROXY_CONSTMETHOD0(bool, is_screencast)
+PROXY_CONSTMETHOD0(absl::optional<bool>, needs_denoising)
+PROXY_METHOD1(bool, GetStats, Stats*)
+PROXY_SECONDARY_METHOD2(void,
+                        AddOrUpdateSink,
+                        rtc::VideoSinkInterface<VideoFrame>*,
+                        const rtc::VideoSinkWants&)
+PROXY_SECONDARY_METHOD1(void, RemoveSink, rtc::VideoSinkInterface<VideoFrame>*)
+PROXY_SECONDARY_METHOD0(void, RequestRefreshFrame)
+PROXY_METHOD1(void, RegisterObserver, ObserverInterface*)
+PROXY_METHOD1(void, UnregisterObserver, ObserverInterface*)
+PROXY_CONSTMETHOD0(bool, SupportsEncodedOutput)
+PROXY_SECONDARY_METHOD0(void, GenerateKeyFrame)
+PROXY_SECONDARY_METHOD1(void,
+                        AddEncodedSink,
+                        rtc::VideoSinkInterface<RecordableEncodedFrame>*)
+PROXY_SECONDARY_METHOD1(void,
+                        RemoveEncodedSink,
+                        rtc::VideoSinkInterface<RecordableEncodedFrame>*)
+PROXY_SECONDARY_METHOD1(void,
+                        ProcessConstraints,
+                        const webrtc::VideoTrackSourceConstraints&)
+END_PROXY_MAP(VideoTrackSource)
+
+}  // namespace webrtc
+
+#endif // PC_VIDEO_TRACK_SOURCE_PROXY_H_
diff --git a/third_party/libwebrtc/pc/video_track_unittest.cc b/third_party/libwebrtc/pc/video_track_unittest.cc
new file mode 100644
index 0000000000..e75fd034b3
--- /dev/null
+++ b/third_party/libwebrtc/pc/video_track_unittest.cc
@@ -0,0 +1,103 @@
+/*
+ * Copyright 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/video_track.h"
+
+#include <memory>
+
+#include "media/base/fake_frame_source.h"
+#include "pc/test/fake_video_track_renderer.h"
+#include "pc/test/fake_video_track_source.h"
+#include "pc/video_track_source.h"
+#include "rtc_base/time_utils.h"
+#include "test/gtest.h"
+
+using webrtc::FakeVideoTrackRenderer;
+using webrtc::FakeVideoTrackSource;
+using webrtc::MediaSourceInterface;
+using webrtc::MediaStreamTrackInterface;
+using webrtc::VideoTrack;
+using webrtc::VideoTrackInterface;
+using webrtc::VideoTrackSource;
+
+// Fixture that creates a VideoTrack backed by a FakeVideoTrackSource, with the
+// current thread acting as both signaling and worker thread.
+class VideoTrackTest : public ::testing::Test {
+ public:
+  VideoTrackTest() : frame_source_(640, 480, rtc::kNumMicrosecsPerSec / 30) {
+    static const char kVideoTrackId[] = "track_id";
+    video_track_source_ = rtc::make_ref_counted<FakeVideoTrackSource>(
+        /*is_screencast=*/false);
+    video_track_ = VideoTrack::Create(kVideoTrackId, video_track_source_,
+                                      rtc::Thread::Current());
+  }
+
+ protected:
+  rtc::AutoThread main_thread_;
+  rtc::scoped_refptr<FakeVideoTrackSource> video_track_source_;
+  rtc::scoped_refptr<VideoTrack> video_track_;
+  cricket::FakeFrameSource frame_source_;
+};
+
+// VideoTrack::Create will create an API proxy around the source object.
+// The `GetSource` method provides access to the proxy object intended for API
+// use while the GetSourceInternal() provides direct access to the source object
+// as provided to the `VideoTrack::Create` factory function.
+TEST_F(VideoTrackTest, CheckApiProxyAndInternalSource) {
+  EXPECT_NE(video_track_->GetSource(), video_track_source_.get());
+  EXPECT_EQ(video_track_->GetSourceInternal(), video_track_source_.get());
+}
+
+// Test changing the source state also changes the track state.
+TEST_F(VideoTrackTest, SourceStateChangeTrackState) {
+  EXPECT_EQ(MediaStreamTrackInterface::kLive, video_track_->state());
+  video_track_source_->SetState(MediaSourceInterface::kEnded);
+  EXPECT_EQ(MediaStreamTrackInterface::kEnded, video_track_->state());
+}
+
+// Test adding renderers to a video track and render to them by providing
+// frames to the source.
+TEST_F(VideoTrackTest, RenderVideo) {
+  // FakeVideoTrackRenderer registers itself with `video_track_`.
+  std::unique_ptr<FakeVideoTrackRenderer> renderer_1(
+      new FakeVideoTrackRenderer(video_track_.get()));
+
+  video_track_source_->InjectFrame(frame_source_.GetFrame());
+  EXPECT_EQ(1, renderer_1->num_rendered_frames());
+
+  // FakeVideoTrackRenderer registers itself with `video_track_`.
+  std::unique_ptr<FakeVideoTrackRenderer> renderer_2(
+      new FakeVideoTrackRenderer(video_track_.get()));
+  video_track_source_->InjectFrame(frame_source_.GetFrame());
+  EXPECT_EQ(2, renderer_1->num_rendered_frames());
+  EXPECT_EQ(1, renderer_2->num_rendered_frames());
+
+  // A destroyed renderer must stop receiving frames.
+  renderer_1.reset(nullptr);
+  video_track_source_->InjectFrame(frame_source_.GetFrame());
+  EXPECT_EQ(2, renderer_2->num_rendered_frames());
+}
+
+// Test that disabling the track results in blacked out frames.
+TEST_F(VideoTrackTest, DisableTrackBlackout) {
+  std::unique_ptr<FakeVideoTrackRenderer> renderer(
+      new FakeVideoTrackRenderer(video_track_.get()));
+
+  video_track_source_->InjectFrame(frame_source_.GetFrame());
+  EXPECT_EQ(1, renderer->num_rendered_frames());
+  EXPECT_FALSE(renderer->black_frame());
+
+  video_track_->set_enabled(false);
+  video_track_source_->InjectFrame(frame_source_.GetFrame());
+  EXPECT_EQ(2, renderer->num_rendered_frames());
+  EXPECT_TRUE(renderer->black_frame());
+
+  video_track_->set_enabled(true);
+  video_track_source_->InjectFrame(frame_source_.GetFrame());
+  EXPECT_EQ(3, renderer->num_rendered_frames());
+  EXPECT_FALSE(renderer->black_frame());
+}
diff --git a/third_party/libwebrtc/pc/webrtc_sdp.cc b/third_party/libwebrtc/pc/webrtc_sdp.cc
new file mode 100644
index 0000000000..2a3173cb02
--- /dev/null
+++ b/third_party/libwebrtc/pc/webrtc_sdp.cc
@@ -0,0 +1,3835 @@
+/*
+ * Copyright 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/webrtc_sdp.h"
+
+#include <ctype.h>
+#include <limits.h>
+
+#include <algorithm>
+#include <cstddef>
+#include <cstdint>
+#include <map>
+#include <memory>
+#include <set>
+#include <string>
+#include <type_traits>
+#include <unordered_map>
+#include <utility>
+#include <vector>
+
+#include "absl/algorithm/container.h"
+#include "absl/strings/ascii.h"
+#include "absl/strings/match.h"
+#include "api/candidate.h"
+#include "api/crypto_params.h"
+#include "api/jsep_ice_candidate.h"
+#include "api/jsep_session_description.h"
+#include "api/media_types.h"
+// for RtpExtension
+#include "absl/strings/string_view.h"
+#include "absl/types/optional.h"
+#include "api/rtc_error.h"
+#include "api/rtp_parameters.h"
+#include "api/rtp_transceiver_direction.h"
+#include "media/base/codec.h"
+#include "media/base/media_constants.h"
+#include "media/base/rid_description.h"
+#include "media/base/rtp_utils.h"
+#include "media/base/stream_params.h"
+#include "media/sctp/sctp_transport_internal.h"
+#include "p2p/base/candidate_pair_interface.h"
+#include "p2p/base/ice_transport_internal.h"
+#include "p2p/base/p2p_constants.h"
+#include "p2p/base/port.h"
+#include "p2p/base/port_interface.h"
+#include "p2p/base/transport_description.h"
+#include "p2p/base/transport_info.h"
+#include "pc/media_protocol_names.h"
+#include "pc/media_session.h"
+#include "pc/session_description.h"
+#include "pc/simulcast_description.h"
+#include "pc/simulcast_sdp_serializer.h"
+#include "rtc_base/arraysize.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/helpers.h"
+#include "rtc_base/ip_address.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/net_helper.h"
+#include "rtc_base/network_constants.h"
+#include "rtc_base/socket_address.h"
+#include "rtc_base/ssl_fingerprint.h"
+#include "rtc_base/string_encode.h"
+#include "rtc_base/string_utils.h"
+#include "rtc_base/strings/string_builder.h"
+
+using cricket::AudioContentDescription;
+using cricket::Candidate;
+using cricket::Candidates;
+using cricket::ContentInfo;
+using cricket::CryptoParams;
+using cricket::ICE_CANDIDATE_COMPONENT_RTCP;
+using cricket::ICE_CANDIDATE_COMPONENT_RTP;
+using cricket::kApplicationSpecificBandwidth;
+using cricket::kCodecParamMaxPTime;
+using cricket::kCodecParamMinPTime;
+using cricket::kCodecParamPTime;
+using cricket::kTransportSpecificBandwidth;
+using cricket::MediaContentDescription;
+using cricket::MediaProtocolType;
+using cricket::MediaType;
+using cricket::RidDescription;
+using cricket::RtpHeaderExtensions;
+using cricket::SctpDataContentDescription;
+using cricket::SimulcastDescription;
+using cricket::SimulcastLayer;
+using cricket::SimulcastLayerList;
+using cricket::SsrcGroup;
+using cricket::StreamParams;
+using cricket::StreamParamsVec;
+using cricket::TransportDescription;
+using cricket::TransportInfo;
+using cricket::UnsupportedContentDescription;
+using cricket::VideoContentDescription;
+using rtc::SocketAddress;
+
+// TODO(deadbeef): Switch to using anonymous namespace rather than declaring
+// everything "static".
+namespace webrtc {
+
+// Line type
+// RFC 4566
+// An SDP session description consists of a number of lines of text of
+// the form:
+// <type>=<value>
+// where <type> MUST be exactly one case-significant character.
+
+// Check if passed character is a "token-char" from RFC 4566.
+// https://datatracker.ietf.org/doc/html/rfc4566#section-9
+// token-char = %x21 / %x23-27 / %x2A-2B / %x2D-2E / %x30-39
+//              / %x41-5A / %x5E-7E
+// i.e. printable ASCII excluding SP, '"', '(', ')', ',', '/', ':'..'@',
+// '['..']' and DEL.
+bool IsTokenChar(char ch) {
+  return ch == 0x21 || (ch >= 0x23 && ch <= 0x27) || ch == 0x2a || ch == 0x2b ||
+         ch == 0x2d || ch == 0x2e || (ch >= 0x30 && ch <= 0x39) ||
+         (ch >= 0x41 && ch <= 0x5a) || (ch >= 0x5e && ch <= 0x7e);
+}
+static const int kLinePrefixLength = 2; // Length of <type>=
+static const char kLineTypeVersion = 'v';
+static const char kLineTypeOrigin = 'o';
+static const char kLineTypeSessionName = 's';
+static const char kLineTypeSessionInfo = 'i';
+static const char kLineTypeSessionUri = 'u';
+static const char kLineTypeSessionEmail = 'e';
+static const char kLineTypeSessionPhone = 'p';
+static const char kLineTypeSessionBandwidth = 'b';
// SDP line-type characters: the <type> in "<type>=<value>" (RFC 4566).
static const char kLineTypeTiming = 't';
static const char kLineTypeRepeatTimes = 'r';
static const char kLineTypeTimeZone = 'z';
static const char kLineTypeEncryptionKey = 'k';
static const char kLineTypeMedia = 'm';
static const char kLineTypeConnection = 'c';
static const char kLineTypeAttributes = 'a';

// Attributes
static const char kAttributeGroup[] = "group";
static const char kAttributeMid[] = "mid";
static const char kAttributeMsid[] = "msid";
static const char kAttributeBundleOnly[] = "bundle-only";
static const char kAttributeRtcpMux[] = "rtcp-mux";
static const char kAttributeRtcpReducedSize[] = "rtcp-rsize";
static const char kAttributeSsrc[] = "ssrc";
static const char kSsrcAttributeCname[] = "cname";
static const char kAttributeExtmapAllowMixed[] = "extmap-allow-mixed";
static const char kAttributeExtmap[] = "extmap";
// draft-alvestrand-mmusic-msid-01
// a=msid-semantic: WMS
// This is a legacy field supported only for Plan B semantics.
static const char kAttributeMsidSemantics[] = "msid-semantic";
static const char kMediaStreamSemantic[] = "WMS";
static const char kSsrcAttributeMsid[] = "msid";
static const char kDefaultMsid[] = "default";
static const char kNoStreamMsid[] = "-";
static const char kAttributeSsrcGroup[] = "ssrc-group";
static const char kAttributeCrypto[] = "crypto";
static const char kAttributeCandidate[] = "candidate";
static const char kAttributeCandidateTyp[] = "typ";
static const char kAttributeCandidateRaddr[] = "raddr";
static const char kAttributeCandidateRport[] = "rport";
static const char kAttributeCandidateUfrag[] = "ufrag";
static const char kAttributeCandidatePwd[] = "pwd";
static const char kAttributeCandidateGeneration[] = "generation";
static const char kAttributeCandidateNetworkId[] = "network-id";
static const char kAttributeCandidateNetworkCost[] = "network-cost";
static const char kAttributeFingerprint[] = "fingerprint";
static const char kAttributeSetup[] = "setup";
static const char kAttributeFmtp[] = "fmtp";
static const char kAttributeRtpmap[] = "rtpmap";
static const char kAttributeSctpmap[] = "sctpmap";
static const char kAttributeRtcp[] = "rtcp";
static const char kAttributeIceUfrag[] = "ice-ufrag";
static const char kAttributeIcePwd[] = "ice-pwd";
static const char kAttributeIceLite[] = "ice-lite";
static const char kAttributeIceOption[] = "ice-options";
static const char kAttributeSendOnly[] = "sendonly";
static const char kAttributeRecvOnly[] = "recvonly";
static const char kAttributeRtcpFb[] = "rtcp-fb";
static const char kAttributeSendRecv[] = "sendrecv";
static const char kAttributeInactive[] = "inactive";
// draft-ietf-mmusic-sctp-sdp-26
// a=sctp-port, a=max-message-size
static const char kAttributeSctpPort[] = "sctp-port";
static const char kAttributeMaxMessageSize[] = "max-message-size";
static const int kDefaultSctpMaxMessageSize = 65536;
// draft-ietf-mmusic-sdp-simulcast-13
// a=simulcast
static const char kAttributeSimulcast[] = "simulcast";
// draft-ietf-mmusic-rid-15
// a=rid
static const char kAttributeRid[] = "rid";
static const char kAttributePacketization[] = "packetization";

// Experimental flags
static const char kAttributeXGoogleFlag[] = "x-google-flag";
static const char kValueConference[] = "conference";

static const char kAttributeRtcpRemoteEstimate[] = "remote-net-estimate";

// Candidate
static const char kCandidateHost[] = "host";
static const char kCandidateSrflx[] = "srflx";
static const char kCandidatePrflx[] = "prflx";
static const char kCandidateRelay[] = "relay";
static const char kTcpCandidateType[] = "tcptype";

// rtc::StringBuilder doesn't have a << overload for chars, while rtc::split and
// rtc::tokenize_first both take a char delimiter. To handle both cases these
// constants come in pairs of a char and a length-one string.
static const char kSdpDelimiterEqual[] = "=";
static const char kSdpDelimiterEqualChar = '=';
static const char kSdpDelimiterSpace[] = " ";
static const char kSdpDelimiterSpaceChar = ' ';
static const char kSdpDelimiterColon[] = ":";
static const char kSdpDelimiterColonChar = ':';
static const char kSdpDelimiterSemicolon[] = ";";
static const char kSdpDelimiterSemicolonChar = ';';
static const char kSdpDelimiterSlashChar = '/';
static const char kNewLineChar = '\n';
static const char kReturnChar = '\r';
static const char kLineBreak[] = "\r\n";

// TODO(deadbeef): Generate the Session and Time description
// instead of hardcoding.
static const char kSessionVersion[] = "v=0";
// RFC 4566
static const char kSessionOriginUsername[] = "-";
static const char kSessionOriginSessionId[] = "0";
static const char kSessionOriginSessionVersion[] = "0";
static const char kSessionOriginNettype[] = "IN";
static const char kSessionOriginAddrtype[] = "IP4";
static const char kSessionOriginAddress[] = "127.0.0.1";
static const char kSessionName[] = "s=-";
static const char kTimeDescription[] = "t=0 0";
static const char kAttrGroup[] = "a=group:BUNDLE";
static const char kConnectionNettype[] = "IN";
static const char kConnectionIpv4Addrtype[] = "IP4";
static const char kConnectionIpv6Addrtype[] = "IP6";
static const char kMediaTypeVideo[] = "video";
static const char kMediaTypeAudio[] = "audio";
static const char kMediaTypeData[] = "application";
static const char kMediaPortRejected[] = "0";
// draft-ietf-mmusic-trickle-ice-01
// When no candidates have been gathered, set the connection
// address to IP6 ::.
// TODO(perkj): FF can not parse IP6 ::. See http://crbug/430333
// Use IPV4 per default.
static const char kDummyAddress[] = "0.0.0.0";
static const char kDummyPort[] = "9";

static const char kDefaultSctpmapProtocol[] = "webrtc-datachannel";

// RTP payload type is in the 0-127 range. Use -1 to indicate "all" payload
// types.
const int kWildcardPayloadType = -1;

// Maximum number of channels allowed.
static const size_t kMaxNumberOfChannels = 24;

// Information collected from one SSRC's "a=ssrc:<id> <attr>:<value>" lines
// (RFC 5576).
struct SsrcInfo {
  uint32_t ssrc_id;
  // Value of the ssrc-level "cname" attribute.
  std::string cname;
  // Legacy Plan B stream/track ids — presumably filled from the ssrc-level
  // "msid" attribute; verify against ParseSsrcAttribute.
  std::string stream_id;
  std::string track_id;
};
typedef std::vector<SsrcInfo> SsrcInfoVec;
typedef std::vector<SsrcGroup> SsrcGroupVec;
+
// Forward declarations of the file-local serialization helpers defined below.
template <class T>
static void AddFmtpLine(const T& codec, std::string* message);
static void BuildMediaDescription(const ContentInfo* content_info,
                                  const TransportInfo* transport_info,
                                  const cricket::MediaType media_type,
                                  const std::vector<Candidate>& candidates,
                                  int msid_signaling,
                                  std::string* message);
static void BuildMediaLine(const cricket::MediaType media_type,
                           const ContentInfo* content_info,
                           const MediaContentDescription* media_desc,
                           std::string* message);
static void BuildRtpContentAttributes(const MediaContentDescription* media_desc,
                                      const cricket::MediaType media_type,
                                      int msid_signaling,
                                      std::string* message);
static void BuildRtpHeaderExtensions(const RtpHeaderExtensions& extensions,
                                     std::string* message);
static void BuildRtpmap(const MediaContentDescription* media_desc,
                        const cricket::MediaType media_type,
                        std::string* message);
static void BuildCandidate(const std::vector<Candidate>& candidates,
                           bool include_ufrag,
                           std::string* message);
static void BuildIceUfragPwd(const TransportInfo* transport_info,
                             std::string* message);
static void BuildDtlsFingerprintSetup(const TransportInfo* transport_info,
                                      std::string* message);
static void BuildIceOptions(const std::vector<std::string>& transport_options,
                            std::string* message);
// Forward declarations of the file-local parsing helpers defined below.
static bool ParseSessionDescription(absl::string_view message,
                                    size_t* pos,
                                    std::string* session_id,
                                    std::string* session_version,
                                    TransportDescription* session_td,
                                    RtpHeaderExtensions* session_extmaps,
                                    rtc::SocketAddress* connection_addr,
                                    cricket::SessionDescription* desc,
                                    SdpParseError* error);
static bool ParseMediaDescription(
    absl::string_view message,
    const TransportDescription& session_td,
    const RtpHeaderExtensions& session_extmaps,
    size_t* pos,
    const rtc::SocketAddress& session_connection_addr,
    cricket::SessionDescription* desc,
    std::vector<std::unique_ptr<JsepIceCandidate>>* candidates,
    SdpParseError* error);
static bool ParseContent(
    absl::string_view message,
    const cricket::MediaType media_type,
    int mline_index,
    absl::string_view protocol,
    const std::vector<int>& payload_types,
    size_t* pos,
    std::string* content_name,
    bool* bundle_only,
    int* msid_signaling,
    MediaContentDescription* media_desc,
    TransportDescription* transport,
    std::vector<std::unique_ptr<JsepIceCandidate>>* candidates,
    SdpParseError* error);
static bool ParseGroupAttribute(absl::string_view line,
                                cricket::SessionDescription* desc,
                                SdpParseError* error);
static bool ParseSsrcAttribute(absl::string_view line,
                               SsrcInfoVec* ssrc_infos,
                               int* msid_signaling,
                               SdpParseError* error);
static bool ParseSsrcGroupAttribute(absl::string_view line,
                                    SsrcGroupVec* ssrc_groups,
                                    SdpParseError* error);
static bool ParseCryptoAttribute(absl::string_view line,
                                 MediaContentDescription* media_desc,
                                 SdpParseError* error);
static bool ParseRtpmapAttribute(absl::string_view line,
                                 const cricket::MediaType media_type,
                                 const std::vector<int>& payload_types,
                                 MediaContentDescription* media_desc,
                                 SdpParseError* error);
static bool ParseFmtpAttributes(absl::string_view line,
                                const cricket::MediaType media_type,
                                MediaContentDescription* media_desc,
                                SdpParseError* error);
static bool ParseFmtpParam(absl::string_view line,
                           std::string* parameter,
                           std::string* value,
                           SdpParseError* error);
static bool ParsePacketizationAttribute(absl::string_view line,
                                        const cricket::MediaType media_type,
                                        MediaContentDescription* media_desc,
                                        SdpParseError* error);
static bool ParseRtcpFbAttribute(absl::string_view line,
                                 const cricket::MediaType media_type,
                                 MediaContentDescription* media_desc,
                                 SdpParseError* error);
static bool ParseIceOptions(absl::string_view line,
                            std::vector<std::string>* transport_options,
                            SdpParseError* error);
static bool ParseExtmap(absl::string_view line,
                        RtpExtension* extmap,
                        SdpParseError* error);
static bool ParseFingerprintAttribute(
    absl::string_view line,
    std::unique_ptr<rtc::SSLFingerprint>* fingerprint,
    SdpParseError* error);
static bool ParseDtlsSetup(absl::string_view line,
                           cricket::ConnectionRole* role,
                           SdpParseError* error);
static bool ParseMsidAttribute(absl::string_view line,
                               std::vector<std::string>* stream_ids,
                               std::string* track_id,
                               SdpParseError* error);

// Simulcast/RID clean-up helpers defined below.
static void RemoveInvalidRidDescriptions(const std::vector<int>& payload_types,
                                         std::vector<RidDescription>* rids);

static SimulcastLayerList RemoveRidsFromSimulcastLayerList(
    const std::set<std::string>& to_remove,
    const SimulcastLayerList& layers);

static void RemoveInvalidRidsFromSimulcast(
    const std::vector<RidDescription>& rids,
    SimulcastDescription* simulcast);
+
+// Helper functions
+
+// Below ParseFailed*** functions output the line that caused the parsing
+// failure and the detailed reason (`description`) of the failure to `error`.
+// The functions always return false so that they can be used directly in the
+// following way when error happens:
+// "return ParseFailed***(...);"
+
+// The line starting at `line_start` of `message` is the failing line.
+// The reason for the failure should be provided in the `description`.
+// An example of a description could be "unknown character".
+static bool ParseFailed(absl::string_view message,
+ size_t line_start,
+ std::string description,
+ SdpParseError* error) {
+ // Get the first line of `message` from `line_start`.
+ absl::string_view first_line;
+ size_t line_end = message.find(kNewLineChar, line_start);
+ if (line_end != std::string::npos) {
+ if (line_end > 0 && (message.at(line_end - 1) == kReturnChar)) {
+ --line_end;
+ }
+ first_line = message.substr(line_start, (line_end - line_start));
+ } else {
+ first_line = message.substr(line_start);
+ }
+
+ RTC_LOG(LS_ERROR) << "Failed to parse: \"" << first_line
+ << "\". Reason: " << description;
+ if (error) {
+ // TODO(bugs.webrtc.org/13220): In C++17, we can use plain assignment, with
+ // a string_view on the right hand side.
+ error->line.assign(first_line.data(), first_line.size());
+ error->description = std::move(description);
+ }
+ return false;
+}
+
// `line` is the failing line. The reason for the failure should be
// provided in the `description`. Always returns false.
static bool ParseFailed(absl::string_view line,
                        std::string description,
                        SdpParseError* error) {
  // Treat the whole of `line` as the failing line (offset 0).
  return ParseFailed(line, 0, std::move(description), error);
}
+
// Parse failure where the failing SDP line isn't known or there are multiple
// failing lines. Reports an empty line to `error`. Always returns false.
static bool ParseFailed(std::string description, SdpParseError* error) {
  return ParseFailed("", std::move(description), error);
}
+
+// `line` is the failing line. The failure is due to the fact that `line`
+// doesn't have `expected_fields` fields.
+static bool ParseFailedExpectFieldNum(absl::string_view line,
+ int expected_fields,
+ SdpParseError* error) {
+ rtc::StringBuilder description;
+ description << "Expects " << expected_fields << " fields.";
+ return ParseFailed(line, description.Release(), error);
+}
+
+// `line` is the failing line. The failure is due to the fact that `line` has
+// less than `expected_min_fields` fields.
+static bool ParseFailedExpectMinFieldNum(absl::string_view line,
+ int expected_min_fields,
+ SdpParseError* error) {
+ rtc::StringBuilder description;
+ description << "Expects at least " << expected_min_fields << " fields.";
+ return ParseFailed(line, description.Release(), error);
+}
+
+// `line` is the failing line. The failure is due to the fact that it failed to
+// get the value of `attribute`.
+static bool ParseFailedGetValue(absl::string_view line,
+ absl::string_view attribute,
+ SdpParseError* error) {
+ rtc::StringBuilder description;
+ description << "Failed to get the value of attribute: " << attribute;
+ return ParseFailed(line, description.Release(), error);
+}
+
+// The line starting at `line_start` of `message` is the failing line. The
+// failure is due to the line type (e.g. the "m" part of the "m-line")
+// not matching what is expected. The expected line type should be
+// provided as `line_type`.
+static bool ParseFailedExpectLine(absl::string_view message,
+ size_t line_start,
+ const char line_type,
+ absl::string_view line_value,
+ SdpParseError* error) {
+ rtc::StringBuilder description;
+ description << "Expect line: " << std::string(1, line_type) << "="
+ << line_value;
+ return ParseFailed(message, line_start, description.Release(), error);
+}
+
+static bool AddLine(absl::string_view line, std::string* message) {
+ if (!message)
+ return false;
+
+ message->append(line.data(), line.size());
+ message->append(kLineBreak);
+ return true;
+}
+
+// Trim return character, if any.
+static absl::string_view TrimReturnChar(absl::string_view line) {
+ if (!line.empty() && line.back() == kReturnChar) {
+ line.remove_suffix(1);
+ }
+ return line;
+}
+
// Gets line of `message` starting at `pos`, and checks overall SDP syntax. On
// success, advances `pos` to the next line. Returns nullopt when there is no
// complete (newline-terminated) line at `pos` or the line is malformed.
static absl::optional<absl::string_view> GetLine(absl::string_view message,
                                                 size_t* pos) {
  size_t line_end = message.find(kNewLineChar, *pos);
  if (line_end == absl::string_view::npos) {
    // No terminating newline: not a complete line.
    return absl::nullopt;
  }
  // Drop the trailing '\r' of a CRLF ending, if present.
  absl::string_view line =
      TrimReturnChar(message.substr(*pos, line_end - *pos));

  // RFC 4566
  // An SDP session description consists of a number of lines of text of
  // the form:
  // <type>=<value>
  // where <type> MUST be exactly one case-significant character and
  // <value> is structured text whose format depends on <type>.
  // Whitespace MUST NOT be used on either side of the "=" sign.
  //
  // However, an exception to the whitespace rule is made for "s=", since
  // RFC4566 also says:
  //
  //   If a session has no meaningful name, the value "s= " SHOULD be used
  //   (i.e., a single space as the session name).
  if (line.length() < 3 || !islower(static_cast<unsigned char>(line[0])) ||
      line[1] != kSdpDelimiterEqualChar ||
      (line[0] != kLineTypeSessionName && line[2] == kSdpDelimiterSpaceChar)) {
    return absl::nullopt;
  }
  // Advance `pos` past the newline so the next call reads the next line.
  *pos = line_end + 1;
  return line;
}
+
+// Init `os` to "`type`=`value`".
+static void InitLine(const char type,
+ absl::string_view value,
+ rtc::StringBuilder* os) {
+ os->Clear();
+ *os << std::string(1, type) << kSdpDelimiterEqual << value;
+}
+
// Init `os` to "a=`attribute`" (an attribute line without its value part).
static void InitAttrLine(absl::string_view attribute, rtc::StringBuilder* os) {
  InitLine(kLineTypeAttributes, attribute, os);
}
+
+// Writes a SDP attribute line based on `attribute` and `value` to `message`.
+static void AddAttributeLine(absl::string_view attribute,
+ int value,
+ std::string* message) {
+ rtc::StringBuilder os;
+ InitAttrLine(attribute, &os);
+ os << kSdpDelimiterColon << value;
+ AddLine(os.str(), message);
+}
+
+static bool IsLineType(absl::string_view message,
+ const char type,
+ size_t line_start) {
+ if (message.size() < line_start + kLinePrefixLength) {
+ return false;
+ }
+ return (message[line_start] == type &&
+ message[line_start + 1] == kSdpDelimiterEqualChar);
+}
+
// Convenience overload: checks `line` from its beginning.
static bool IsLineType(absl::string_view line, const char type) {
  return IsLineType(line, type, 0);
}
+
+static absl::optional<absl::string_view>
+GetLineWithType(absl::string_view message, size_t* pos, const char type) {
+ if (IsLineType(message, type, *pos)) {
+ return GetLine(message, pos);
+ }
+ return absl::nullopt;
+}
+
// Returns true if `line` (after its two-character "<type>=" prefix) carries
// `attribute` as a complete word, i.e. followed by end-of-line, ':' or ' '.
// NOTE(review): `line` is assumed to be at least kLinePrefixLength long
// (compare() would throw otherwise) — callers appear to guarantee this via
// IsLineType; confirm before reusing elsewhere.
static bool HasAttribute(absl::string_view line, absl::string_view attribute) {
  if (line.compare(kLinePrefixLength, attribute.size(), attribute) == 0) {
    // Make sure that the match is not only a partial match. If length of
    // strings doesn't match, the next character of the line must be ':' or ' '.
    // This function is also used for media descriptions (e.g., "m=audio 9..."),
    // hence the need to also allow space in the end.
    RTC_CHECK_LE(kLinePrefixLength + attribute.size(), line.size());
    if ((kLinePrefixLength + attribute.size()) == line.size() ||
        line[kLinePrefixLength + attribute.size()] == kSdpDelimiterColonChar ||
        line[kLinePrefixLength + attribute.size()] == kSdpDelimiterSpaceChar) {
      return true;
    }
  }
  return false;
}
+
+static bool AddSsrcLine(uint32_t ssrc_id,
+ absl::string_view attribute,
+ absl::string_view value,
+ std::string* message) {
+ // RFC 5576
+ // a=ssrc:<ssrc-id> <attribute>:<value>
+ rtc::StringBuilder os;
+ InitAttrLine(kAttributeSsrc, &os);
+ os << kSdpDelimiterColon << ssrc_id << kSdpDelimiterSpace << attribute
+ << kSdpDelimiterColon << value;
+ return AddLine(os.str(), message);
+}
+
+// Get value only from <attribute>:<value>.
+static bool GetValue(absl::string_view message,
+ absl::string_view attribute,
+ std::string* value,
+ SdpParseError* error) {
+ std::string leftpart;
+ if (!rtc::tokenize_first(message, kSdpDelimiterColonChar, &leftpart, value)) {
+ return ParseFailedGetValue(message, attribute, error);
+ }
+ // The left part should end with the expected attribute.
+ if (leftpart.length() < attribute.length() ||
+ absl::string_view(leftpart).compare(
+ leftpart.length() - attribute.length(), attribute.length(),
+ attribute) != 0) {
+ return ParseFailedGetValue(message, attribute, error);
+ }
+ return true;
+}
+
+// Get a single [token] from <attribute>:<token>
+static bool GetSingleTokenValue(absl::string_view message,
+ absl::string_view attribute,
+ std::string* value,
+ SdpParseError* error) {
+ if (!GetValue(message, attribute, value, error)) {
+ return false;
+ }
+ if (!absl::c_all_of(absl::string_view(*value), IsTokenChar)) {
+ rtc::StringBuilder description;
+ description << "Illegal character found in the value of " << attribute;
+ return ParseFailed(message, description.Release(), error);
+ }
+ return true;
+}
+
// Returns true if `str2` occurs within `str1`, ignoring ASCII case.
// Both parameters are taken by value on purpose: local copies are
// lower-cased in place before the search.
static bool CaseInsensitiveFind(std::string str1, std::string str2) {
  for (char& ch : str1) {
    ch = static_cast<char>(::tolower(static_cast<unsigned char>(ch)));
  }
  for (char& ch : str2) {
    ch = static_cast<char>(::tolower(static_cast<unsigned char>(ch)));
  }
  return str1.find(str2) != std::string::npos;
}
+
+template <class T>
+static bool GetValueFromString(absl::string_view line,
+ absl::string_view s,
+ T* t,
+ SdpParseError* error) {
+ if (!rtc::FromString(s, t)) {
+ rtc::StringBuilder description;
+ description << "Invalid value: " << s << ".";
+ return ParseFailed(line, description.Release(), error);
+ }
+ return true;
+}
+
// Parses `s` as an RTP payload type and validates the result.
// NOTE(review): when the number parses but is outside the valid payload-type
// range, this returns false WITHOUT filling `error` (only GetValueFromString
// sets it) — confirm callers handle an unset error before changing.
static bool GetPayloadTypeFromString(absl::string_view line,
                                     absl::string_view s,
                                     int* payload_type,
                                     SdpParseError* error) {
  return GetValueFromString(line, s, payload_type, error) &&
         cricket::IsValidRtpPayloadType(*payload_type);
}
+
+// Creates a StreamParams track in the case when no SSRC lines are signaled.
+// This is a track that does not contain SSRCs and only contains
+// stream_ids/track_id if it's signaled with a=msid lines.
+void CreateTrackWithNoSsrcs(const std::vector<std::string>& msid_stream_ids,
+ absl::string_view msid_track_id,
+ const std::vector<RidDescription>& rids,
+ StreamParamsVec* tracks) {
+ StreamParams track;
+ if (msid_track_id.empty() && rids.empty()) {
+ // We only create an unsignaled track if a=msid lines were signaled.
+ RTC_LOG(LS_INFO) << "MSID not signaled, skipping creation of StreamParams";
+ return;
+ }
+ track.set_stream_ids(msid_stream_ids);
+ track.id = std::string(msid_track_id);
+ track.set_rids(rids);
+ tracks->push_back(track);
+}
+
+// Creates the StreamParams tracks, for the case when SSRC lines are signaled.
+// `msid_stream_ids` and `msid_track_id` represent the stream/track ID from the
+// "a=msid" attribute, if it exists. They are empty if the attribute does not
+// exist. We prioritize getting stream_ids/track_ids signaled in a=msid lines.
+void CreateTracksFromSsrcInfos(const SsrcInfoVec& ssrc_infos,
+ const std::vector<std::string>& msid_stream_ids,
+ absl::string_view msid_track_id,
+ StreamParamsVec* tracks,
+ int msid_signaling) {
+ RTC_DCHECK(tracks);
+ for (const SsrcInfo& ssrc_info : ssrc_infos) {
+ // According to https://tools.ietf.org/html/rfc5576#section-6.1, the CNAME
+ // attribute is mandatory, but we relax that restriction.
+ if (ssrc_info.cname.empty()) {
+ RTC_LOG(LS_WARNING) << "CNAME attribute missing for SSRC "
+ << ssrc_info.ssrc_id;
+ }
+ std::vector<std::string> stream_ids;
+ std::string track_id;
+ if (msid_signaling & cricket::kMsidSignalingMediaSection) {
+ // This is the case with Unified Plan SDP msid signaling.
+ stream_ids = msid_stream_ids;
+ track_id = std::string(msid_track_id);
+ } else if (msid_signaling & cricket::kMsidSignalingSsrcAttribute) {
+ // This is the case with Plan B SDP msid signaling.
+ stream_ids.push_back(ssrc_info.stream_id);
+ track_id = ssrc_info.track_id;
+ } else {
+ // Since no media streams isn't supported with older SDP signaling, we
+ // use a default stream id.
+ stream_ids.push_back(kDefaultMsid);
+ }
+
+ auto track_it = absl::c_find_if(
+ *tracks,
+ [track_id](const StreamParams& track) { return track.id == track_id; });
+ if (track_it == tracks->end()) {
+ // If we don't find an existing track, create a new one.
+ tracks->push_back(StreamParams());
+ track_it = tracks->end() - 1;
+ }
+ StreamParams& track = *track_it;
+ track.add_ssrc(ssrc_info.ssrc_id);
+ track.cname = ssrc_info.cname;
+ track.set_stream_ids(stream_ids);
+ track.id = track_id;
+ }
+ for (StreamParams& stream : *tracks) {
+ // If a track ID wasn't populated from the SSRC attributes OR the
+ // msid attribute, use default/random values. This happens after
+ // deduplication.
+ if (stream.id.empty()) {
+ stream.id = rtc::CreateRandomString(8);
+ }
+ }
+}
+
+void GetMediaStreamIds(const ContentInfo* content,
+ std::set<std::string>* labels) {
+ for (const StreamParams& stream_params :
+ content->media_description()->streams()) {
+ for (const std::string& stream_id : stream_params.stream_ids()) {
+ labels->insert(stream_id);
+ }
+ }
+}
+
+// RFC 5245
+// It is RECOMMENDED that default candidates be chosen based on the
+// likelihood of those candidates to work with the peer that is being
+// contacted. It is RECOMMENDED that relayed > reflexive > host.
+static const int kPreferenceUnknown = 0;
+static const int kPreferenceHost = 1;
+static const int kPreferenceReflexive = 2;
+static const int kPreferenceRelayed = 3;
+
+static int GetCandidatePreferenceFromType(absl::string_view type) {
+ int preference = kPreferenceUnknown;
+ if (type == cricket::LOCAL_PORT_TYPE) {
+ preference = kPreferenceHost;
+ } else if (type == cricket::STUN_PORT_TYPE) {
+ preference = kPreferenceReflexive;
+ } else if (type == cricket::RELAY_PORT_TYPE) {
+ preference = kPreferenceRelayed;
+ } else {
+ RTC_DCHECK_NOTREACHED();
+ }
+ return preference;
+}
+
// Get ip and port of the default destination from the `candidates` with the
// given value of `component_id`. The default candidate should be the one most
// likely to work, typically IPv4 relay.
// RFC 5245
// The value of `component_id` currently supported are 1 (RTP) and 2 (RTCP).
// TODO(deadbeef): Decide the default destination in webrtcsession and
// pass it down via SessionDescription.
static void GetDefaultDestination(const std::vector<Candidate>& candidates,
                                  int component_id,
                                  std::string* port,
                                  std::string* ip,
                                  std::string* addr_type) {
  // Start from the "no candidates gathered" placeholder destination.
  *addr_type = kConnectionIpv4Addrtype;
  *port = kDummyPort;
  *ip = kDummyAddress;
  int current_preference = kPreferenceUnknown;
  int current_family = AF_UNSPEC;
  for (const Candidate& candidate : candidates) {
    // Skip candidates for the other component (RTP vs RTCP).
    if (candidate.component() != component_id) {
      continue;
    }
    // Default destination should be UDP only.
    if (candidate.protocol() != cricket::UDP_PROTOCOL_NAME) {
      continue;
    }
    const int preference = GetCandidatePreferenceFromType(candidate.type());
    const int family = candidate.address().ipaddr().family();
    // See if this candidate is more preferable than the current one if it's the
    // same family. Or if the current family is IPv4 already so we could safely
    // ignore all IPv6 ones. WebRTC bug 4269.
    // http://code.google.com/p/webrtc/issues/detail?id=4269
    if ((preference <= current_preference && current_family == family) ||
        (current_family == AF_INET && family == AF_INET6)) {
      continue;
    }
    if (family == AF_INET) {
      addr_type->assign(kConnectionIpv4Addrtype);
    } else if (family == AF_INET6) {
      addr_type->assign(kConnectionIpv6Addrtype);
    }
    current_preference = preference;
    current_family = family;
    *port = candidate.address().PortAsString();
    *ip = candidate.address().ipaddr().ToString();
  }
}
+
+// Gets "a=rtcp" line if found default RTCP candidate from `candidates`.
+static std::string GetRtcpLine(const std::vector<Candidate>& candidates) {
+ std::string rtcp_line, rtcp_port, rtcp_ip, addr_type;
+ GetDefaultDestination(candidates, ICE_CANDIDATE_COMPONENT_RTCP, &rtcp_port,
+ &rtcp_ip, &addr_type);
+ // Found default RTCP candidate.
+ // RFC 5245
+ // If the agent is utilizing RTCP, it MUST encode the RTCP candidate
+ // using the a=rtcp attribute as defined in RFC 3605.
+
+ // RFC 3605
+ // rtcp-attribute = "a=rtcp:" port [nettype space addrtype space
+ // connection-address] CRLF
+ rtc::StringBuilder os;
+ InitAttrLine(kAttributeRtcp, &os);
+ os << kSdpDelimiterColon << rtcp_port << " " << kConnectionNettype << " "
+ << addr_type << " " << rtcp_ip;
+ rtcp_line = os.str();
+ return rtcp_line;
+}
+
+// Get candidates according to the mline index from SessionDescriptionInterface.
+static void GetCandidatesByMindex(const SessionDescriptionInterface& desci,
+ int mline_index,
+ std::vector<Candidate>* candidates) {
+ if (!candidates) {
+ return;
+ }
+ const IceCandidateCollection* cc = desci.candidates(mline_index);
+ for (size_t i = 0; i < cc->count(); ++i) {
+ const IceCandidateInterface* candidate = cc->at(i);
+ candidates->push_back(candidate->candidate());
+ }
+}
+
// Returns true if `port` is within the valid transport port range [0, 65535].
static bool IsValidPort(int port) {
  return !(port < 0 || port > 65535);
}
+
// Serializes `jdesc` into a complete SDP blob: session-level lines first
// (v=/o=/s=/t=, BUNDLE groups, extmap-allow-mixed, msid-semantic, ice-lite),
// then one media section per content, in order. Returns "" if `jdesc` has no
// description.
std::string SdpSerialize(const JsepSessionDescription& jdesc) {
  const cricket::SessionDescription* desc = jdesc.description();
  if (!desc) {
    return "";
  }

  std::string message;

  // Session Description.
  AddLine(kSessionVersion, &message);
  // Session Origin
  // RFC 4566
  // o=<username> <sess-id> <sess-version> <nettype> <addrtype>
  // <unicast-address>
  rtc::StringBuilder os;
  InitLine(kLineTypeOrigin, kSessionOriginUsername, &os);
  // Fall back to the hardcoded "0" id/version when `jdesc` has none.
  const std::string& session_id =
      jdesc.session_id().empty() ? kSessionOriginSessionId : jdesc.session_id();
  const std::string& session_version = jdesc.session_version().empty()
                                           ? kSessionOriginSessionVersion
                                           : jdesc.session_version();
  os << " " << session_id << " " << session_version << " "
     << kSessionOriginNettype << " " << kSessionOriginAddrtype << " "
     << kSessionOriginAddress;
  AddLine(os.str(), &message);
  AddLine(kSessionName, &message);

  // Time Description.
  AddLine(kTimeDescription, &message);

  // BUNDLE Groups
  std::vector<const cricket::ContentGroup*> groups =
      desc->GetGroupsByName(cricket::GROUP_TYPE_BUNDLE);
  for (const cricket::ContentGroup* group : groups) {
    // Each group becomes one "a=group:BUNDLE <mid> <mid> ..." line.
    std::string group_line = kAttrGroup;
    RTC_DCHECK(group != NULL);
    for (const std::string& content_name : group->content_names()) {
      group_line.append(" ");
      group_line.append(content_name);
    }
    AddLine(group_line, &message);
  }

  // Mixed one- and two-byte header extension.
  if (desc->extmap_allow_mixed()) {
    InitAttrLine(kAttributeExtmapAllowMixed, &os);
    AddLine(os.str(), &message);
  }

  // MediaStream semantics
  InitAttrLine(kAttributeMsidSemantics, &os);
  os << kSdpDelimiterColon << " " << kMediaStreamSemantic;

  // Append the (deduplicated, sorted via std::set) stream ids of the first
  // audio and first video content.
  std::set<std::string> media_stream_ids;
  const ContentInfo* audio_content = GetFirstAudioContent(desc);
  if (audio_content)
    GetMediaStreamIds(audio_content, &media_stream_ids);

  const ContentInfo* video_content = GetFirstVideoContent(desc);
  if (video_content)
    GetMediaStreamIds(video_content, &media_stream_ids);

  for (const std::string& id : media_stream_ids) {
    os << " " << id;
  }
  AddLine(os.str(), &message);

  // a=ice-lite
  //
  // TODO(deadbeef): It's weird that we need to iterate TransportInfos for
  // this, when it's a session-level attribute. It really should be moved to a
  // session-level structure like SessionDescription.
  for (const cricket::TransportInfo& transport : desc->transport_infos()) {
    if (transport.description.ice_mode == cricket::ICEMODE_LITE) {
      InitAttrLine(kAttributeIceLite, &os);
      AddLine(os.str(), &message);
      break;
    }
  }

  // Preserve the order of the media contents.
  int mline_index = -1;
  for (const ContentInfo& content : desc->contents()) {
    std::vector<Candidate> candidates;
    GetCandidatesByMindex(jdesc, ++mline_index, &candidates);
    BuildMediaDescription(&content, desc->GetTransportInfoByName(content.name),
                          content.media_description()->type(), candidates,
                          desc->msid_signaling(), &message);
  }
  return message;
}
+
// Serializes the passed in IceCandidateInterface to a SDP string.
// candidate - The candidate to be serialized.
// Thin wrapper over the cricket::Candidate overload below.
std::string SdpSerializeCandidate(const IceCandidateInterface& candidate) {
  return SdpSerializeCandidate(candidate.candidate());
}
+
// Serializes a cricket Candidate.
std::string SdpSerializeCandidate(const cricket::Candidate& candidate) {
  std::string message;
  std::vector<cricket::Candidate> candidates(1, candidate);
  BuildCandidate(candidates, true, &message);
  // From WebRTC draft section 4.8.1.1 candidate-attribute will be
  // just candidate:<candidate> not a=candidate:<blah>CRLF
  // Strip the leading "a=" prefix added by BuildCandidate...
  RTC_DCHECK(message.find("a=") == 0);
  message.erase(0, 2);
  // ...and the trailing CRLF appended by AddLine.
  RTC_DCHECK(message.find(kLineBreak) == message.size() - 2);
  message.resize(message.size() - 2);
  return message;
}
+
// Deserializes the SDP blob `message` into `jdesc`. Returns false (filling
// `error`) on the first session- or media-level parse failure; on success
// `jdesc` also receives all candidates found in the media sections.
bool SdpDeserialize(absl::string_view message,
                    JsepSessionDescription* jdesc,
                    SdpParseError* error) {
  std::string session_id;
  std::string session_version;
  TransportDescription session_td("", "");
  RtpHeaderExtensions session_extmaps;
  rtc::SocketAddress session_connection_addr;
  auto desc = std::make_unique<cricket::SessionDescription>();
  // `current_pos` is advanced through `message` by the parsing helpers.
  size_t current_pos = 0;

  // Session Description
  if (!ParseSessionDescription(message, &current_pos, &session_id,
                               &session_version, &session_td, &session_extmaps,
                               &session_connection_addr, desc.get(), error)) {
    return false;
  }

  // Media Description
  std::vector<std::unique_ptr<JsepIceCandidate>> candidates;
  if (!ParseMediaDescription(message, session_td, session_extmaps, &current_pos,
                             session_connection_addr, desc.get(), &candidates,
                             error)) {
    return false;
  }

  jdesc->Initialize(std::move(desc), session_id, session_version);

  // Attach the candidates parsed from the media sections.
  for (const auto& candidate : candidates) {
    jdesc->AddCandidate(candidate.get());
  }
  return true;
}
+
// Deserializes a single candidate line (raw "candidate:..." form allowed)
// into `jcandidate`. Returns false and fills `error` on parse failure.
bool SdpDeserializeCandidate(absl::string_view message,
                             JsepIceCandidate* jcandidate,
                             SdpParseError* error) {
  RTC_DCHECK(jcandidate != NULL);
  Candidate candidate;
  // `is_raw` is true: accept "candidate:<...>" without the "a=" prefix.
  if (!ParseCandidate(message, &candidate, error, true)) {
    return false;
  }
  jcandidate->SetCandidate(candidate);
  return true;
}
+
// Deserializes a single candidate line into `candidate` and tags it with
// `transport_name`. Returns false and fills `error` on parse failure.
bool SdpDeserializeCandidate(absl::string_view transport_name,
                             absl::string_view message,
                             cricket::Candidate* candidate,
                             SdpParseError* error) {
  RTC_DCHECK(candidate != nullptr);
  // `is_raw` is true: accept "candidate:<...>" without the "a=" prefix.
  if (!ParseCandidate(message, candidate, error, true)) {
    return false;
  }
  candidate->set_transport_name(transport_name);
  return true;
}
+
// Parses one ICE candidate line (RFC 5245 candidate-attribute) into
// `candidate`. `message` must contain exactly one line, optionally prefixed
// with "a=" and optionally terminated by a single newline (a trailing CR is
// also trimmed). When `is_raw` is true the input came in as a bare candidate
// string (not from an SDP body) and failure messages are phrased accordingly.
// Returns false and fills `error` on any parse failure.
bool ParseCandidate(absl::string_view message,
                    Candidate* candidate,
                    SdpParseError* error,
                    bool is_raw) {
  RTC_DCHECK(candidate != NULL);

  // Makes sure `message` contains only one line.
  absl::string_view first_line;

  size_t line_end = message.find(kNewLineChar);
  if (line_end == absl::string_view::npos) {
    first_line = message;
  } else if (line_end + 1 == message.size()) {
    // A single trailing newline is accepted.
    first_line = message.substr(0, line_end);
  } else {
    return ParseFailed(message, 0, "Expect one line only", error);
  }

  // Trim return char, if any.
  first_line = TrimReturnChar(first_line);

  // From WebRTC draft section 4.8.1.1 candidate-attribute should be
  // candidate:<candidate> when trickled, but we still support
  // a=candidate:<blah>CRLF for backward compatibility and for parsing a line
  // from the SDP.
  if (IsLineType(first_line, kLineTypeAttributes)) {
    first_line = first_line.substr(kLinePrefixLength);
  }

  std::string attribute_candidate;
  std::string candidate_value;

  // `first_line` must be in the form of "candidate:<value>".
  if (!rtc::tokenize_first(first_line, kSdpDelimiterColonChar,
                           &attribute_candidate, &candidate_value) ||
      attribute_candidate != kAttributeCandidate) {
    if (is_raw) {
      rtc::StringBuilder description;
      description << "Expect line: " << kAttributeCandidate
                  << ":"
                     "<candidate-str>";
      return ParseFailed(first_line, 0, description.Release(), error);
    } else {
      return ParseFailedExpectLine(first_line, 0, kLineTypeAttributes,
                                   kAttributeCandidate, error);
    }
  }

  std::vector<absl::string_view> fields =
      rtc::split(candidate_value, kSdpDelimiterSpaceChar);

  // RFC 5245
  // a=candidate:<foundation> <component-id> <transport> <priority>
  // <connection-address> <port> typ <candidate-types>
  // [raddr <connection-address>] [rport <port>]
  // *(SP extension-att-name SP extension-att-value)
  const size_t expected_min_fields = 8;
  if (fields.size() < expected_min_fields ||
      (fields[6] != kAttributeCandidateTyp)) {
    return ParseFailedExpectMinFieldNum(first_line, expected_min_fields, error);
  }
  const absl::string_view foundation = fields[0];

  int component_id = 0;
  if (!GetValueFromString(first_line, fields[1], &component_id, error)) {
    return false;
  }
  const absl::string_view transport = fields[2];
  uint32_t priority = 0;
  if (!GetValueFromString(first_line, fields[3], &priority, error)) {
    return false;
  }
  const absl::string_view connection_address = fields[4];
  int port = 0;
  if (!GetValueFromString(first_line, fields[5], &port, error)) {
    return false;
  }
  if (!IsValidPort(port)) {
    return ParseFailed(first_line, "Invalid port number.", error);
  }
  SocketAddress address(connection_address, port);

  // Only UDP, TCP, and SSLTCP transports are accepted.
  absl::optional<cricket::ProtocolType> protocol =
      cricket::StringToProto(transport);
  if (!protocol) {
    return ParseFailed(first_line, "Unsupported transport type.", error);
  }
  bool tcp_protocol = false;
  switch (*protocol) {
    // Supported protocols.
    case cricket::PROTO_UDP:
      break;
    case cricket::PROTO_TCP:
    case cricket::PROTO_SSLTCP:
      tcp_protocol = true;
      break;
    default:
      return ParseFailed(first_line, "Unsupported transport type.", error);
  }

  // Map the SDP type token ("host"/"srflx"/"relay"/"prflx") to the internal
  // cricket port-type constant.
  std::string candidate_type;
  const absl::string_view type = fields[7];
  if (type == kCandidateHost) {
    candidate_type = cricket::LOCAL_PORT_TYPE;
  } else if (type == kCandidateSrflx) {
    candidate_type = cricket::STUN_PORT_TYPE;
  } else if (type == kCandidateRelay) {
    candidate_type = cricket::RELAY_PORT_TYPE;
  } else if (type == kCandidatePrflx) {
    candidate_type = cricket::PRFLX_PORT_TYPE;
  } else {
    return ParseFailed(first_line, "Unsupported candidate type.", error);
  }

  size_t current_position = expected_min_fields;
  SocketAddress related_address;
  // The 2 optional fields for related address
  // [raddr <connection-address>] [rport <port>]
  if (fields.size() >= (current_position + 2) &&
      fields[current_position] == kAttributeCandidateRaddr) {
    related_address.SetIP(fields[++current_position]);
    ++current_position;
  }
  if (fields.size() >= (current_position + 2) &&
      fields[current_position] == kAttributeCandidateRport) {
    int port = 0;
    if (!GetValueFromString(first_line, fields[++current_position], &port,
                            error)) {
      return false;
    }
    if (!IsValidPort(port)) {
      return ParseFailed(first_line, "Invalid port number.", error);
    }
    related_address.SetPort(port);
    ++current_position;
  }

  // If this is a TCP candidate, it has additional extension as defined in
  // RFC 6544.
  absl::string_view tcptype;
  if (fields.size() >= (current_position + 2) &&
      fields[current_position] == kTcpCandidateType) {
    tcptype = fields[++current_position];
    ++current_position;

    if (tcptype != cricket::TCPTYPE_ACTIVE_STR &&
        tcptype != cricket::TCPTYPE_PASSIVE_STR &&
        tcptype != cricket::TCPTYPE_SIMOPEN_STR) {
      return ParseFailed(first_line, "Invalid TCP candidate type.", error);
    }

    // A tcptype on a non-TCP candidate is rejected.
    if (!tcp_protocol) {
      return ParseFailed(first_line, "Invalid non-TCP candidate", error);
    }
  } else if (tcp_protocol) {
    // We allow the tcptype to be missing, for backwards compatibility,
    // treating it as a passive candidate.
    // TODO(bugs.webrtc.org/11466): Treat a missing tcptype as an error?
    tcptype = cricket::TCPTYPE_PASSIVE_STR;
  }

  // Extension
  // Though non-standard, we support the ICE ufrag and pwd being signaled on
  // the candidate to avoid issues with confusing which generation a candidate
  // belongs to when trickling multiple generations at the same time.
  absl::string_view username;
  absl::string_view password;
  uint32_t generation = 0;
  uint16_t network_id = 0;
  uint16_t network_cost = 0;
  // Extensions come in name/value pairs; `i + 1 < fields.size()` guarantees a
  // value exists for every recognized name, and unknown pairs are skipped.
  for (size_t i = current_position; i + 1 < fields.size(); ++i) {
    // RFC 5245
    // *(SP extension-att-name SP extension-att-value)
    if (fields[i] == kAttributeCandidateGeneration) {
      if (!GetValueFromString(first_line, fields[++i], &generation, error)) {
        return false;
      }
    } else if (fields[i] == kAttributeCandidateUfrag) {
      username = fields[++i];
    } else if (fields[i] == kAttributeCandidatePwd) {
      password = fields[++i];
    } else if (fields[i] == kAttributeCandidateNetworkId) {
      if (!GetValueFromString(first_line, fields[++i], &network_id, error)) {
        return false;
      }
    } else if (fields[i] == kAttributeCandidateNetworkCost) {
      if (!GetValueFromString(first_line, fields[++i], &network_cost, error)) {
        return false;
      }
      // Cap at the maximum cost the Candidate representation supports.
      network_cost = std::min(network_cost, rtc::kNetworkCostMax);
    } else {
      // Skip the unknown extension.
      ++i;
    }
  }

  *candidate = Candidate(component_id, cricket::ProtoToString(*protocol),
                         address, priority, username, password, candidate_type,
                         generation, foundation, network_id, network_cost);
  candidate->set_related_address(related_address);
  candidate->set_tcptype(tcptype);
  return true;
}
+
+bool ParseIceOptions(absl::string_view line,
+ std::vector<std::string>* transport_options,
+ SdpParseError* error) {
+ std::string ice_options;
+ if (!GetValue(line, kAttributeIceOption, &ice_options, error)) {
+ return false;
+ }
+ std::vector<absl::string_view> fields =
+ rtc::split(ice_options, kSdpDelimiterSpaceChar);
+ for (size_t i = 0; i < fields.size(); ++i) {
+ transport_options->emplace_back(fields[i]);
+ }
+ return true;
+}
+
+bool ParseSctpPort(absl::string_view line,
+ int* sctp_port,
+ SdpParseError* error) {
+ // draft-ietf-mmusic-sctp-sdp-26
+ // a=sctp-port
+ const size_t expected_min_fields = 2;
+ std::vector<absl::string_view> fields =
+ rtc::split(line.substr(kLinePrefixLength), kSdpDelimiterColonChar);
+ if (fields.size() < expected_min_fields) {
+ fields = rtc::split(line.substr(kLinePrefixLength), kSdpDelimiterSpaceChar);
+ }
+ if (fields.size() < expected_min_fields) {
+ return ParseFailedExpectMinFieldNum(line, expected_min_fields, error);
+ }
+ if (!rtc::FromString(fields[1], sctp_port)) {
+ return ParseFailed(line, "Invalid sctp port value.", error);
+ }
+ return true;
+}
+
+bool ParseSctpMaxMessageSize(absl::string_view line,
+ int* max_message_size,
+ SdpParseError* error) {
+ // draft-ietf-mmusic-sctp-sdp-26
+ // a=max-message-size:199999
+ const size_t expected_min_fields = 2;
+ std::vector<absl::string_view> fields =
+ rtc::split(line.substr(kLinePrefixLength), kSdpDelimiterColonChar);
+ if (fields.size() < expected_min_fields) {
+ return ParseFailedExpectMinFieldNum(line, expected_min_fields, error);
+ }
+ if (!rtc::FromString(fields[1], max_message_size)) {
+ return ParseFailed(line, "Invalid SCTP max message size.", error);
+ }
+ return true;
+}
+
// Parses an "a=extmap" line (RFC 5285 / RFC 8285) into `extmap`, including
// the RFC 6904 encrypted-header-extension wrapping. Returns false and fills
// `error` on failure. Note: any "/<direction>" suffix on the value is split
// off but otherwise ignored here.
bool ParseExtmap(absl::string_view line,
                 RtpExtension* extmap,
                 SdpParseError* error) {
  // RFC 5285
  // a=extmap:<value>["/"<direction>] <URI> <extensionattributes>
  std::vector<absl::string_view> fields =
      rtc::split(line.substr(kLinePrefixLength), kSdpDelimiterSpaceChar);
  const size_t expected_min_fields = 2;
  if (fields.size() < expected_min_fields) {
    return ParseFailedExpectMinFieldNum(line, expected_min_fields, error);
  }
  absl::string_view uri = fields[1];

  std::string value_direction;
  if (!GetValue(fields[0], kAttributeExtmap, &value_direction, error)) {
    return false;
  }
  // Strip an optional "/<direction>" suffix; only the numeric id is used.
  std::vector<absl::string_view> sub_fields =
      rtc::split(value_direction, kSdpDelimiterSlashChar);
  int value = 0;
  if (!GetValueFromString(line, sub_fields[0], &value, error)) {
    return false;
  }

  bool encrypted = false;
  if (uri == RtpExtension::kEncryptHeaderExtensionsUri) {
    // RFC 6904
    // a=extmap:<value["/"<direction>] urn:ietf:params:rtp-hdrext:encrypt <URI>
    // <extensionattributes>
    const size_t expected_min_fields_encrypted = expected_min_fields + 1;
    if (fields.size() < expected_min_fields_encrypted) {
      return ParseFailedExpectMinFieldNum(line, expected_min_fields_encrypted,
                                          error);
    }

    // The real URI follows the "encrypt" marker; reject a doubly-wrapped one.
    encrypted = true;
    uri = fields[2];
    if (uri == RtpExtension::kEncryptHeaderExtensionsUri) {
      return ParseFailed(line, "Recursive encrypted header.", error);
    }
  }

  *extmap = RtpExtension(uri, value, encrypted);
  return true;
}
+
+static void BuildSctpContentAttributes(
+ std::string* message,
+ const cricket::SctpDataContentDescription* data_desc) {
+ rtc::StringBuilder os;
+ if (data_desc->use_sctpmap()) {
+ // draft-ietf-mmusic-sctp-sdp-04
+ // a=sctpmap:sctpmap-number protocol [streams]
+ rtc::StringBuilder os;
+ InitAttrLine(kAttributeSctpmap, &os);
+ os << kSdpDelimiterColon << data_desc->port() << kSdpDelimiterSpace
+ << kDefaultSctpmapProtocol << kSdpDelimiterSpace
+ << cricket::kMaxSctpStreams;
+ AddLine(os.str(), message);
+ } else {
+ // draft-ietf-mmusic-sctp-sdp-23
+ // a=sctp-port:<port>
+ InitAttrLine(kAttributeSctpPort, &os);
+ os << kSdpDelimiterColon << data_desc->port();
+ AddLine(os.str(), message);
+ if (data_desc->max_message_size() != kDefaultSctpMaxMessageSize) {
+ InitAttrLine(kAttributeMaxMessageSize, &os);
+ os << kSdpDelimiterColon << data_desc->max_message_size();
+ AddLine(os.str(), message);
+ }
+ }
+}
+
+void BuildIceUfragPwd(const TransportInfo* transport_info,
+ std::string* message) {
+ RTC_DCHECK(transport_info);
+
+ rtc::StringBuilder os;
+ // RFC 5245
+ // ice-pwd-att = "ice-pwd" ":" password
+ // ice-ufrag-att = "ice-ufrag" ":" ufrag
+ // ice-ufrag
+ if (!transport_info->description.ice_ufrag.empty()) {
+ InitAttrLine(kAttributeIceUfrag, &os);
+ os << kSdpDelimiterColon << transport_info->description.ice_ufrag;
+ AddLine(os.str(), message);
+ }
+ // ice-pwd
+ if (!transport_info->description.ice_pwd.empty()) {
+ InitAttrLine(kAttributeIcePwd, &os);
+ os << kSdpDelimiterColon << transport_info->description.ice_pwd;
+ AddLine(os.str(), message);
+ }
+}
+
+void BuildDtlsFingerprintSetup(const TransportInfo* transport_info,
+ std::string* message) {
+ RTC_DCHECK(transport_info);
+
+ rtc::StringBuilder os;
+ // RFC 4572
+ // fingerprint-attribute =
+ // "fingerprint" ":" hash-func SP fingerprint
+ // When using max-bundle this is already included at session level.
+ // Insert the fingerprint attribute.
+ auto fingerprint = transport_info->description.identity_fingerprint.get();
+ if (!fingerprint) {
+ return;
+ }
+ InitAttrLine(kAttributeFingerprint, &os);
+ os << kSdpDelimiterColon << fingerprint->algorithm << kSdpDelimiterSpace
+ << fingerprint->GetRfc4572Fingerprint();
+ AddLine(os.str(), message);
+
+ // Inserting setup attribute.
+ if (transport_info->description.connection_role !=
+ cricket::CONNECTIONROLE_NONE) {
+ // Making sure we are not using "passive" mode.
+ cricket::ConnectionRole role = transport_info->description.connection_role;
+ std::string dtls_role_str;
+ const bool success = cricket::ConnectionRoleToString(role, &dtls_role_str);
+ RTC_DCHECK(success);
+ InitAttrLine(kAttributeSetup, &os);
+ os << kSdpDelimiterColon << dtls_role_str;
+ AddLine(os.str(), message);
+ }
+}
+
// Appends the "m=<media> <port> <proto> <fmt>" line for one content section.
// The fmt list is derived from the codecs (audio/video), the SCTP port or
// protocol token (data), or is empty (unsupported media), defaulting to "0".
void BuildMediaLine(const cricket::MediaType media_type,
                    const ContentInfo* content_info,
                    const MediaContentDescription* media_desc,
                    std::string* message) {
  rtc::StringBuilder os;

  // RFC 4566
  // m=<media> <port> <proto> <fmt>
  // fmt is a list of payload type numbers that MAY be used in the session.
  std::string type;
  std::string fmt;
  if (media_type == cricket::MEDIA_TYPE_AUDIO ||
      media_type == cricket::MEDIA_TYPE_VIDEO) {
    type = media_type == cricket::MEDIA_TYPE_AUDIO ? kMediaTypeAudio
                                                   : kMediaTypeVideo;
    // One space-prefixed payload type id per codec.
    for (const cricket::Codec& codec : media_desc->codecs()) {
      fmt.append(" ");
      fmt.append(rtc::ToString(codec.id));
    }
  } else if (media_type == cricket::MEDIA_TYPE_DATA) {
    type = kMediaTypeData;
    const cricket::SctpDataContentDescription* sctp_data_desc =
        media_desc->as_sctp();
    if (sctp_data_desc) {
      fmt.append(" ");

      // Legacy sctpmap form carries the port in fmt; the newer form carries
      // the protocol token instead.
      if (sctp_data_desc->use_sctpmap()) {
        fmt.append(rtc::ToString(sctp_data_desc->port()));
      } else {
        fmt.append(kDefaultSctpmapProtocol);
      }
    } else {
      RTC_DCHECK_NOTREACHED() << "Data description without SCTP";
    }
  } else if (media_type == cricket::MEDIA_TYPE_UNSUPPORTED) {
    const UnsupportedContentDescription* unsupported_desc =
        media_desc->as_unsupported();
    type = unsupported_desc->media_type();
  } else {
    RTC_DCHECK_NOTREACHED();
  }
  // The fmt must never be empty. If no codecs are found, set the fmt attribute
  // to 0.
  if (fmt.empty()) {
    fmt = " 0";
  }

  // The port number in the m line will be updated later when associated with
  // the candidates.
  //
  // A port value of 0 indicates that the m= section is rejected.
  // RFC 3264
  // To reject an offered stream, the port number in the corresponding stream in
  // the answer MUST be set to zero.
  //
  // However, the BUNDLE draft adds a new meaning to port zero, when used along
  // with a=bundle-only.
  std::string port = kDummyPort;
  if (content_info->rejected || content_info->bundle_only) {
    port = kMediaPortRejected;
  } else if (!media_desc->connection_address().IsNil()) {
    port = rtc::ToString(media_desc->connection_address().port());
  }

  // Add the m and c lines.
  InitLine(kLineTypeMedia, type, &os);
  os << " " << port << " " << media_desc->protocol() << fmt;
  AddLine(os.str(), message);
}
+
// Serializes one complete media section: the m= and c= lines, bandwidth,
// bundle-only, rtcp, candidates, transport (ICE/DTLS) attributes, mid, and
// finally the SCTP- or RTP-specific content attributes. A null `content_info`
// produces no output.
void BuildMediaDescription(const ContentInfo* content_info,
                           const TransportInfo* transport_info,
                           const cricket::MediaType media_type,
                           const std::vector<Candidate>& candidates,
                           int msid_signaling,
                           std::string* message) {
  RTC_DCHECK(message);
  if (!content_info) {
    return;
  }
  rtc::StringBuilder os;
  const MediaContentDescription* media_desc = content_info->media_description();
  RTC_DCHECK(media_desc);

  // Add the m line.
  BuildMediaLine(media_type, content_info, media_desc, message);
  // Add the c line. Unknown or missing connection addresses fall back to a
  // dummy IPv4 address.
  InitLine(kLineTypeConnection, kConnectionNettype, &os);
  if (media_desc->connection_address().IsNil()) {
    os << " " << kConnectionIpv4Addrtype << " " << kDummyAddress;
  } else if (media_desc->connection_address().family() == AF_INET) {
    os << " " << kConnectionIpv4Addrtype << " "
       << media_desc->connection_address().ipaddr().ToString();
  } else if (media_desc->connection_address().family() == AF_INET6) {
    os << " " << kConnectionIpv6Addrtype << " "
       << media_desc->connection_address().ipaddr().ToString();
  } else {
    os << " " << kConnectionIpv4Addrtype << " " << kDummyAddress;
  }
  AddLine(os.str(), message);

  // RFC 4566
  // b=AS:<bandwidth> or
  // b=TIAS:<bandwidth>
  // AS bandwidth is expressed in kbps (hence the /1000); TIAS in bps.
  int bandwidth = media_desc->bandwidth();
  std::string bandwidth_type = media_desc->bandwidth_type();
  if (bandwidth_type == kApplicationSpecificBandwidth && bandwidth >= 1000) {
    InitLine(kLineTypeSessionBandwidth, bandwidth_type, &os);
    bandwidth /= 1000;
    os << kSdpDelimiterColon << bandwidth;
    AddLine(os.str(), message);
  } else if (bandwidth_type == kTransportSpecificBandwidth && bandwidth > 0) {
    InitLine(kLineTypeSessionBandwidth, bandwidth_type, &os);
    os << kSdpDelimiterColon << bandwidth;
    AddLine(os.str(), message);
  }

  // Add the a=bundle-only line.
  if (content_info->bundle_only) {
    InitAttrLine(kAttributeBundleOnly, &os);
    AddLine(os.str(), message);
  }

  // Add the a=rtcp line.
  if (cricket::IsRtpProtocol(media_desc->protocol())) {
    std::string rtcp_line = GetRtcpLine(candidates);
    if (!rtcp_line.empty()) {
      AddLine(rtcp_line, message);
    }
  }

  // Build the a=candidate lines. We don't include ufrag and pwd in the
  // candidates in the SDP to avoid redundancy.
  BuildCandidate(candidates, false, message);

  // Use the transport_info to build the media level ice-ufrag, ice-pwd
  // and DTLS fingerprint and setup attributes.
  if (transport_info) {
    BuildIceUfragPwd(transport_info, message);

    // draft-petithuguenin-mmusic-ice-attributes-level-03
    BuildIceOptions(transport_info->description.transport_options, message);

    // Also include the DTLS fingerprint and setup attribute if available.
    BuildDtlsFingerprintSetup(transport_info, message);
  }

  // RFC 3388
  // mid-attribute = "a=mid:" identification-tag
  // identification-tag = token
  // Use the content name as the mid identification-tag.
  InitAttrLine(kAttributeMid, &os);
  os << kSdpDelimiterColon << content_info->name;
  AddLine(os.str(), message);

  // Finish with the protocol-specific attribute block.
  if (cricket::IsDtlsSctp(media_desc->protocol())) {
    const cricket::SctpDataContentDescription* data_desc =
        media_desc->as_sctp();
    BuildSctpContentAttributes(message, data_desc);
  } else if (cricket::IsRtpProtocol(media_desc->protocol())) {
    BuildRtpContentAttributes(media_desc, media_type, msid_signaling, message);
  }
}
+
// Serializes the RTP-specific attributes of a media section: extmap,
// direction, msid, rtcp-mux/rsize, crypto, rtpmap (plus fmtp/rtcp-fb),
// ssrc/ssrc-group, rid and simulcast lines. The order of emission matters
// for SDP round-trip fidelity; do not reorder the sections below.
void BuildRtpContentAttributes(const MediaContentDescription* media_desc,
                               const cricket::MediaType media_type,
                               int msid_signaling,
                               std::string* message) {
  SimulcastSdpSerializer serializer;
  rtc::StringBuilder os;
  // RFC 8285
  // a=extmap-allow-mixed
  // The attribute MUST be either on session level or media level. We support
  // responding on both levels, however, we don't respond on media level if it's
  // set on session level.
  if (media_desc->extmap_allow_mixed_enum() ==
      MediaContentDescription::kMedia) {
    InitAttrLine(kAttributeExtmapAllowMixed, &os);
    AddLine(os.str(), message);
  }
  BuildRtpHeaderExtensions(media_desc->rtp_header_extensions(), message);

  // RFC 3264
  // a=sendrecv || a=sendonly || a=sendrecv || a=inactive
  switch (media_desc->direction()) {
    // Special case that for sdp purposes should be treated same as inactive.
    case RtpTransceiverDirection::kStopped:
    case RtpTransceiverDirection::kInactive:
      InitAttrLine(kAttributeInactive, &os);
      break;
    case RtpTransceiverDirection::kSendOnly:
      InitAttrLine(kAttributeSendOnly, &os);
      break;
    case RtpTransceiverDirection::kRecvOnly:
      InitAttrLine(kAttributeRecvOnly, &os);
      break;
    case RtpTransceiverDirection::kSendRecv:
      InitAttrLine(kAttributeSendRecv, &os);
      break;
    default:
      RTC_DCHECK_NOTREACHED();
      InitAttrLine(kAttributeSendRecv, &os);
      break;
  }
  AddLine(os.str(), message);

  // Specified in https://datatracker.ietf.org/doc/draft-ietf-mmusic-msid/16/
  // a=msid:<msid-id> <msid-appdata>
  // The msid-id is a 1*64 token char representing the media stream id, and the
  // msid-appdata is a 1*64 token char representing the track id. There is a
  // line for every media stream, with a special msid-id value of "-"
  // representing no streams. The value of "msid-appdata" MUST be identical for
  // all lines.
  if (msid_signaling & cricket::kMsidSignalingMediaSection) {
    const StreamParamsVec& streams = media_desc->streams();
    if (streams.size() == 1u) {
      const StreamParams& track = streams[0];
      std::vector<std::string> stream_ids = track.stream_ids();
      if (stream_ids.empty()) {
        // "-" signals a track with no associated streams (Unified Plan).
        stream_ids.push_back(kNoStreamMsid);
      }
      for (const std::string& stream_id : stream_ids) {
        InitAttrLine(kAttributeMsid, &os);
        os << kSdpDelimiterColon << stream_id << kSdpDelimiterSpace << track.id;
        AddLine(os.str(), message);
      }
    } else if (streams.size() > 1u) {
      RTC_LOG(LS_WARNING)
          << "Trying to serialize Unified Plan SDP with more than "
             "one track in a media section. Omitting 'a=msid'.";
    }
  }

  // RFC 5761
  // a=rtcp-mux
  if (media_desc->rtcp_mux()) {
    InitAttrLine(kAttributeRtcpMux, &os);
    AddLine(os.str(), message);
  }

  // RFC 5506
  // a=rtcp-rsize
  if (media_desc->rtcp_reduced_size()) {
    InitAttrLine(kAttributeRtcpReducedSize, &os);
    AddLine(os.str(), message);
  }

  // Non-standard Google conference-mode flag.
  if (media_desc->conference_mode()) {
    InitAttrLine(kAttributeXGoogleFlag, &os);
    os << kSdpDelimiterColon << kValueConference;
    AddLine(os.str(), message);
  }

  if (media_desc->remote_estimate()) {
    InitAttrLine(kAttributeRtcpRemoteEstimate, &os);
    AddLine(os.str(), message);
  }

  // RFC 4568
  // a=crypto:<tag> <crypto-suite> <key-params> [<session-params>]
  for (const CryptoParams& crypto_params : media_desc->cryptos()) {
    InitAttrLine(kAttributeCrypto, &os);
    os << kSdpDelimiterColon << crypto_params.tag << " "
       << crypto_params.crypto_suite << " " << crypto_params.key_params;
    if (!crypto_params.session_params.empty()) {
      os << " " << crypto_params.session_params;
    }
    AddLine(os.str(), message);
  }

  // RFC 4566
  // a=rtpmap:<payload type> <encoding name>/<clock rate>
  // [/<encodingparameters>]
  BuildRtpmap(media_desc, media_type, message);

  for (const StreamParams& track : media_desc->streams()) {
    // Build the ssrc-group lines.
    for (const SsrcGroup& ssrc_group : track.ssrc_groups) {
      // RFC 5576
      // a=ssrc-group:<semantics> <ssrc-id> ...
      if (ssrc_group.ssrcs.empty()) {
        continue;
      }
      InitAttrLine(kAttributeSsrcGroup, &os);
      os << kSdpDelimiterColon << ssrc_group.semantics;
      for (uint32_t ssrc : ssrc_group.ssrcs) {
        os << kSdpDelimiterSpace << rtc::ToString(ssrc);
      }
      AddLine(os.str(), message);
    }
    // Build the ssrc lines for each ssrc.
    for (uint32_t ssrc : track.ssrcs) {
      // RFC 5576
      // a=ssrc:<ssrc-id> cname:<value>
      AddSsrcLine(ssrc, kSsrcAttributeCname, track.cname, message);

      if (msid_signaling & cricket::kMsidSignalingSsrcAttribute) {
        // draft-alvestrand-mmusic-msid-00
        // a=ssrc:<ssrc-id> msid:identifier [appdata]
        // The appdata consists of the "id" attribute of a MediaStreamTrack,
        // which corresponds to the "id" attribute of StreamParams.
        // Since a=ssrc msid signaling is used in Plan B SDP semantics, and
        // multiple stream ids are not supported for Plan B, we are only adding
        // a line for the first media stream id here.
        const std::string& track_stream_id = track.first_stream_id();
        // We use a special msid-id value of "-" to represent no streams,
        // for Unified Plan compatibility. Plan B will always have a
        // track_stream_id.
        const std::string& stream_id =
            track_stream_id.empty() ? kNoStreamMsid : track_stream_id;
        InitAttrLine(kAttributeSsrc, &os);
        os << kSdpDelimiterColon << ssrc << kSdpDelimiterSpace
           << kSsrcAttributeMsid << kSdpDelimiterColon << stream_id
           << kSdpDelimiterSpace << track.id;
        AddLine(os.str(), message);
      }
    }

    // Build the rid lines for each layer of the track
    for (const RidDescription& rid_description : track.rids()) {
      InitAttrLine(kAttributeRid, &os);
      os << kSdpDelimiterColon
         << serializer.SerializeRidDescription(rid_description);
      AddLine(os.str(), message);
    }
  }

  // Receive-direction rids are serialized the same way as send rids above.
  for (const RidDescription& rid_description : media_desc->receive_rids()) {
    InitAttrLine(kAttributeRid, &os);
    os << kSdpDelimiterColon
       << serializer.SerializeRidDescription(rid_description);
    AddLine(os.str(), message);
  }

  // Simulcast (a=simulcast)
  // https://tools.ietf.org/html/draft-ietf-mmusic-sdp-simulcast-13#section-5.1
  if (media_desc->HasSimulcast()) {
    const auto& simulcast = media_desc->simulcast_description();
    InitAttrLine(kAttributeSimulcast, &os);
    os << kSdpDelimiterColon
       << serializer.SerializeSimulcastDescription(simulcast);
    AddLine(os.str(), message);
  }
}
+
+void BuildRtpHeaderExtensions(const RtpHeaderExtensions& extensions,
+ std::string* message) {
+ rtc::StringBuilder os;
+
+ // RFC 8285
+ // a=extmap:<value>["/"<direction>] <URI> <extensionattributes>
+ // The definitions MUST be either all session level or all media level. This
+ // implementation uses all media level.
+ for (const RtpExtension& extension : extensions) {
+ InitAttrLine(kAttributeExtmap, &os);
+ os << kSdpDelimiterColon << extension.id;
+ if (extension.encrypt) {
+ os << kSdpDelimiterSpace << RtpExtension::kEncryptHeaderExtensionsUri;
+ }
+ os << kSdpDelimiterSpace << extension.uri;
+ AddLine(os.str(), message);
+ }
+}
+
+void WriteFmtpHeader(int payload_type, rtc::StringBuilder* os) {
+ // fmtp header: a=fmtp:`payload_type` <parameters>
+ // Add a=fmtp
+ InitAttrLine(kAttributeFmtp, os);
+ // Add :`payload_type`
+ *os << kSdpDelimiterColon << payload_type;
+}
+
+void WritePacketizationHeader(int payload_type, rtc::StringBuilder* os) {
+ // packetization header: a=packetization:`payload_type` <packetization_format>
+ // Add a=packetization
+ InitAttrLine(kAttributePacketization, os);
+ // Add :`payload_type`
+ *os << kSdpDelimiterColon << payload_type;
+}
+
+void WriteRtcpFbHeader(int payload_type, rtc::StringBuilder* os) {
+ // rtcp-fb header: a=rtcp-fb:`payload_type`
+ // <parameters>/<ccm <ccm_parameters>>
+ // Add a=rtcp-fb
+ InitAttrLine(kAttributeRtcpFb, os);
+ // Add :
+ *os << kSdpDelimiterColon;
+ if (payload_type == kWildcardPayloadType) {
+ *os << "*";
+ } else {
+ *os << payload_type;
+ }
+}
+
+void WriteFmtpParameter(absl::string_view parameter_name,
+ absl::string_view parameter_value,
+ rtc::StringBuilder* os) {
+ if (parameter_name.empty()) {
+ // RFC 2198 and RFC 4733 don't use key-value pairs.
+ *os << parameter_value;
+ } else {
+ // fmtp parameters: `parameter_name`=`parameter_value`
+ *os << parameter_name << kSdpDelimiterEqual << parameter_value;
+ }
+}
+
+bool IsFmtpParam(absl::string_view name) {
+ // RFC 4855, section 3 specifies the mapping of media format parameters to SDP
+ // parameters. Only ptime, maxptime, channels and rate are placed outside of
+ // the fmtp line. In WebRTC, channels and rate are already handled separately
+ // and thus not included in the CodecParameterMap.
+ return name != kCodecParamPTime && name != kCodecParamMaxPTime;
+}
+
+bool WriteFmtpParameters(const cricket::CodecParameterMap& parameters,
+ rtc::StringBuilder* os) {
+ bool empty = true;
+ const char* delimiter = ""; // No delimiter before first parameter.
+ for (const auto& entry : parameters) {
+ const std::string& key = entry.first;
+ const std::string& value = entry.second;
+
+ if (IsFmtpParam(key)) {
+ *os << delimiter;
+ // A semicolon before each subsequent parameter.
+ delimiter = kSdpDelimiterSemicolon;
+ WriteFmtpParameter(key, value, os);
+ empty = false;
+ }
+ }
+
+ return !empty;
+}
+
+void AddFmtpLine(const cricket::Codec& codec, std::string* message) {
+ rtc::StringBuilder os;
+ WriteFmtpHeader(codec.id, &os);
+ os << kSdpDelimiterSpace;
+ // Create FMTP line and check that it's nonempty.
+ if (WriteFmtpParameters(codec.params, &os)) {
+ AddLine(os.str(), message);
+ }
+ return;
+}
+
+void AddPacketizationLine(const cricket::Codec& codec, std::string* message) {
+ if (!codec.packetization) {
+ return;
+ }
+ rtc::StringBuilder os;
+ WritePacketizationHeader(codec.id, &os);
+ os << " " << *codec.packetization;
+ AddLine(os.str(), message);
+}
+
+void AddRtcpFbLines(const cricket::Codec& codec, std::string* message) {
+ for (const cricket::FeedbackParam& param : codec.feedback_params.params()) {
+ rtc::StringBuilder os;
+ WriteRtcpFbHeader(codec.id, &os);
+ os << " " << param.id();
+ if (!param.param().empty()) {
+ os << " " << param.param();
+ }
+ AddLine(os.str(), message);
+ }
+}
+
bool GetMinValue(const std::vector<int>& values, int* value) {
  // Stores the smallest element of `values` in `*value`. Returns false when
  // `values` is empty, leaving `*value` untouched.
  if (values.empty()) {
    return false;
  }
  *value = *std::min_element(values.begin(), values.end());
  return true;
}
+
+bool GetParameter(const std::string& name,
+ const cricket::CodecParameterMap& params,
+ int* value) {
+ std::map<std::string, std::string>::const_iterator found = params.find(name);
+ if (found == params.end()) {
+ return false;
+ }
+ if (!rtc::FromString(found->second, value)) {
+ return false;
+ }
+ return true;
+}
+
// Serializes the a=rtpmap lines (plus per-codec packetization, rtcp-fb and
// fmtp lines) for a media section. For audio it additionally aggregates the
// codecs' ptime/minptime/maxptime parameters into single media-level
// a=maxptime and a=ptime attributes.
void BuildRtpmap(const MediaContentDescription* media_desc,
                 const cricket::MediaType media_type,
                 std::string* message) {
  RTC_DCHECK(message != NULL);
  RTC_DCHECK(media_desc != NULL);
  rtc::StringBuilder os;
  if (media_type == cricket::MEDIA_TYPE_VIDEO) {
    for (const cricket::Codec& codec : media_desc->codecs()) {
      // RFC 4566
      // a=rtpmap:<payload type> <encoding name>/<clock rate>
      // [/<encodingparameters>]
      // The wildcard payload type gets no rtpmap line of its own, but its
      // packetization/rtcp-fb/fmtp lines are still emitted.
      if (codec.id != kWildcardPayloadType) {
        InitAttrLine(kAttributeRtpmap, &os);
        os << kSdpDelimiterColon << codec.id << " " << codec.name << "/"
           << cricket::kVideoCodecClockrate;
        AddLine(os.str(), message);
      }
      AddPacketizationLine(codec, message);
      AddRtcpFbLines(codec, message);
      AddFmtpLine(codec, message);
    }
  } else if (media_type == cricket::MEDIA_TYPE_AUDIO) {
    std::vector<int> ptimes;
    std::vector<int> maxptimes;
    int max_minptime = 0;
    for (const cricket::Codec& codec : media_desc->codecs()) {
      RTC_DCHECK(!codec.name.empty());
      // RFC 4566
      // a=rtpmap:<payload type> <encoding name>/<clock rate>
      // [/<encodingparameters>]
      InitAttrLine(kAttributeRtpmap, &os);
      os << kSdpDelimiterColon << codec.id << " ";
      os << codec.name << "/" << codec.clockrate;
      // Channel count is only written when it differs from the default of 1.
      if (codec.channels != 1) {
        os << "/" << codec.channels;
      }
      AddLine(os.str(), message);
      AddRtcpFbLines(codec, message);
      AddFmtpLine(codec, message);
      int minptime = 0;
      if (GetParameter(kCodecParamMinPTime, codec.params, &minptime)) {
        max_minptime = std::max(minptime, max_minptime);
      }
      int ptime;
      if (GetParameter(kCodecParamPTime, codec.params, &ptime)) {
        ptimes.push_back(ptime);
      }
      int maxptime;
      if (GetParameter(kCodecParamMaxPTime, codec.params, &maxptime)) {
        maxptimes.push_back(maxptime);
      }
    }
    // Populate the maxptime attribute with the smallest maxptime of all codecs
    // under the same m-line.
    int min_maxptime = INT_MAX;
    if (GetMinValue(maxptimes, &min_maxptime)) {
      AddAttributeLine(kCodecParamMaxPTime, min_maxptime, message);
    }
    // Holds vacuously (min_maxptime stays INT_MAX) when no codec specified a
    // maxptime.
    RTC_DCHECK_GE(min_maxptime, max_minptime);
    // Populate the ptime attribute with the smallest ptime or the largest
    // minptime, whichever is the largest, for all codecs under the same m-line.
    int ptime = INT_MAX;
    if (GetMinValue(ptimes, &ptime)) {
      ptime = std::min(ptime, min_maxptime);
      ptime = std::max(ptime, max_minptime);
      AddAttributeLine(kCodecParamPTime, ptime, message);
    }
  }
}
+
// Serializes one "a=candidate" line per candidate (RFC 5245 grammar, plus the
// generation/ufrag/network-id/network-cost extensions). Candidates with an
// unknown type are skipped. ufrag is only included when `include_ufrag` is
// set, to avoid redundancy with the media-level ice-ufrag attribute.
void BuildCandidate(const std::vector<Candidate>& candidates,
                    bool include_ufrag,
                    std::string* message) {
  rtc::StringBuilder os;

  for (const Candidate& candidate : candidates) {
    // RFC 5245
    // a=candidate:<foundation> <component-id> <transport> <priority>
    // <connection-address> <port> typ <candidate-types>
    // [raddr <connection-address>] [rport <port>]
    // *(SP extension-att-name SP extension-att-value)
    std::string type;
    // Map the cricket candidate type to "host" / "srflx" / "prflx" / "relay"
    if (candidate.type() == cricket::LOCAL_PORT_TYPE) {
      type = kCandidateHost;
    } else if (candidate.type() == cricket::STUN_PORT_TYPE) {
      type = kCandidateSrflx;
    } else if (candidate.type() == cricket::RELAY_PORT_TYPE) {
      type = kCandidateRelay;
    } else if (candidate.type() == cricket::PRFLX_PORT_TYPE) {
      type = kCandidatePrflx;
      // Peer reflexive candidate may be signaled for being removed.
    } else {
      RTC_DCHECK_NOTREACHED();
      // Never write out candidates if we don't know the type.
      continue;
    }

    InitAttrLine(kAttributeCandidate, &os);
    // An mDNS-style candidate has no resolved IP; serialize its hostname.
    os << kSdpDelimiterColon << candidate.foundation() << " "
       << candidate.component() << " " << candidate.protocol() << " "
       << candidate.priority() << " "
       << (candidate.address().ipaddr().IsNil()
               ? candidate.address().hostname()
               : candidate.address().ipaddr().ToString())
       << " " << candidate.address().PortAsString() << " "
       << kAttributeCandidateTyp << " " << type << " ";

    // Related address
    if (!candidate.related_address().IsNil()) {
      os << kAttributeCandidateRaddr << " "
         << candidate.related_address().ipaddr().ToString() << " "
         << kAttributeCandidateRport << " "
         << candidate.related_address().PortAsString() << " ";
    }

    // Note that we allow the tcptype to be missing, for backwards
    // compatibility; the implementation treats this as a passive candidate.
    // TODO(bugs.webrtc.org/11466): Treat a missing tcptype as an error?
    if (candidate.protocol() == cricket::TCP_PROTOCOL_NAME &&
        !candidate.tcptype().empty()) {
      os << kTcpCandidateType << " " << candidate.tcptype() << " ";
    }

    // Extensions
    os << kAttributeCandidateGeneration << " " << candidate.generation();
    if (include_ufrag && !candidate.username().empty()) {
      os << " " << kAttributeCandidateUfrag << " " << candidate.username();
    }
    if (candidate.network_id() > 0) {
      os << " " << kAttributeCandidateNetworkId << " "
         << candidate.network_id();
    }
    if (candidate.network_cost() > 0) {
      os << " " << kAttributeCandidateNetworkCost << " "
         << candidate.network_cost();
    }

    AddLine(os.str(), message);
  }
}
+
+void BuildIceOptions(const std::vector<std::string>& transport_options,
+ std::string* message) {
+ if (!transport_options.empty()) {
+ rtc::StringBuilder os;
+ InitAttrLine(kAttributeIceOption, &os);
+ os << kSdpDelimiterColon << transport_options[0];
+ for (size_t i = 1; i < transport_options.size(); ++i) {
+ os << kSdpDelimiterSpace << transport_options[i];
+ }
+ AddLine(os.str(), message);
+ }
+}
+
// Parses an SDP "c=" line (RFC 4566: c=<nettype> <addrtype>
// <connection-address>) into `addr`. Only nettype "IN" is accepted, multicast
// addresses (containing '/') are rejected, and the declared addrtype
// (IP4/IP6) must match the family of the parsed address. Returns false and
// fills `error` on any failure.
bool ParseConnectionData(absl::string_view line,
                         rtc::SocketAddress* addr,
                         SdpParseError* error) {
  // Parse the line from left to right.
  std::string token;
  std::string rightpart;
  // RFC 4566
  // c=<nettype> <addrtype> <connection-address>
  // Skip the "c="
  if (!rtc::tokenize_first(line, kSdpDelimiterEqualChar, &token, &rightpart)) {
    return ParseFailed(line, "Failed to parse the network type.", error);
  }

  // Extract and verify the <nettype>
  if (!rtc::tokenize_first(rightpart, kSdpDelimiterSpaceChar, &token,
                           &rightpart) ||
      token != kConnectionNettype) {
    return ParseFailed(line,
                       "Failed to parse the connection data. The network type "
                       "is not currently supported.",
                       error);
  }

  // Extract the "<addrtype>" and "<connection-address>".
  if (!rtc::tokenize_first(rightpart, kSdpDelimiterSpaceChar, &token,
                           &rightpart)) {
    return ParseFailed(line, "Failed to parse the address type.", error);
  }

  // The rightpart part should be the IP address without the slash which is used
  // for multicast.
  if (rightpart.find('/') != std::string::npos) {
    return ParseFailed(line,
                       "Failed to parse the connection data. Multicast is not "
                       "currently supported.",
                       error);
  }
  addr->SetIP(rightpart);

  // Verify that the addrtype matches the type of the parsed address.
  // On mismatch, `addr` is reset so the caller never observes a half-parsed
  // address.
  if ((addr->family() == AF_INET && token != "IP4") ||
      (addr->family() == AF_INET6 && token != "IP6")) {
    addr->Clear();
    return ParseFailed(
        line,
        "Failed to parse the connection data. The address type is mismatching.",
        error);
  }
  return true;
}
+
// Parses the session-level part of an SDP blob (everything before the first
// m= line), advancing `*pos` past every consumed line.
// Outputs:
//  - `session_id` / `session_version` from the o= line,
//  - session-level ICE/DTLS settings (ufrag, pwd, ice-lite, ice-options,
//    fingerprint, setup role) into `session_td`,
//  - session-level a=extmap entries into `session_extmaps`,
//  - the session c= address into `connection_addr`,
//  - a=group, a=msid-semantic and a=extmap-allow-mixed state onto `desc`.
// Required lines (v=, o=, s=, t=) produce a parse failure when missing;
// optional lines (i=, u=, e=, p=, b=, r=, z=, k=) are skipped.
bool ParseSessionDescription(absl::string_view message,
                             size_t* pos,
                             std::string* session_id,
                             std::string* session_version,
                             TransportDescription* session_td,
                             RtpHeaderExtensions* session_extmaps,
                             rtc::SocketAddress* connection_addr,
                             cricket::SessionDescription* desc,
                             SdpParseError* error) {
  absl::optional<absl::string_view> line;

  // Defaults; flipped below if the corresponding attributes are present.
  desc->set_msid_supported(false);
  desc->set_extmap_allow_mixed(false);
  // RFC 4566
  // v= (protocol version)
  line = GetLineWithType(message, pos, kLineTypeVersion);
  if (!line) {
    return ParseFailedExpectLine(message, *pos, kLineTypeVersion, std::string(),
                                 error);
  }
  // RFC 4566
  // o=<username> <sess-id> <sess-version> <nettype> <addrtype>
  // <unicast-address>
  line = GetLineWithType(message, pos, kLineTypeOrigin);
  if (!line) {
    return ParseFailedExpectLine(message, *pos, kLineTypeOrigin, std::string(),
                                 error);
  }
  std::vector<absl::string_view> fields =
      rtc::split(line->substr(kLinePrefixLength), kSdpDelimiterSpaceChar);
  const size_t expected_fields = 6;
  if (fields.size() != expected_fields) {
    return ParseFailedExpectFieldNum(*line, expected_fields, error);
  }
  *session_id = std::string(fields[1]);
  *session_version = std::string(fields[2]);

  // RFC 4566
  // s= (session name)
  line = GetLineWithType(message, pos, kLineTypeSessionName);
  if (!line) {
    return ParseFailedExpectLine(message, *pos, kLineTypeSessionName,
                                 std::string(), error);
  }

  // optional lines
  // Those are the optional lines, so shouldn't return false if not present.
  // RFC 4566
  // i=* (session information)
  GetLineWithType(message, pos, kLineTypeSessionInfo);

  // RFC 4566
  // u=* (URI of description)
  GetLineWithType(message, pos, kLineTypeSessionUri);

  // RFC 4566
  // e=* (email address)
  GetLineWithType(message, pos, kLineTypeSessionEmail);

  // RFC 4566
  // p=* (phone number)
  GetLineWithType(message, pos, kLineTypeSessionPhone);

  // RFC 4566
  // c=* (connection information -- not required if included in
  //      all media)
  if (absl::optional<absl::string_view> cline =
          GetLineWithType(message, pos, kLineTypeConnection);
      cline.has_value()) {
    if (!ParseConnectionData(*cline, connection_addr, error)) {
      return false;
    }
  }

  // RFC 4566
  // b=* (zero or more bandwidth information lines)
  while (GetLineWithType(message, pos, kLineTypeSessionBandwidth).has_value()) {
    // By pass zero or more b lines.
  }

  // RFC 4566
  // One or more time descriptions ("t=" and "r=" lines; see below)
  // t=  (time the session is active)
  // r=* (zero or more repeat times)
  // Ensure there's at least one time description
  if (!GetLineWithType(message, pos, kLineTypeTiming).has_value()) {
    return ParseFailedExpectLine(message, *pos, kLineTypeTiming, std::string(),
                                 error);
  }

  while (GetLineWithType(message, pos, kLineTypeRepeatTimes).has_value()) {
    // By pass zero or more r lines.
  }

  // Go through the rest of the time descriptions
  while (GetLineWithType(message, pos, kLineTypeTiming).has_value()) {
    while (GetLineWithType(message, pos, kLineTypeRepeatTimes).has_value()) {
      // By pass zero or more r lines.
    }
  }

  // RFC 4566
  // z=* (time zone adjustments)
  GetLineWithType(message, pos, kLineTypeTimeZone);

  // RFC 4566
  // k=* (encryption key)
  GetLineWithType(message, pos, kLineTypeEncryptionKey);

  // RFC 4566
  // a=* (zero or more session attribute lines)
  // Unrecognized attributes are silently ignored.
  while (absl::optional<absl::string_view> aline =
             GetLineWithType(message, pos, kLineTypeAttributes)) {
    if (HasAttribute(*aline, kAttributeGroup)) {
      if (!ParseGroupAttribute(*aline, desc, error)) {
        return false;
      }
    } else if (HasAttribute(*aline, kAttributeIceUfrag)) {
      if (!GetValue(*aline, kAttributeIceUfrag, &(session_td->ice_ufrag),
                    error)) {
        return false;
      }
    } else if (HasAttribute(*aline, kAttributeIcePwd)) {
      if (!GetValue(*aline, kAttributeIcePwd, &(session_td->ice_pwd), error)) {
        return false;
      }
    } else if (HasAttribute(*aline, kAttributeIceLite)) {
      session_td->ice_mode = cricket::ICEMODE_LITE;
    } else if (HasAttribute(*aline, kAttributeIceOption)) {
      if (!ParseIceOptions(*aline, &(session_td->transport_options), error)) {
        return false;
      }
    } else if (HasAttribute(*aline, kAttributeFingerprint)) {
      // At most one fingerprint is allowed at the session level.
      if (session_td->identity_fingerprint.get()) {
        return ParseFailed(
            *aline,
            "Can't have multiple fingerprint attributes at the same level.",
            error);
      }
      std::unique_ptr<rtc::SSLFingerprint> fingerprint;
      if (!ParseFingerprintAttribute(*aline, &fingerprint, error)) {
        return false;
      }
      session_td->identity_fingerprint = std::move(fingerprint);
    } else if (HasAttribute(*aline, kAttributeSetup)) {
      if (!ParseDtlsSetup(*aline, &(session_td->connection_role), error)) {
        return false;
      }
    } else if (HasAttribute(*aline, kAttributeMsidSemantics)) {
      std::string semantics;
      if (!GetValue(*aline, kAttributeMsidSemantics, &semantics, error)) {
        return false;
      }
      desc->set_msid_supported(
          CaseInsensitiveFind(semantics, kMediaStreamSemantic));
    } else if (HasAttribute(*aline, kAttributeExtmapAllowMixed)) {
      desc->set_extmap_allow_mixed(true);
    } else if (HasAttribute(*aline, kAttributeExtmap)) {
      RtpExtension extmap;
      if (!ParseExtmap(*aline, &extmap, error)) {
        return false;
      }
      session_extmaps->push_back(extmap);
    }
  }
  return true;
}
+
+bool ParseGroupAttribute(absl::string_view line,
+ cricket::SessionDescription* desc,
+ SdpParseError* error) {
+ RTC_DCHECK(desc != NULL);
+
+ // RFC 5888 and draft-holmberg-mmusic-sdp-bundle-negotiation-00
+ // a=group:BUNDLE video voice
+ std::vector<absl::string_view> fields =
+ rtc::split(line.substr(kLinePrefixLength), kSdpDelimiterSpaceChar);
+ std::string semantics;
+ if (!GetValue(fields[0], kAttributeGroup, &semantics, error)) {
+ return false;
+ }
+ cricket::ContentGroup group(semantics);
+ for (size_t i = 1; i < fields.size(); ++i) {
+ group.AddContentName(fields[i]);
+ }
+ desc->AddGroup(group);
+ return true;
+}
+
+static bool ParseFingerprintAttribute(
+ absl::string_view line,
+ std::unique_ptr<rtc::SSLFingerprint>* fingerprint,
+ SdpParseError* error) {
+ std::vector<absl::string_view> fields =
+ rtc::split(line.substr(kLinePrefixLength), kSdpDelimiterSpaceChar);
+ const size_t expected_fields = 2;
+ if (fields.size() != expected_fields) {
+ return ParseFailedExpectFieldNum(line, expected_fields, error);
+ }
+
+ // The first field here is "fingerprint:<hash>.
+ std::string algorithm;
+ if (!GetValue(fields[0], kAttributeFingerprint, &algorithm, error)) {
+ return false;
+ }
+
+ // Downcase the algorithm. Note that we don't need to downcase the
+ // fingerprint because hex_decode can handle upper-case.
+ absl::c_transform(algorithm, algorithm.begin(), ::tolower);
+
+ // The second field is the digest value. De-hexify it.
+ *fingerprint =
+ rtc::SSLFingerprint::CreateUniqueFromRfc4572(algorithm, fields[1]);
+ if (!*fingerprint) {
+ return ParseFailed(line, "Failed to create fingerprint from the digest.",
+ error);
+ }
+
+ return true;
+}
+
+static bool ParseDtlsSetup(absl::string_view line,
+ cricket::ConnectionRole* role_ptr,
+ SdpParseError* error) {
+ // setup-attr = "a=setup:" role
+ // role = "active" / "passive" / "actpass" / "holdconn"
+ std::vector<absl::string_view> fields =
+ rtc::split(line.substr(kLinePrefixLength), kSdpDelimiterColonChar);
+ const size_t expected_fields = 2;
+ if (fields.size() != expected_fields) {
+ return ParseFailedExpectFieldNum(line, expected_fields, error);
+ }
+ if (absl::optional<cricket::ConnectionRole> role =
+ cricket::StringToConnectionRole(fields[1]);
+ role.has_value()) {
+ *role_ptr = *role;
+ return true;
+ }
+ return ParseFailed(line, "Invalid attribute value.", error);
+}
+
// Parses an "a=msid:" line (RFC 8830). Appends the stream id to `stream_ids`
// (unless it is the special "-" no-stream marker or already present) and sets
// `*track_id`. The two-token "a=msid:<stream id> <track id>" form is a
// backward-compatibility extension beyond RFC 8830/JSEP; when only one token
// is present, the previously seen track id (if any) is reused, otherwise an
// empty track id is recorded so a random one can be generated later. All msid
// lines in one m= section must agree on the track id.
static bool ParseMsidAttribute(absl::string_view line,
                               std::vector<std::string>* stream_ids,
                               std::string* track_id,
                               SdpParseError* error) {
  // https://datatracker.ietf.org/doc/rfc8830/
  // a=msid:<msid-value>
  // msid-value = msid-id [ SP msid-appdata ]
  // msid-id = 1*64token-char ; see RFC 4566
  // msid-appdata = 1*64token-char  ; see RFC 4566
  // Note that JSEP stipulates not sending msid-appdata so
  // a=msid:<stream id> <track id>
  // is supported for backward compability reasons only.
  std::vector<std::string> fields;
  size_t num_fields = rtc::tokenize(line.substr(kLinePrefixLength),
                                    kSdpDelimiterSpaceChar, &fields);
  if (num_fields < 1 || num_fields > 2) {
    return ParseFailed(line, "Expected a stream ID and optionally a track ID",
                       error);
  }
  if (num_fields == 1) {
    // A trailing space means a track id was declared but left empty.
    if (line.back() == kSdpDelimiterSpaceChar) {
      return ParseFailed(line, "Missing track ID in msid attribute.", error);
    }
    if (!track_id->empty()) {
      fields.push_back(*track_id);
    } else {
      // Ending with an empty string track will cause a random track id
      // to be generated later in the process.
      fields.push_back("");
    }
  }
  RTC_DCHECK_EQ(fields.size(), 2);

  // All track ids should be the same within an m section in a Unified Plan SDP.
  if (!track_id->empty() && track_id->compare(fields[1]) != 0) {
    return ParseFailed(
        line, "Two different track IDs in msid attribute in one m= section",
        error);
  }
  *track_id = fields[1];

  // msid:<msid-id>
  std::string new_stream_id;
  if (!GetValue(fields[0], kAttributeMsid, &new_stream_id, error)) {
    return false;
  }
  if (new_stream_id.empty()) {
    return ParseFailed(line, "Missing stream ID in msid attribute.", error);
  }
  // The special value "-" indicates "no MediaStream".
  // Deduplicate: only record a stream id the first time it is seen.
  if (new_stream_id.compare(kNoStreamMsid) != 0 &&
      !absl::c_any_of(*stream_ids,
                      [&new_stream_id](const std::string& existing_stream_id) {
                        return new_stream_id == existing_stream_id;
                      })) {
    stream_ids->push_back(new_stream_id);
  }
  return true;
}
+
// Filters `rids` per draft-ietf-mmusic-rid: duplicate rid-ids are discarded
// entirely (every occurrence, not just the later ones), payload types not
// present on the m= line are stripped from each rid's "pt=" list, and a rid
// whose explicit "pt=" list becomes empty after stripping is discarded. A rid
// with no "pt=" restriction at all is always kept.
static void RemoveInvalidRidDescriptions(const std::vector<int>& payload_types,
                                         std::vector<RidDescription>* rids) {
  RTC_DCHECK(rids);
  std::set<std::string> to_remove;
  std::set<std::string> unique_rids;

  // Check the rids to see which ones should be removed.
  for (RidDescription& rid : *rids) {
    // In the case of a duplicate, the entire "a=rid" line, and all "a=rid"
    // lines with rid-ids that duplicate this line, are discarded and MUST NOT
    // be included in the SDP Answer.
    auto pair = unique_rids.insert(rid.rid);
    // Insert will "fail" if element already exists.
    if (!pair.second) {
      to_remove.insert(rid.rid);
      continue;
    }

    // If the "a=rid" line contains a "pt=", the list of payload types
    // is verified against the list of valid payload types for the media
    // section (that is, those listed on the "m=" line).  Any PT missing
    // from the "m=" line is discarded from the set of values in the
    // "pt=".  If no values are left in the "pt=" parameter after this
    // processing, then the "a=rid" line is discarded.
    if (rid.payload_types.empty()) {
      // If formats were not specified, rid should not be removed.
      continue;
    }

    // Note: Spec does not mention how to handle duplicate formats.
    // Media section does not handle duplicates either.
    std::set<int> removed_formats;
    for (int payload_type : rid.payload_types) {
      if (!absl::c_linear_search(payload_types, payload_type)) {
        removed_formats.insert(payload_type);
      }
    }

    // Erase-remove the formats flagged above.
    rid.payload_types.erase(
        std::remove_if(rid.payload_types.begin(), rid.payload_types.end(),
                       [&removed_formats](int format) {
                         return removed_formats.count(format) > 0;
                       }),
        rid.payload_types.end());

    // If all formats were removed then remove the rid alogether.
    if (rid.payload_types.empty()) {
      to_remove.insert(rid.rid);
    }
  }

  // Remove every rid description that appears in the to_remove list.
  if (!to_remove.empty()) {
    rids->erase(std::remove_if(rids->begin(), rids->end(),
                               [&to_remove](const RidDescription& rid) {
                                 return to_remove.count(rid.rid) > 0;
                               }),
                rids->end());
  }
}
+
+// Create a new list (because SimulcastLayerList is immutable) without any
+// layers that have a rid in the to_remove list.
+// If a group of alternatives is empty after removing layers, the group should
+// be removed altogether.
+static SimulcastLayerList RemoveRidsFromSimulcastLayerList(
+ const std::set<std::string>& to_remove,
+ const SimulcastLayerList& layers) {
+ SimulcastLayerList result;
+ for (const std::vector<SimulcastLayer>& vector : layers) {
+ std::vector<SimulcastLayer> new_layers;
+ for (const SimulcastLayer& layer : vector) {
+ if (to_remove.find(layer.rid) == to_remove.end()) {
+ new_layers.push_back(layer);
+ }
+ }
+ // If all layers were removed, do not add an entry.
+ if (!new_layers.empty()) {
+ result.AddLayerWithAlternatives(new_layers);
+ }
+ }
+
+ return result;
+}
+
// Will remove Simulcast Layers if:
// 1. They appear in both send and receive directions.
// 2. They do not appear in the list of `valid_rids`.
// The removal set is computed across both directions first, then applied to
// both layer lists, so a rid invalid in one direction disappears from both.
static void RemoveInvalidRidsFromSimulcast(
    const std::vector<RidDescription>& valid_rids,
    SimulcastDescription* simulcast) {
  RTC_DCHECK(simulcast);
  std::set<std::string> to_remove;
  std::vector<SimulcastLayer> all_send_layers =
      simulcast->send_layers().GetAllLayers();
  std::vector<SimulcastLayer> all_receive_layers =
      simulcast->receive_layers().GetAllLayers();

  // If a rid appears in both send and receive directions, remove it from both.
  // This algorithm runs in O(n^2) time, but for small n (as is the case with
  // simulcast layers) it should still perform well.
  for (const SimulcastLayer& send_layer : all_send_layers) {
    if (absl::c_any_of(all_receive_layers,
                       [&send_layer](const SimulcastLayer& layer) {
                         return layer.rid == send_layer.rid;
                       })) {
      to_remove.insert(send_layer.rid);
    }
  }

  // Add any rid that is not in the valid list to the remove set.
  // A send-direction layer must be backed by a kSend rid description.
  for (const SimulcastLayer& send_layer : all_send_layers) {
    if (absl::c_none_of(valid_rids, [&send_layer](const RidDescription& rid) {
          return send_layer.rid == rid.rid &&
                 rid.direction == cricket::RidDirection::kSend;
        })) {
      to_remove.insert(send_layer.rid);
    }
  }

  // Add any rid that is not in the valid list to the remove set.
  // Likewise, a receive-direction layer needs a kReceive rid description.
  for (const SimulcastLayer& receive_layer : all_receive_layers) {
    if (absl::c_none_of(
            valid_rids, [&receive_layer](const RidDescription& rid) {
              return receive_layer.rid == rid.rid &&
                     rid.direction == cricket::RidDirection::kReceive;
            })) {
      to_remove.insert(receive_layer.rid);
    }
  }

  simulcast->send_layers() =
      RemoveRidsFromSimulcastLayerList(to_remove, simulcast->send_layers());
  simulcast->receive_layers() =
      RemoveRidsFromSimulcastLayerList(to_remove, simulcast->receive_layers());
}
+
// RFC 3551
// PT   encoding    media type  clock rate   channels
//      name                    (Hz)
// 0    PCMU        A            8,000       1
// 1    reserved    A
// 2    reserved    A
// 3    GSM         A            8,000       1
// 4    G723        A            8,000       1
// 5    DVI4        A            8,000       1
// 6    DVI4        A           16,000       1
// 7    LPC         A            8,000       1
// 8    PCMA        A            8,000       1
// 9    G722        A            8,000       1
// 10   L16         A           44,100       2
// 11   L16         A           44,100       1
// 12   QCELP       A            8,000       1
// 13   CN          A            8,000       1
// 14   MPA         A           90,000       (see text)
// 15   G728        A            8,000       1
// 16   DVI4        A           11,025       1
// 17   DVI4        A           22,050       1
// 18   G729        A            8,000       1
struct StaticPayloadAudioCodec {
  const char* name;
  int clockrate;
  size_t channels;
};
// Indexed directly by RTP payload type: kStaticPayloadAudioCodecs[pt]
// describes static payload type `pt` from the table above.
static const StaticPayloadAudioCodec kStaticPayloadAudioCodecs[] = {
    {"PCMU", 8000, 1},  {"reserved", 0, 0}, {"reserved", 0, 0},
    {"GSM", 8000, 1},   {"G723", 8000, 1},  {"DVI4", 8000, 1},
    {"DVI4", 16000, 1}, {"LPC", 8000, 1},   {"PCMA", 8000, 1},
    {"G722", 8000, 1},  {"L16", 44100, 2},  {"L16", 44100, 1},
    {"QCELP", 8000, 1}, {"CN", 8000, 1},    {"MPA", 90000, 1},
    {"G728", 8000, 1},  {"DVI4", 11025, 1}, {"DVI4", 22050, 1},
    {"G729", 8000, 1},
};
+
+void MaybeCreateStaticPayloadAudioCodecs(const std::vector<int>& fmts,
+ MediaContentDescription* media_desc) {
+ if (!media_desc) {
+ return;
+ }
+ RTC_DCHECK(media_desc->codecs().empty());
+ for (int payload_type : fmts) {
+ if (!media_desc->HasCodec(payload_type) && payload_type >= 0 &&
+ static_cast<uint32_t>(payload_type) <
+ arraysize(kStaticPayloadAudioCodecs)) {
+ std::string encoding_name = kStaticPayloadAudioCodecs[payload_type].name;
+ int clock_rate = kStaticPayloadAudioCodecs[payload_type].clockrate;
+ size_t channels = kStaticPayloadAudioCodecs[payload_type].channels;
+ media_desc->AddCodec(cricket::CreateAudioCodec(
+ payload_type, encoding_name, clock_rate, channels));
+ }
+ }
+}
+
// Creates an Audio- or VideoContentDescription for one m= section, delegates
// the heavy lifting to ParseContent(), and then reorders the parsed codecs to
// match the payload-type order of the m= line's <fmt> list. Returns nullptr
// on parse failure (with `error` filled by ParseContent) or for unsupported
// media types.
static std::unique_ptr<MediaContentDescription> ParseContentDescription(
    absl::string_view message,
    const cricket::MediaType media_type,
    int mline_index,
    absl::string_view protocol,
    const std::vector<int>& payload_types,
    size_t* pos,
    std::string* content_name,
    bool* bundle_only,
    int* msid_signaling,
    TransportDescription* transport,
    std::vector<std::unique_ptr<JsepIceCandidate>>* candidates,
    webrtc::SdpParseError* error) {
  std::unique_ptr<MediaContentDescription> media_desc;
  if (media_type == cricket::MediaType::MEDIA_TYPE_AUDIO) {
    media_desc = std::make_unique<AudioContentDescription>();
  } else if (media_type == cricket::MediaType::MEDIA_TYPE_VIDEO) {
    media_desc = std::make_unique<VideoContentDescription>();
  } else {
    RTC_DCHECK_NOTREACHED();
    return nullptr;
  }

  media_desc->set_extmap_allow_mixed_enum(MediaContentDescription::kNo);
  if (!ParseContent(message, media_type, mline_index, protocol, payload_types,
                    pos, content_name, bundle_only, msid_signaling,
                    media_desc.get(), transport, candidates, error)) {
    return nullptr;
  }
  // Sort the codecs according to the m-line fmt list.
  std::unordered_map<int, int> payload_type_preferences;
  // "size + 1" so that the lowest preference payload type has a preference of
  // 1, which is greater than the default (0) for payload types not in the fmt
  // list.
  int preference = static_cast<int>(payload_types.size() + 1);
  for (int pt : payload_types) {
    payload_type_preferences[pt] = preference--;
  }
  std::vector<cricket::Codec> codecs = media_desc->codecs();
  // Stable relative order among unlisted codecs is not guaranteed; they all
  // share preference 0 and sort after every listed payload type.
  absl::c_sort(codecs, [&payload_type_preferences](const cricket::Codec& a,
                                                   const cricket::Codec& b) {
    return payload_type_preferences[a.id] > payload_type_preferences[b.id];
  });
  media_desc->set_codecs(codecs);
  return media_desc;
}
+
// Parses all m= sections of the SDP (RFC 4566: m=<media> <port> <proto>
// <fmt>...), appending one ContentInfo + TransportInfo per section to `desc`.
// Audio/video sections must use an RTP protocol; "application" with DTLS/SCTP
// becomes an SCTP data section; anything else becomes a rejected
// UnsupportedContentDescription. A zero port rejects the section unless
// a=bundle-only is present. Media-level ICE/DTLS attributes override the
// session-level values copied from `session_td`; session-level extmaps are
// pushed down onto RTP sections (mixing session- and media-level extmaps is a
// parse error). The per-section msid signaling bits are OR-ed together and
// stored on `desc`.
bool ParseMediaDescription(
    absl::string_view message,
    const TransportDescription& session_td,
    const RtpHeaderExtensions& session_extmaps,
    size_t* pos,
    const rtc::SocketAddress& session_connection_addr,
    cricket::SessionDescription* desc,
    std::vector<std::unique_ptr<JsepIceCandidate>>* candidates,
    SdpParseError* error) {
  RTC_DCHECK(desc != NULL);
  int mline_index = -1;
  int msid_signaling = 0;

  // Zero or more media descriptions
  // RFC 4566
  // m=<media> <port> <proto> <fmt>
  while (absl::optional<absl::string_view> mline =
             GetLineWithType(message, pos, kLineTypeMedia)) {
    ++mline_index;

    std::vector<absl::string_view> fields =
        rtc::split(mline->substr(kLinePrefixLength), kSdpDelimiterSpaceChar);

    const size_t expected_min_fields = 4;
    if (fields.size() < expected_min_fields) {
      return ParseFailedExpectMinFieldNum(*mline, expected_min_fields, error);
    }
    bool port_rejected = false;
    // RFC 3264
    // To reject an offered stream, the port number in the corresponding stream
    // in the answer MUST be set to zero.
    if (fields[1] == kMediaPortRejected) {
      port_rejected = true;
    }

    int port = 0;
    if (!rtc::FromString<int>(fields[1], &port) || !IsValidPort(port)) {
      return ParseFailed(*mline, "The port number is invalid", error);
    }
    absl::string_view protocol = fields[2];

    // <fmt>
    // Payload types are only meaningful for RTP protocols.
    std::vector<int> payload_types;
    if (cricket::IsRtpProtocol(protocol)) {
      for (size_t j = 3; j < fields.size(); ++j) {
        int pl = 0;
        if (!GetPayloadTypeFromString(*mline, fields[j], &pl, error)) {
          return false;
        }
        payload_types.push_back(pl);
      }
    }

    // Make a temporary TransportDescription based on `session_td`.
    // Some of this gets overwritten by ParseContent.
    TransportDescription transport(
        session_td.transport_options, session_td.ice_ufrag, session_td.ice_pwd,
        session_td.ice_mode, session_td.connection_role,
        session_td.identity_fingerprint.get());

    std::unique_ptr<MediaContentDescription> content;
    std::string content_name;
    bool bundle_only = false;
    int section_msid_signaling = 0;
    absl::string_view media_type = fields[0];
    if ((media_type == kMediaTypeVideo || media_type == kMediaTypeAudio) &&
        !cricket::IsRtpProtocol(protocol)) {
      return ParseFailed(*mline, "Unsupported protocol for media type", error);
    }
    if (media_type == kMediaTypeVideo) {
      content = ParseContentDescription(
          message, cricket::MEDIA_TYPE_VIDEO, mline_index, protocol,
          payload_types, pos, &content_name, &bundle_only,
          &section_msid_signaling, &transport, candidates, error);
    } else if (media_type == kMediaTypeAudio) {
      content = ParseContentDescription(
          message, cricket::MEDIA_TYPE_AUDIO, mline_index, protocol,
          payload_types, pos, &content_name, &bundle_only,
          &section_msid_signaling, &transport, candidates, error);
    } else if (media_type == kMediaTypeData && cricket::IsDtlsSctp(protocol)) {
      // The draft-03 format is:
      // m=application <port> DTLS/SCTP <sctp-port>...
      // use_sctpmap should be false.
      // The draft-26 format is:
      // m=application <port> UDP/DTLS/SCTP webrtc-datachannel
      // use_sctpmap should be false.
      auto data_desc = std::make_unique<SctpDataContentDescription>();
      // Default max message size is 64K
      // according to draft-ietf-mmusic-sctp-sdp-26
      data_desc->set_max_message_size(kDefaultSctpMaxMessageSize);
      int p;
      // fields[3] is either a numeric SCTP port (draft-03) or the
      // "webrtc-datachannel" token (draft-26).
      if (rtc::FromString(fields[3], &p)) {
        data_desc->set_port(p);
      } else if (fields[3] == kDefaultSctpmapProtocol) {
        data_desc->set_use_sctpmap(false);
      }
      if (!ParseContent(message, cricket::MEDIA_TYPE_DATA, mline_index,
                        protocol, payload_types, pos, &content_name,
                        &bundle_only, &section_msid_signaling, data_desc.get(),
                        &transport, candidates, error)) {
        return false;
      }
      data_desc->set_protocol(protocol);
      content = std::move(data_desc);
    } else {
      RTC_LOG(LS_WARNING) << "Unsupported media type: " << *mline;
      auto unsupported_desc =
          std::make_unique<UnsupportedContentDescription>(media_type);
      if (!ParseContent(message, cricket::MEDIA_TYPE_UNSUPPORTED, mline_index,
                        protocol, payload_types, pos, &content_name,
                        &bundle_only, &section_msid_signaling,
                        unsupported_desc.get(), &transport, candidates,
                        error)) {
        return false;
      }
      unsupported_desc->set_protocol(protocol);
      content = std::move(unsupported_desc);
    }
    if (!content.get()) {
      // ParseContentDescription returns NULL if failed.
      return false;
    }

    msid_signaling |= section_msid_signaling;

    bool content_rejected = false;
    // A port of 0 is not interpreted as a rejected m= section when it's
    // used along with a=bundle-only.
    if (bundle_only) {
      if (!port_rejected) {
        // Usage of bundle-only with a nonzero port is unspecified. So just
        // ignore bundle-only if we see this.
        bundle_only = false;
        RTC_LOG(LS_WARNING)
            << "a=bundle-only attribute observed with a nonzero "
               "port; this usage is unspecified so the attribute is being "
               "ignored.";
      }
    } else {
      // If not using bundle-only, interpret port 0 in the normal way; the m=
      // section is being rejected.
      content_rejected = port_rejected;
    }

    if (content->as_unsupported()) {
      content_rejected = true;
    } else if (cricket::IsRtpProtocol(protocol) && !content->as_sctp()) {
      content->set_protocol(std::string(protocol));
      // Set the extmap.
      if (!session_extmaps.empty() &&
          !content->rtp_header_extensions().empty()) {
        return ParseFailed("",
                           "The a=extmap MUST be either all session level or "
                           "all media level.",
                           error);
      }
      for (size_t i = 0; i < session_extmaps.size(); ++i) {
        content->AddRtpHeaderExtension(session_extmaps[i]);
      }
    } else if (content->as_sctp()) {
      // Do nothing, it's OK
    } else {
      RTC_LOG(LS_WARNING) << "Parse failed with unknown protocol " << protocol;
      return false;
    }

    // Use the session level connection address if the media level addresses are
    // not specified.
    rtc::SocketAddress address;
    address = content->connection_address().IsNil()
                  ? session_connection_addr
                  : content->connection_address();
    address.SetPort(port);
    content->set_connection_address(address);

    desc->AddContent(content_name,
                     cricket::IsDtlsSctp(protocol) ? MediaProtocolType::kSctp
                                                   : MediaProtocolType::kRtp,
                     content_rejected, bundle_only, std::move(content));
    // Create TransportInfo with the media level "ice-pwd" and "ice-ufrag".
    desc->AddTransportInfo(TransportInfo(content_name, transport));
  }

  desc->set_msid_signaling(msid_signaling);

  // Any leftover text after the last m= section (or with no m= section at
  // all) is a parse error.
  size_t end_of_message = message.size();
  if (mline_index == -1 && *pos != end_of_message) {
    ParseFailed(message, *pos, "Expects m line.", error);
    return false;
  }
  return true;
}
+
+void AddParameters(const cricket::CodecParameterMap& parameters,
+ cricket::Codec* codec) {
+ for (const auto& entry : parameters) {
+ const std::string& key = entry.first;
+ const std::string& value = entry.second;
+ codec->SetParam(key, value);
+ }
+}
+
// Attaches a single rtcp-fb parameter to `codec`.
void AddFeedbackParameter(const cricket::FeedbackParam& feedback_param,
                          cricket::Codec* codec) {
  codec->AddFeedbackParam(feedback_param);
}
+
+void AddFeedbackParameters(const cricket::FeedbackParams& feedback_params,
+ cricket::Codec* codec) {
+ for (const cricket::FeedbackParam& param : feedback_params.params()) {
+ codec->AddFeedbackParam(param);
+ }
+}
+
+// Gets the current codec setting associated with `payload_type`. If there
+// is no Codec associated with that payload type it returns an empty codec
+// with that payload type.
+cricket::Codec GetCodecWithPayloadType(
+ cricket::MediaType type,
+ const std::vector<cricket::Codec>& codecs,
+ int payload_type) {
+ const cricket::Codec* codec = FindCodecById(codecs, payload_type);
+ if (codec)
+ return *codec;
+ // Return empty codec with `payload_type`.
+ if (type == cricket::MEDIA_TYPE_AUDIO) {
+ return cricket::CreateAudioCodec(payload_type, "", 0, 0);
+ } else {
+ return cricket::CreateVideoCodec(payload_type, "");
+ }
+}
+
+// Updates or creates a new codec entry in the media description.
+void AddOrReplaceCodec(MediaContentDescription* content_desc,
+ const cricket::Codec& codec) {
+ std::vector<cricket::Codec> codecs = content_desc->codecs();
+ bool found = false;
+ for (cricket::Codec& existing_codec : codecs) {
+ if (codec.id == existing_codec.id) {
+ // Overwrite existing codec with the new codec.
+ existing_codec = codec;
+ found = true;
+ break;
+ }
+ }
+ if (!found) {
+ content_desc->AddCodec(codec);
+ return;
+ }
+ content_desc->set_codecs(codecs);
+}
+
+// Adds or updates existing codec corresponding to `payload_type` according
+// to `parameters`.
+void UpdateCodec(MediaContentDescription* content_desc,
+ int payload_type,
+ const cricket::CodecParameterMap& parameters) {
+ // Codec might already have been populated (from rtpmap).
+ cricket::Codec new_codec = GetCodecWithPayloadType(
+ content_desc->type(), content_desc->codecs(), payload_type);
+ AddParameters(parameters, &new_codec);
+ AddOrReplaceCodec(content_desc, new_codec);
+}
+
+// Adds or updates existing codec corresponding to `payload_type` according
+// to `feedback_param`.
+void UpdateCodec(MediaContentDescription* content_desc,
+ int payload_type,
+ const cricket::FeedbackParam& feedback_param) {
+ // Codec might already have been populated (from rtpmap).
+ cricket::Codec new_codec = GetCodecWithPayloadType(
+ content_desc->type(), content_desc->codecs(), payload_type);
+ AddFeedbackParameter(feedback_param, &new_codec);
+ AddOrReplaceCodec(content_desc, new_codec);
+}
+
+// Adds or updates existing video codec corresponding to `payload_type`
+// according to `packetization`.
+void UpdateVideoCodecPacketization(MediaContentDescription* desc,
+ int payload_type,
+ absl::string_view packetization) {
+ if (packetization != cricket::kPacketizationParamRaw) {
+ // Ignore unsupported packetization attribute.
+ return;
+ }
+
+ // Codec might already have been populated (from rtpmap).
+ cricket::Codec codec =
+ GetCodecWithPayloadType(desc->type(), desc->codecs(), payload_type);
+ codec.packetization = std::string(packetization);
+ AddOrReplaceCodec(desc, codec);
+}
+
+// Removes the first codec entry whose id is the wildcard payload type ("*")
+// from `codecs` and returns it, or absl::nullopt when no such entry exists.
+absl::optional<cricket::Codec> PopWildcardCodec(
+    std::vector<cricket::Codec>* codecs) {
+  RTC_DCHECK(codecs);
+  auto match = absl::c_find_if(*codecs, [](const cricket::Codec& codec) {
+    return codec.id == kWildcardPayloadType;
+  });
+  if (match == codecs->end()) {
+    return absl::nullopt;
+  }
+  cricket::Codec popped = *match;
+  codecs->erase(match);
+  return popped;
+}
+
+// Folds the feedback parameters of the wildcard ("*") codec entry, if one
+// exists, into every remaining codec of `desc`, and drops the wildcard entry.
+void UpdateFromWildcardCodecs(cricket::MediaContentDescription* desc) {
+  RTC_DCHECK(desc);
+  std::vector<cricket::Codec> remaining = desc->codecs();
+  absl::optional<cricket::Codec> wildcard = PopWildcardCodec(&remaining);
+  if (!wildcard.has_value()) {
+    return;
+  }
+  for (cricket::Codec& codec : remaining) {
+    AddFeedbackParameters(wildcard->feedback_params, &codec);
+  }
+  desc->set_codecs(remaining);
+}
+
+// Sets the parameter `name` to `value` on every codec of `desc`.
+// A no-op when `value` is empty.
+void AddAudioAttribute(const std::string& name,
+                       absl::string_view value,
+                       MediaContentDescription* desc) {
+  RTC_DCHECK(desc);
+  if (value.empty()) {
+    return;
+  }
+  auto updated_codecs = desc->codecs();
+  for (auto& codec : updated_codecs) {
+    codec.params[name] = std::string(value);
+  }
+  desc->set_codecs(updated_codecs);
+}
+
+// Parses one media section of `message` (all lines from `*pos` up to the
+// next m= line or end of input) and fills in `content_name`, `bundle_only`,
+// `msid_signaling`, `media_desc`, `transport` and `candidates`.
+// Returns false and sets `error` when a recognized line is malformed;
+// unrecognized lines are logged at verbose level and skipped.
+bool ParseContent(absl::string_view message,
+                  const cricket::MediaType media_type,
+                  int mline_index,
+                  absl::string_view protocol,
+                  const std::vector<int>& payload_types,
+                  size_t* pos,
+                  std::string* content_name,
+                  bool* bundle_only,
+                  int* msid_signaling,
+                  MediaContentDescription* media_desc,
+                  TransportDescription* transport,
+                  std::vector<std::unique_ptr<JsepIceCandidate>>* candidates,
+                  SdpParseError* error) {
+  RTC_DCHECK(media_desc != NULL);
+  RTC_DCHECK(content_name != NULL);
+  RTC_DCHECK(transport != NULL);
+
+  if (media_type == cricket::MEDIA_TYPE_AUDIO) {
+    MaybeCreateStaticPayloadAudioCodecs(payload_types, media_desc);
+  }
+
+  // The media level "ice-ufrag" and "ice-pwd".
+  // The candidates before update the media level "ice-pwd" and "ice-ufrag".
+  Candidates candidates_orig;
+  std::string mline_id;
+  // Tracks created out of the ssrc attributes.
+  StreamParamsVec tracks;
+  SsrcInfoVec ssrc_infos;
+  SsrcGroupVec ssrc_groups;
+  std::string maxptime_as_string;
+  std::string ptime_as_string;
+  std::vector<std::string> stream_ids;
+  std::string track_id;
+  SimulcastSdpSerializer deserializer;
+  std::vector<RidDescription> rids;
+  SimulcastDescription simulcast;
+
+  // Loop until the next m line
+  while (!IsLineType(message, kLineTypeMedia, *pos)) {
+    absl::optional<absl::string_view> line = GetLine(message, pos);
+    if (!line.has_value()) {
+      if (*pos >= message.size()) {
+        break;  // Done parsing
+      } else {
+        return ParseFailed(message, *pos, "Invalid SDP line.", error);
+      }
+    }
+
+    // RFC 4566
+    // b=* (zero or more bandwidth information lines)
+    if (IsLineType(*line, kLineTypeSessionBandwidth)) {
+      std::string bandwidth;
+      std::string bandwidth_type;
+      if (!rtc::tokenize_first(line->substr(kLinePrefixLength),
+                               kSdpDelimiterColonChar, &bandwidth_type,
+                               &bandwidth)) {
+        return ParseFailed(
+            *line,
+            "b= syntax error, does not match b=<modifier>:<bandwidth-value>.",
+            error);
+      }
+      if (!(bandwidth_type == kApplicationSpecificBandwidth ||
+            bandwidth_type == kTransportSpecificBandwidth)) {
+        // Ignore unknown bandwidth types.
+        continue;
+      }
+      int b = 0;
+      if (!GetValueFromString(*line, bandwidth, &b, error)) {
+        return false;
+      }
+      // TODO(deadbeef): Historically, applications may be setting a value
+      // of -1 to mean "unset any previously set bandwidth limit", even
+      // though omitting the "b=AS" entirely will do just that. Once we've
+      // transitioned applications to doing the right thing, it would be
+      // better to treat this as a hard error instead of just ignoring it.
+      if (bandwidth_type == kApplicationSpecificBandwidth && b == -1) {
+        RTC_LOG(LS_WARNING) << "Ignoring \"b=AS:-1\"; will be treated as \"no "
+                               "bandwidth limit\".";
+        continue;
+      }
+      if (b < 0) {
+        return ParseFailed(
+            *line, "b=" + bandwidth_type + " value can't be negative.", error);
+      }
+      // Convert values. Prevent integer overflow.
+      if (bandwidth_type == kApplicationSpecificBandwidth) {
+        b = std::min(b, INT_MAX / 1000) * 1000;
+      } else {
+        b = std::min(b, INT_MAX);
+      }
+      media_desc->set_bandwidth(b);
+      media_desc->set_bandwidth_type(bandwidth_type);
+      continue;
+    }
+
+    // Parse the media level connection data.
+    if (IsLineType(*line, kLineTypeConnection)) {
+      rtc::SocketAddress addr;
+      if (!ParseConnectionData(*line, &addr, error)) {
+        return false;
+      }
+      media_desc->set_connection_address(addr);
+      continue;
+    }
+
+    if (!IsLineType(*line, kLineTypeAttributes)) {
+      // TODO(deadbeef): Handle other lines if needed.
+      RTC_LOG(LS_VERBOSE) << "Ignored line: " << *line;
+      continue;
+    }
+
+    // Handle attributes common to SCTP and RTP.
+    if (HasAttribute(*line, kAttributeMid)) {
+      // RFC 3388
+      // mid-attribute = "a=mid:" identification-tag
+      // identification-tag = token
+      // Use the mid identification-tag as the content name.
+      if (!GetSingleTokenValue(*line, kAttributeMid, &mline_id, error)) {
+        return false;
+      }
+      *content_name = mline_id;
+    } else if (HasAttribute(*line, kAttributeBundleOnly)) {
+      *bundle_only = true;
+    } else if (HasAttribute(*line, kAttributeCandidate)) {
+      Candidate candidate;
+      if (!ParseCandidate(*line, &candidate, error, false)) {
+        return false;
+      }
+      // ParseCandidate will parse non-standard ufrag and password attributes,
+      // since it's used for candidate trickling, but we only want to process
+      // the "a=ice-ufrag"/"a=ice-pwd" values in a session description, so
+      // strip them off at this point.
+      candidate.set_username(std::string());
+      candidate.set_password(std::string());
+      candidates_orig.push_back(candidate);
+    } else if (HasAttribute(*line, kAttributeIceUfrag)) {
+      if (!GetValue(*line, kAttributeIceUfrag, &transport->ice_ufrag, error)) {
+        return false;
+      }
+    } else if (HasAttribute(*line, kAttributeIcePwd)) {
+      if (!GetValue(*line, kAttributeIcePwd, &transport->ice_pwd, error)) {
+        return false;
+      }
+    } else if (HasAttribute(*line, kAttributeIceOption)) {
+      if (!ParseIceOptions(*line, &transport->transport_options, error)) {
+        return false;
+      }
+    } else if (HasAttribute(*line, kAttributeFmtp)) {
+      if (!ParseFmtpAttributes(*line, media_type, media_desc, error)) {
+        return false;
+      }
+    } else if (HasAttribute(*line, kAttributeFingerprint)) {
+      std::unique_ptr<rtc::SSLFingerprint> fingerprint;
+      if (!ParseFingerprintAttribute(*line, &fingerprint, error)) {
+        return false;
+      }
+      transport->identity_fingerprint = std::move(fingerprint);
+    } else if (HasAttribute(*line, kAttributeSetup)) {
+      if (!ParseDtlsSetup(*line, &(transport->connection_role), error)) {
+        return false;
+      }
+    } else if (cricket::IsDtlsSctp(protocol) &&
+               media_type == cricket::MEDIA_TYPE_DATA) {
+      //
+      // SCTP specific attributes
+      //
+      if (HasAttribute(*line, kAttributeSctpPort)) {
+        if (media_desc->as_sctp()->use_sctpmap()) {
+          return ParseFailed(
+              *line, "sctp-port attribute can't be used with sctpmap.", error);
+        }
+        int sctp_port;
+        if (!ParseSctpPort(*line, &sctp_port, error)) {
+          return false;
+        }
+        media_desc->as_sctp()->set_port(sctp_port);
+      } else if (HasAttribute(*line, kAttributeMaxMessageSize)) {
+        int max_message_size;
+        if (!ParseSctpMaxMessageSize(*line, &max_message_size, error)) {
+          return false;
+        }
+        media_desc->as_sctp()->set_max_message_size(max_message_size);
+      } else if (HasAttribute(*line, kAttributeSctpmap)) {
+        // Ignore a=sctpmap: from early versions of draft-ietf-mmusic-sctp-sdp
+        continue;
+      }
+    } else if (cricket::IsRtpProtocol(protocol)) {
+      //
+      // RTP specific attributes
+      //
+      if (HasAttribute(*line, kAttributeRtcpMux)) {
+        media_desc->set_rtcp_mux(true);
+      } else if (HasAttribute(*line, kAttributeRtcpReducedSize)) {
+        media_desc->set_rtcp_reduced_size(true);
+      } else if (HasAttribute(*line, kAttributeRtcpRemoteEstimate)) {
+        media_desc->set_remote_estimate(true);
+      } else if (HasAttribute(*line, kAttributeSsrcGroup)) {
+        if (!ParseSsrcGroupAttribute(*line, &ssrc_groups, error)) {
+          return false;
+        }
+      } else if (HasAttribute(*line, kAttributeSsrc)) {
+        if (!ParseSsrcAttribute(*line, &ssrc_infos, msid_signaling, error)) {
+          return false;
+        }
+      } else if (HasAttribute(*line, kAttributeCrypto)) {
+        if (!ParseCryptoAttribute(*line, media_desc, error)) {
+          return false;
+        }
+      } else if (HasAttribute(*line, kAttributeRtpmap)) {
+        if (!ParseRtpmapAttribute(*line, media_type, payload_types, media_desc,
+                                  error)) {
+          return false;
+        }
+      } else if (HasAttribute(*line, kCodecParamMaxPTime)) {
+        if (!GetValue(*line, kCodecParamMaxPTime, &maxptime_as_string, error)) {
+          return false;
+        }
+      } else if (HasAttribute(*line, kAttributePacketization)) {
+        if (!ParsePacketizationAttribute(*line, media_type, media_desc,
+                                         error)) {
+          return false;
+        }
+      } else if (HasAttribute(*line, kAttributeRtcpFb)) {
+        if (!ParseRtcpFbAttribute(*line, media_type, media_desc, error)) {
+          return false;
+        }
+      } else if (HasAttribute(*line, kCodecParamPTime)) {
+        if (!GetValue(*line, kCodecParamPTime, &ptime_as_string, error)) {
+          return false;
+        }
+      } else if (HasAttribute(*line, kAttributeSendOnly)) {
+        media_desc->set_direction(RtpTransceiverDirection::kSendOnly);
+      } else if (HasAttribute(*line, kAttributeRecvOnly)) {
+        media_desc->set_direction(RtpTransceiverDirection::kRecvOnly);
+      } else if (HasAttribute(*line, kAttributeInactive)) {
+        media_desc->set_direction(RtpTransceiverDirection::kInactive);
+      } else if (HasAttribute(*line, kAttributeSendRecv)) {
+        media_desc->set_direction(RtpTransceiverDirection::kSendRecv);
+      } else if (HasAttribute(*line, kAttributeExtmapAllowMixed)) {
+        media_desc->set_extmap_allow_mixed_enum(
+            MediaContentDescription::kMedia);
+      } else if (HasAttribute(*line, kAttributeExtmap)) {
+        RtpExtension extmap;
+        if (!ParseExtmap(*line, &extmap, error)) {
+          return false;
+        }
+        media_desc->AddRtpHeaderExtension(extmap);
+      } else if (HasAttribute(*line, kAttributeXGoogleFlag)) {
+        // Experimental attribute. Conference mode activates more aggressive
+        // AEC and NS settings.
+        // TODO(deadbeef): expose API to set these directly.
+        std::string flag_value;
+        if (!GetValue(*line, kAttributeXGoogleFlag, &flag_value, error)) {
+          return false;
+        }
+        if (flag_value.compare(kValueConference) == 0)
+          media_desc->set_conference_mode(true);
+      } else if (HasAttribute(*line, kAttributeMsid)) {
+        if (!ParseMsidAttribute(*line, &stream_ids, &track_id, error)) {
+          return false;
+        }
+        *msid_signaling |= cricket::kMsidSignalingMediaSection;
+      } else if (HasAttribute(*line, kAttributeRid)) {
+        const size_t kRidPrefixLength =
+            kLinePrefixLength + arraysize(kAttributeRid);
+        if (line->size() <= kRidPrefixLength) {
+          RTC_LOG(LS_INFO) << "Ignoring empty RID attribute: " << *line;
+          continue;
+        }
+        RTCErrorOr<RidDescription> error_or_rid_description =
+            deserializer.DeserializeRidDescription(
+                line->substr(kRidPrefixLength));
+
+        // Malformed a=rid lines are discarded.
+        if (!error_or_rid_description.ok()) {
+          RTC_LOG(LS_INFO) << "Ignoring malformed RID line: '" << *line
+                           << "'. Error: "
+                           << error_or_rid_description.error().message();
+          continue;
+        }
+
+        rids.push_back(error_or_rid_description.MoveValue());
+      } else if (HasAttribute(*line, kAttributeSimulcast)) {
+        const size_t kSimulcastPrefixLength =
+            kLinePrefixLength + arraysize(kAttributeSimulcast);
+        if (line->size() <= kSimulcastPrefixLength) {
+          return ParseFailed(*line, "Simulcast attribute is empty.", error);
+        }
+
+        if (!simulcast.empty()) {
+          return ParseFailed(*line, "Multiple Simulcast attributes specified.",
+                             error);
+        }
+
+        RTCErrorOr<SimulcastDescription> error_or_simulcast =
+            deserializer.DeserializeSimulcastDescription(
+                line->substr(kSimulcastPrefixLength));
+        if (!error_or_simulcast.ok()) {
+          return ParseFailed(*line,
+                             std::string("Malformed simulcast line: ") +
+                                 error_or_simulcast.error().message(),
+                             error);
+        }
+
+        simulcast = error_or_simulcast.value();
+      } else if (HasAttribute(*line, kAttributeRtcp)) {
+        // Ignore and do not log a=rtcp line.
+        // JSEP section 5.8.2 (media section parsing) says to ignore it.
+        continue;
+      } else {
+        // Unrecognized attribute in RTP protocol.
+        RTC_LOG(LS_VERBOSE) << "Ignored line: " << *line;
+        continue;
+      }
+    } else {
+      // Only parse lines that we are interested of.
+      RTC_LOG(LS_VERBOSE) << "Ignored line: " << *line;
+      continue;
+    }
+  }
+
+  // Remove duplicate or inconsistent rids.
+  RemoveInvalidRidDescriptions(payload_types, &rids);
+
+  // If simulcast is specified, split the rids into send and receive.
+  // Rids that do not appear in simulcast attribute will be removed.
+  std::vector<RidDescription> send_rids;
+  std::vector<RidDescription> receive_rids;
+  if (!simulcast.empty()) {
+    // Verify that the rids in simulcast match rids in sdp.
+    RemoveInvalidRidsFromSimulcast(rids, &simulcast);
+
+    // Use simulcast description to figure out Send / Receive RIDs.
+    std::map<std::string, RidDescription> rid_map;
+    for (const RidDescription& rid : rids) {
+      rid_map[rid.rid] = rid;
+    }
+
+    for (const auto& layer : simulcast.send_layers().GetAllLayers()) {
+      auto iter = rid_map.find(layer.rid);
+      RTC_DCHECK(iter != rid_map.end());
+      send_rids.push_back(iter->second);
+    }
+
+    for (const auto& layer : simulcast.receive_layers().GetAllLayers()) {
+      auto iter = rid_map.find(layer.rid);
+      RTC_DCHECK(iter != rid_map.end());
+      receive_rids.push_back(iter->second);
+    }
+
+    media_desc->set_simulcast_description(simulcast);
+  } else {
+    // RID is specified in RFC 8851, which identifies a lot of usages.
+    // We only support RFC 8853 usage of RID, not anything else.
+    // Ignore all RID parameters when a=simulcast is missing.
+    // In particular do NOT do send_rids = rids;
+    RTC_LOG(LS_VERBOSE) << "Ignoring send_rids without simulcast";
+  }
+
+  media_desc->set_receive_rids(receive_rids);
+
+  // Create tracks from the `ssrc_infos`.
+  // If the stream_id/track_id for all SSRCS are identical, one StreamParams
+  // will be created in CreateTracksFromSsrcInfos, containing all the SSRCs from
+  // the m= section.
+  if (!ssrc_infos.empty()) {
+    CreateTracksFromSsrcInfos(ssrc_infos, stream_ids, track_id, &tracks,
+                              *msid_signaling);
+  } else if (media_type != cricket::MEDIA_TYPE_DATA &&
+             (*msid_signaling & cricket::kMsidSignalingMediaSection)) {
+    // If the stream_ids/track_id was signaled but SSRCs were unsignaled we
+    // still create a track. This isn't done for data media types because
+    // StreamParams aren't used for SCTP streams, and RTP data channels don't
+    // support unsignaled SSRCs.
+    // If track id was not specified, create a random one.
+    if (track_id.empty()) {
+      track_id = rtc::CreateRandomString(8);
+    }
+    CreateTrackWithNoSsrcs(stream_ids, track_id, send_rids, &tracks);
+  }
+
+  // Add the ssrc group to the track.
+  for (const SsrcGroup& ssrc_group : ssrc_groups) {
+    if (ssrc_group.ssrcs.empty()) {
+      continue;
+    }
+    uint32_t ssrc = ssrc_group.ssrcs.front();
+    for (StreamParams& track : tracks) {
+      if (track.has_ssrc(ssrc)) {
+        track.ssrc_groups.push_back(ssrc_group);
+      }
+    }
+  }
+
+  // Add the new tracks to the `media_desc`.
+  for (StreamParams& track : tracks) {
+    media_desc->AddStream(track);
+  }
+
+  UpdateFromWildcardCodecs(media_desc);
+  // Codec has not been populated correctly unless the name has been set. This
+  // can happen if an SDP has an fmtp or rtcp-fb with a payload type but doesn't
+  // have a corresponding "rtpmap" line. This should lead to a parse error.
+  // Note: the codec is taken by const reference to avoid copying every codec
+  // (name, params map, feedback params) just for this check.
+  if (!absl::c_all_of(media_desc->codecs(), [](const cricket::Codec& codec) {
+        return !codec.name.empty();
+      })) {
+    return ParseFailed("Failed to parse codecs correctly.", error);
+  }
+  if (media_type == cricket::MEDIA_TYPE_AUDIO) {
+    AddAudioAttribute(kCodecParamMaxPTime, maxptime_as_string, media_desc);
+    AddAudioAttribute(kCodecParamPTime, ptime_as_string, media_desc);
+  }
+
+  // RFC 5245
+  // Update the candidates with the media level "ice-pwd" and "ice-ufrag".
+  for (Candidate& candidate : candidates_orig) {
+    RTC_DCHECK(candidate.username().empty() ||
+               candidate.username() == transport->ice_ufrag);
+    candidate.set_username(transport->ice_ufrag);
+    RTC_DCHECK(candidate.password().empty());
+    candidate.set_password(transport->ice_pwd);
+    candidates->push_back(
+        std::make_unique<JsepIceCandidate>(mline_id, mline_index, candidate));
+  }
+
+  return true;
+}
+
+// Parses an "a=ssrc:<id> <attribute>[:<value>]" line (RFC 5576) and merges
+// the result into `ssrc_infos`, keyed by the ssrc id. Recognized attributes
+// are "cname" and "msid"; any other attribute is logged and ignored.
+// Returns false and fills `error` when the line is malformed.
+bool ParseSsrcAttribute(absl::string_view line,
+                        SsrcInfoVec* ssrc_infos,
+                        int* msid_signaling,
+                        SdpParseError* error) {
+  RTC_DCHECK(ssrc_infos != NULL);
+  // RFC 5576
+  // a=ssrc:<ssrc-id> <attribute>
+  // a=ssrc:<ssrc-id> <attribute>:<value>
+  std::string field1, field2;
+  if (!rtc::tokenize_first(line.substr(kLinePrefixLength),
+                           kSdpDelimiterSpaceChar, &field1, &field2)) {
+    const size_t expected_fields = 2;
+    return ParseFailedExpectFieldNum(line, expected_fields, error);
+  }
+
+  // ssrc:<ssrc-id>
+  std::string ssrc_id_s;
+  if (!GetValue(field1, kAttributeSsrc, &ssrc_id_s, error)) {
+    return false;
+  }
+  uint32_t ssrc_id = 0;
+  if (!GetValueFromString(line, ssrc_id_s, &ssrc_id, error)) {
+    return false;
+  }
+
+  std::string attribute;
+  std::string value;
+  if (!rtc::tokenize_first(field2, kSdpDelimiterColonChar, &attribute,
+                           &value)) {
+    rtc::StringBuilder description;
+    description << "Failed to get the ssrc attribute value from " << field2
+                << ". Expected format <attribute>:<value>.";
+    return ParseFailed(line, description.Release(), error);
+  }
+
+  // Check if there's already an item for this `ssrc_id`. Create a new one if
+  // there isn't.
+  auto ssrc_info_it =
+      absl::c_find_if(*ssrc_infos, [ssrc_id](const SsrcInfo& ssrc_info) {
+        return ssrc_info.ssrc_id == ssrc_id;
+      });
+  if (ssrc_info_it == ssrc_infos->end()) {
+    SsrcInfo info;
+    info.ssrc_id = ssrc_id;
+    ssrc_infos->push_back(info);
+    // Point the iterator at the entry just appended.
+    ssrc_info_it = ssrc_infos->end() - 1;
+  }
+  SsrcInfo& ssrc_info = *ssrc_info_it;
+
+  // Store the info to the `ssrc_info`.
+  if (attribute == kSsrcAttributeCname) {
+    // RFC 5576
+    // cname:<value>
+    ssrc_info.cname = value;
+  } else if (attribute == kSsrcAttributeMsid) {
+    // draft-alvestrand-mmusic-msid-00
+    // msid:identifier [appdata]
+    std::vector<absl::string_view> fields =
+        rtc::split(value, kSdpDelimiterSpaceChar);
+    if (fields.size() < 1 || fields.size() > 2) {
+      return ParseFailed(
+          line, "Expected format \"msid:<identifier>[ <appdata>]\".", error);
+    }
+    ssrc_info.stream_id = std::string(fields[0]);
+    if (fields.size() == 2) {
+      ssrc_info.track_id = std::string(fields[1]);
+    }
+    // Record that msid was signaled via an ssrc-level attribute.
+    *msid_signaling |= cricket::kMsidSignalingSsrcAttribute;
+  } else {
+    RTC_LOG(LS_INFO) << "Ignored unknown ssrc-specific attribute: " << line;
+  }
+  return true;
+}
+
+// Parses an "a=ssrc-group:<semantics> <ssrc-id> ..." line (RFC 5576) and
+// appends the resulting group to `ssrc_groups`. Fails on malformed fields
+// or duplicate SSRC ids within the group.
+bool ParseSsrcGroupAttribute(absl::string_view line,
+                             SsrcGroupVec* ssrc_groups,
+                             SdpParseError* error) {
+  RTC_DCHECK(ssrc_groups != NULL);
+  // RFC 5576
+  // a=ssrc-group:<semantics> <ssrc-id> ...
+  std::vector<absl::string_view> tokens =
+      rtc::split(line.substr(kLinePrefixLength), kSdpDelimiterSpaceChar);
+  const size_t expected_min_fields = 2;
+  if (tokens.size() < expected_min_fields) {
+    return ParseFailedExpectMinFieldNum(line, expected_min_fields, error);
+  }
+  std::string semantics;
+  if (!GetValue(tokens[0], kAttributeSsrcGroup, &semantics, error)) {
+    return false;
+  }
+  std::vector<uint32_t> group_ssrcs;
+  for (size_t i = 1; i < tokens.size(); ++i) {
+    uint32_t parsed_ssrc = 0;
+    if (!GetValueFromString(line, tokens[i], &parsed_ssrc, error)) {
+      return false;
+    }
+    // Reject duplicates. While not forbidden by RFC 5576,
+    // they don't make sense.
+    if (absl::c_linear_search(group_ssrcs, parsed_ssrc)) {
+      return ParseFailed(line, "Duplicate SSRC in ssrc-group", error);
+    }
+    group_ssrcs.push_back(parsed_ssrc);
+  }
+  ssrc_groups->push_back(SsrcGroup(semantics, group_ssrcs));
+  return true;
+}
+
+// Parses an "a=crypto:<tag> <crypto-suite> <key-params> [<session-params>]"
+// line (RFC 4568) and adds the resulting CryptoParams to `media_desc`.
+bool ParseCryptoAttribute(absl::string_view line,
+                          MediaContentDescription* media_desc,
+                          SdpParseError* error) {
+  // RFC 4568
+  // a=crypto:<tag> <crypto-suite> <key-params> [<session-params>]
+  std::vector<absl::string_view> tokens =
+      rtc::split(line.substr(kLinePrefixLength), kSdpDelimiterSpaceChar);
+  const size_t expected_min_fields = 3;
+  if (tokens.size() < expected_min_fields) {
+    return ParseFailedExpectMinFieldNum(line, expected_min_fields, error);
+  }
+  std::string tag_str;
+  if (!GetValue(tokens[0], kAttributeCrypto, &tag_str, error)) {
+    return false;
+  }
+  int tag = 0;
+  if (!GetValueFromString(line, tag_str, &tag, error)) {
+    return false;
+  }
+  const absl::string_view suite = tokens[1];
+  const absl::string_view key_params = tokens[2];
+  // The trailing session-params field is optional.
+  const absl::string_view session_params =
+      tokens.size() > 3 ? tokens[3] : absl::string_view();
+
+  media_desc->AddCrypto(CryptoParams(tag, suite, key_params, session_params));
+  return true;
+}
+
+// Updates (or creates) the audio codec entry for `payload_type` in `desc`
+// with the rtpmap-provided `name`, `clockrate`, `bitrate` and `channels`.
+void UpdateCodec(int payload_type,
+                 absl::string_view name,
+                 int clockrate,
+                 int bitrate,
+                 size_t channels,
+                 MediaContentDescription* desc) {
+  // An earlier fmtp line may already have filled in optional parameters
+  // for this payload type; start from that entry if so.
+  cricket::Codec updated =
+      GetCodecWithPayloadType(desc->type(), desc->codecs(), payload_type);
+  updated.name = std::string(name);
+  updated.clockrate = clockrate;
+  updated.bitrate = bitrate;
+  updated.channels = channels;
+  AddOrReplaceCodec(desc, updated);
+}
+
+// Updates (or creates) the video codec entry for `payload_type` in `desc`
+// with the rtpmap-provided `name`.
+void UpdateCodec(int payload_type,
+                 absl::string_view name,
+                 MediaContentDescription* desc) {
+  // An earlier fmtp line may already have filled in optional parameters
+  // for this payload type; start from that entry if so.
+  cricket::Codec updated =
+      GetCodecWithPayloadType(desc->type(), desc->codecs(), payload_type);
+  updated.name = std::string(name);
+  AddOrReplaceCodec(desc, updated);
+}
+
+// Parses an "a=rtpmap:<pt> <name>/<clock rate>[/<channels>]" line (RFC 4566)
+// and updates the matching codec entry in `media_desc`. rtpmap lines whose
+// payload type does not appear in the m= line's <fmt> list are ignored (with
+// a warning). Returns false and fills `error` on malformed input or on a
+// conflicting duplicate payload type.
+bool ParseRtpmapAttribute(absl::string_view line,
+                          const cricket::MediaType media_type,
+                          const std::vector<int>& payload_types,
+                          MediaContentDescription* media_desc,
+                          SdpParseError* error) {
+  // Used only to word the duplicate-payload-type error messages below.
+  static const int kFirstDynamicPayloadTypeLowerRange = 35;
+  std::vector<absl::string_view> fields =
+      rtc::split(line.substr(kLinePrefixLength), kSdpDelimiterSpaceChar);
+  // RFC 4566
+  // a=rtpmap:<payload type> <encoding name>/<clock rate>[/<encodingparameters>]
+  const size_t expected_min_fields = 2;
+  if (fields.size() < expected_min_fields) {
+    return ParseFailedExpectMinFieldNum(line, expected_min_fields, error);
+  }
+  std::string payload_type_value;
+  if (!GetValue(fields[0], kAttributeRtpmap, &payload_type_value, error)) {
+    return false;
+  }
+  int payload_type = 0;
+  if (!GetPayloadTypeFromString(line, payload_type_value, &payload_type,
+                                error)) {
+    return false;
+  }
+
+  // Only payload types listed in the m= line's <fmt> list are accepted.
+  if (!absl::c_linear_search(payload_types, payload_type)) {
+    RTC_LOG(LS_WARNING) << "Ignore rtpmap line that did not appear in the "
+                           "<fmt> of the m-line: "
+                        << line;
+    return true;
+  }
+  std::vector<absl::string_view> codec_params = rtc::split(fields[1], '/');
+  // <encoding name>/<clock rate>[/<encodingparameters>]
+  // 2 mandatory fields
+  if (codec_params.size() < 2 || codec_params.size() > 3) {
+    return ParseFailed(line,
+                       "Expected format \"<encoding name>/<clock rate>"
+                       "[/<encodingparameters>]\".",
+                       error);
+  }
+  const absl::string_view encoding_name = codec_params[0];
+  int clock_rate = 0;
+  if (!GetValueFromString(line, codec_params[1], &clock_rate, error)) {
+    return false;
+  }
+
+  if (media_type == cricket::MEDIA_TYPE_VIDEO) {
+    // Reject a second rtpmap for an already-named payload type when the
+    // codec name or clock rate conflicts with the existing entry.
+    for (const cricket::VideoCodec& existing_codec : media_desc->codecs()) {
+      if (!existing_codec.name.empty() && payload_type == existing_codec.id &&
+          (!absl::EqualsIgnoreCase(encoding_name, existing_codec.name) ||
+           clock_rate != existing_codec.clockrate)) {
+        rtc::StringBuilder description;
+        description
+            << "Duplicate "
+            << (payload_type < kFirstDynamicPayloadTypeLowerRange
+                    ? "statically assigned"
+                    : "")
+            << " payload type with conflicting codec name or clock rate.";
+        return ParseFailed(line, description.Release(), error);
+      }
+    }
+    UpdateCodec(payload_type, encoding_name, media_desc);
+  } else if (media_type == cricket::MEDIA_TYPE_AUDIO) {
+    // RFC 4566
+    // For audio streams, <encoding parameters> indicates the number
+    // of audio channels. This parameter is OPTIONAL and may be
+    // omitted if the number of channels is one, provided that no
+    // additional parameters are needed.
+    size_t channels = 1;
+    if (codec_params.size() == 3) {
+      if (!GetValueFromString(line, codec_params[2], &channels, error)) {
+        return false;
+      }
+    }
+    if (channels > kMaxNumberOfChannels) {
+      return ParseFailed(line, "At most 24 channels are supported.", error);
+    }
+
+    // Reject a second rtpmap for an already-named payload type when the
+    // codec name conflicts with the existing entry.
+    for (const cricket::AudioCodec& existing_codec : media_desc->codecs()) {
+      // TODO(crbug.com/1338902) re-add checks for clockrate and number of
+      // channels.
+      if (!existing_codec.name.empty() && payload_type == existing_codec.id &&
+          (!absl::EqualsIgnoreCase(encoding_name, existing_codec.name))) {
+        rtc::StringBuilder description;
+        description
+            << "Duplicate "
+            << (payload_type < kFirstDynamicPayloadTypeLowerRange
+                    ? "statically assigned"
+                    : "")
+            << " payload type with conflicting codec name or clock rate.";
+        return ParseFailed(line, description.Release(), error);
+      }
+    }
+    UpdateCodec(payload_type, encoding_name, clock_rate, 0, channels,
+                media_desc);
+  }
+  return true;
+}
+
+// Splits a single fmtp entry "<param>=<value>" into its two halves.
+// Entries without '=' (RFC 2198 / RFC 4733 style format lists) yield an
+// empty parameter name with the whole entry as the value. Always succeeds.
+bool ParseFmtpParam(absl::string_view line,
+                    std::string* parameter,
+                    std::string* value,
+                    SdpParseError* error) {
+  if (rtc::tokenize_first(line, kSdpDelimiterEqualChar, parameter, value)) {
+    // a=fmtp:<payload_type> <param1>=<value1>; <param2>=<value2>; ...
+    return true;
+  }
+  // Non key-value entry; keep the raw text as the value.
+  parameter->clear();
+  *value = std::string(line);
+  return true;
+}
+
+// Parses an "a=fmtp:<format> <format specific parameters>" line
+// (RFC 4566 section 6) and merges the parameters into the matching codec
+// of `media_desc`. A no-op for non audio/video sections.
+bool ParseFmtpAttributes(absl::string_view line,
+                         const cricket::MediaType media_type,
+                         MediaContentDescription* media_desc,
+                         SdpParseError* error) {
+  if (media_type != cricket::MEDIA_TYPE_AUDIO &&
+      media_type != cricket::MEDIA_TYPE_VIDEO) {
+    return true;
+  }
+
+  // https://tools.ietf.org/html/rfc4566#section-6
+  // a=fmtp:<format> <format specific parameters>
+  // At least two fields; the second holds the optional parameters.
+  std::string payload_field;
+  std::string params_field;
+  if (!rtc::tokenize_first(line.substr(kLinePrefixLength),
+                           kSdpDelimiterSpaceChar, &payload_field,
+                           &params_field)) {
+    ParseFailedExpectMinFieldNum(line, 2, error);
+    return false;
+  }
+
+  // Extract the payload type from the first field.
+  std::string payload_type_str;
+  if (!GetValue(payload_field, kAttributeFmtp, &payload_type_str, error)) {
+    return false;
+  }
+  int payload_type = 0;
+  if (!GetPayloadTypeFromString(payload_field, payload_type_str, &payload_type,
+                                error)) {
+    return false;
+  }
+
+  // Collect the semicolon-separated "<name>=<value>" entries.
+  cricket::CodecParameterMap parsed_params;
+  for (absl::string_view entry :
+       rtc::split(params_field, kSdpDelimiterSemicolonChar)) {
+    std::string name;
+    std::string value;
+    if (!ParseFmtpParam(absl::StripAsciiWhitespace(entry), &name, &value,
+                        error)) {
+      return false;
+    }
+    if (parsed_params.find(name) != parsed_params.end()) {
+      RTC_LOG(LS_INFO) << "Overwriting duplicate fmtp parameter with key \""
+                       << name << "\".";
+    }
+    parsed_params[name] = value;
+  }
+
+  // Only audio/video reach this point (guarded above).
+  UpdateCodec(media_desc, payload_type, parsed_params);
+  return true;
+}
+
+// Parses an "a=packetization:<pt> <mode>" line and records the mode on the
+// matching video codec. A no-op for non-video sections.
+bool ParsePacketizationAttribute(absl::string_view line,
+                                 const cricket::MediaType media_type,
+                                 MediaContentDescription* media_desc,
+                                 SdpParseError* error) {
+  if (media_type != cricket::MEDIA_TYPE_VIDEO) {
+    return true;
+  }
+  std::vector<absl::string_view> fields =
+      rtc::split(line, kSdpDelimiterSpaceChar);
+  if (fields.size() < 2) {
+    return ParseFailedGetValue(line, kAttributePacketization, error);
+  }
+  std::string payload_type_str;
+  if (!GetValue(fields[0], kAttributePacketization, &payload_type_str,
+                error)) {
+    return false;
+  }
+  int payload_type;
+  if (!GetPayloadTypeFromString(line, payload_type_str, &payload_type,
+                                error)) {
+    return false;
+  }
+  UpdateVideoCodecPacketization(media_desc, payload_type, fields[1]);
+  return true;
+}
+
+// Parses an "a=rtcp-fb:<pt|*> <id> [params...]" line and attaches the
+// feedback parameter to the matching codec(s) of `media_desc`. A no-op for
+// non audio/video sections.
+bool ParseRtcpFbAttribute(absl::string_view line,
+                          const cricket::MediaType media_type,
+                          MediaContentDescription* media_desc,
+                          SdpParseError* error) {
+  if (media_type != cricket::MEDIA_TYPE_AUDIO &&
+      media_type != cricket::MEDIA_TYPE_VIDEO) {
+    return true;
+  }
+  std::vector<absl::string_view> fields =
+      rtc::split(line, kSdpDelimiterSpaceChar);
+  if (fields.size() < 2) {
+    return ParseFailedGetValue(line, kAttributeRtcpFb, error);
+  }
+  std::string payload_type_str;
+  if (!GetValue(fields[0], kAttributeRtcpFb, &payload_type_str, error)) {
+    return false;
+  }
+  // "*" means the feedback parameter applies to all payload types.
+  int payload_type = kWildcardPayloadType;
+  if (payload_type_str != "*") {
+    if (!GetPayloadTypeFromString(line, payload_type_str, &payload_type,
+                                  error)) {
+      return false;
+    }
+  }
+  // Everything after the feedback id is concatenated into a single parameter
+  // string without separators (matching historic behavior).
+  std::string param;
+  for (size_t i = 2; i < fields.size(); ++i) {
+    param.append(fields[i].data(), fields[i].length());
+  }
+  const cricket::FeedbackParam feedback_param(fields[1], param);
+
+  // Only audio/video reach this point (guarded above).
+  UpdateCodec(media_desc, payload_type, feedback_param);
+  return true;
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/webrtc_sdp.h b/third_party/libwebrtc/pc/webrtc_sdp.h
new file mode 100644
index 0000000000..f7759bd139
--- /dev/null
+++ b/third_party/libwebrtc/pc/webrtc_sdp.h
@@ -0,0 +1,117 @@
+/*
+ * Copyright 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file contain functions for parsing and serializing SDP messages.
+// Related RFC/draft including:
+// * RFC 4566 - SDP
+// * RFC 5245 - ICE
+// * RFC 3388 - Grouping of Media Lines in SDP
+// * RFC 4568 - SDP Security Descriptions for Media Streams
+// * draft-lennox-mmusic-sdp-source-selection-02 -
+// Mechanisms for Media Source Selection in SDP
+
+#ifndef PC_WEBRTC_SDP_H_
+#define PC_WEBRTC_SDP_H_
+
+#include <string>
+
+#include "absl/strings/string_view.h"
+#include "api/candidate.h"
+#include "api/jsep.h"
+#include "api/jsep_ice_candidate.h"
+#include "api/jsep_session_description.h"
+#include "media/base/codec.h"
+#include "rtc_base/strings/string_builder.h"
+#include "rtc_base/system/rtc_export.h"
+
+namespace cricket {
+class Candidate;
+} // namespace cricket
+
+namespace rtc {
+class StringBuilder;
+} // namespace rtc
+
+namespace webrtc {
+class IceCandidateInterface;
+class JsepIceCandidate;
+class JsepSessionDescription;
+struct SdpParseError;
+
+// Serializes the passed in JsepSessionDescription.
+// Serialize SessionDescription including candidates if
+// JsepSessionDescription has candidates.
+// jdesc - The JsepSessionDescription object to be serialized.
+// return - SDP string serialized from the arguments.
+std::string SdpSerialize(const JsepSessionDescription& jdesc);
+
+// Serializes the passed in IceCandidateInterface to a SDP string.
+// candidate - The candidate to be serialized.
+std::string SdpSerializeCandidate(const IceCandidateInterface& candidate);
+
+// Serializes a cricket Candidate.
+// candidate - The candidate to be serialized.
+RTC_EXPORT std::string SdpSerializeCandidate(
+ const cricket::Candidate& candidate);
+
+// Deserializes the passed in SDP string to a JsepSessionDescription.
+// message - SDP string to be Deserialized.
+// jdesc - The JsepSessionDescription deserialized from the SDP string.
+// error - The detail error information when parsing fails.
+// return - true on success, false on failure.
+bool SdpDeserialize(absl::string_view message,
+ JsepSessionDescription* jdesc,
+ SdpParseError* error);
+
+// Deserializes the passed in SDP string to one JsepIceCandidate.
+// The first line must be a=candidate line and only the first line will be
+// parsed.
+// message - The SDP string to be Deserialized.
+// candidates - The JsepIceCandidate from the SDP string.
+// error - The detail error information when parsing fails.
+// return - true on success, false on failure.
+RTC_EXPORT bool SdpDeserializeCandidate(absl::string_view message,
+ JsepIceCandidate* candidate,
+ SdpParseError* error);
+
+// Deserializes the passed in SDP string to a cricket Candidate.
+// The first line must be a=candidate line and only the first line will be
+// parsed.
+// transport_name - The transport name (MID) of the candidate.
+// message - The SDP string to be deserialized.
+// candidate - The cricket Candidate from the SDP string.
+// error - The detail error information when parsing fails.
+// return - true on success, false on failure.
+RTC_EXPORT bool SdpDeserializeCandidate(absl::string_view transport_name,
+ absl::string_view message,
+ cricket::Candidate* candidate,
+ SdpParseError* error);
+
+// Parses `message` according to the grammar defined in RFC 5245, Section 15.1
+// and, if successful, stores the result in `candidate` and returns true.
+// If unsuccessful, returns false and stores error information in `error` if
+// `error` is not null.
+// If `is_raw` is false, `message` is expected to be prefixed with "a=".
+// If `is_raw` is true, no prefix is expected in `message`.
+RTC_EXPORT bool ParseCandidate(absl::string_view message,
+ cricket::Candidate* candidate,
+ SdpParseError* error,
+ bool is_raw);
+
+// Generates an FMTP line based on `parameters`. Please note that some
+// parameters are not considered to be part of the FMTP line, see the function
+// IsFmtpParam(). Returns true if the set of FMTP parameters is nonempty, false
+// otherwise.
+bool WriteFmtpParameters(const cricket::CodecParameterMap& parameters,
+ rtc::StringBuilder* os);
+
+} // namespace webrtc
+
+#endif // PC_WEBRTC_SDP_H_
diff --git a/third_party/libwebrtc/pc/webrtc_sdp_unittest.cc b/third_party/libwebrtc/pc/webrtc_sdp_unittest.cc
new file mode 100644
index 0000000000..2c43c35d15
--- /dev/null
+++ b/third_party/libwebrtc/pc/webrtc_sdp_unittest.cc
@@ -0,0 +1,5103 @@
+/*
+ * Copyright 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+#include <string.h>
+
+#include <cstdint>
+#include <map>
+#include <memory>
+#include <sstream>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "absl/algorithm/container.h"
+#include "absl/memory/memory.h"
+#include "absl/strings/str_replace.h"
+#include "absl/strings/string_view.h"
+#include "absl/types/optional.h"
+#include "api/array_view.h"
+#include "api/crypto_params.h"
+#include "api/jsep_session_description.h"
+#include "api/media_types.h"
+#include "api/rtp_parameters.h"
+#include "api/rtp_transceiver_direction.h"
+#include "media/base/codec.h"
+#include "media/base/media_constants.h"
+#include "media/base/rid_description.h"
+#include "media/base/stream_params.h"
+#include "p2p/base/p2p_constants.h"
+#include "p2p/base/port.h"
+#include "p2p/base/transport_description.h"
+#include "p2p/base/transport_info.h"
+#include "pc/media_protocol_names.h"
+#include "pc/media_session.h"
+#include "pc/session_description.h"
+#include "pc/simulcast_description.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/message_digest.h"
+#include "rtc_base/socket_address.h"
+#include "rtc_base/ssl_fingerprint.h"
+#include "rtc_base/string_encode.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+#ifdef WEBRTC_ANDROID
+#include "pc/test/android_test_initializer.h"
+#endif
+#include "pc/webrtc_sdp.h"
+
+using cricket::AudioContentDescription;
+using cricket::Candidate;
+using cricket::ContentGroup;
+using cricket::ContentInfo;
+using cricket::CryptoParams;
+using cricket::ICE_CANDIDATE_COMPONENT_RTCP;
+using cricket::ICE_CANDIDATE_COMPONENT_RTP;
+using cricket::kFecSsrcGroupSemantics;
+using cricket::LOCAL_PORT_TYPE;
+using cricket::MediaProtocolType;
+using cricket::RELAY_PORT_TYPE;
+using cricket::RidDescription;
+using cricket::RidDirection;
+using cricket::SctpDataContentDescription;
+using cricket::SessionDescription;
+using cricket::SimulcastDescription;
+using cricket::SimulcastLayer;
+using cricket::StreamParams;
+using cricket::STUN_PORT_TYPE;
+using cricket::TransportDescription;
+using cricket::TransportInfo;
+using cricket::VideoContentDescription;
+using ::testing::ElementsAre;
+using ::testing::Field;
+using webrtc::IceCandidateCollection;
+using webrtc::IceCandidateInterface;
+using webrtc::JsepIceCandidate;
+using webrtc::JsepSessionDescription;
+using webrtc::RtpExtension;
+using webrtc::RtpTransceiverDirection;
+using webrtc::SdpParseError;
+using webrtc::SdpType;
+using webrtc::SessionDescriptionInterface;
+
+static const uint32_t kDefaultSctpPort = 5000;
+static const uint16_t kUnusualSctpPort = 9556;
+static const char kSessionTime[] = "t=0 0\r\n";
+static const uint32_t kCandidatePriority = 2130706432U; // pref = 1.0
+static const char kAttributeIceUfragVoice[] = "a=ice-ufrag:ufrag_voice\r\n";
+static const char kAttributeIcePwdVoice[] = "a=ice-pwd:pwd_voice\r\n";
+static const char kAttributeIceUfragVideo[] = "a=ice-ufrag:ufrag_video\r\n";
+static const char kAttributeIcePwdVideo[] = "a=ice-pwd:pwd_video\r\n";
+static const uint32_t kCandidateGeneration = 2;
+static const char kCandidateFoundation1[] = "a0+B/1";
+static const char kCandidateFoundation2[] = "a0+B/2";
+static const char kCandidateFoundation3[] = "a0+B/3";
+static const char kCandidateFoundation4[] = "a0+B/4";
+static const char kAttributeCryptoVoice[] =
+ "a=crypto:1 AES_CM_128_HMAC_SHA1_32 "
+ "inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj|2^20|1:32 "
+ "dummy_session_params\r\n";
+static const char kAttributeCryptoVideo[] =
+ "a=crypto:1 AES_CM_128_HMAC_SHA1_80 "
+ "inline:d0RmdmcmVCspeEc3QGZiNWpVLFJhQX1cfHAwJSoj|2^20|1:32\r\n";
+static const char kFingerprint[] =
+ "a=fingerprint:sha-1 "
+ "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n";
+static const char kExtmapAllowMixed[] = "a=extmap-allow-mixed\r\n";
+static const int kExtmapId = 1;
+static const char kExtmapUri[] = "http://example.com/082005/ext.htm#ttime";
+static const char kExtmap[] =
+ "a=extmap:1 http://example.com/082005/ext.htm#ttime\r\n";
+static const char kExtmapWithDirectionAndAttribute[] =
+ "a=extmap:1/sendrecv http://example.com/082005/ext.htm#ttime a1 a2\r\n";
+static const char kExtmapWithDirectionAndAttributeEncrypted[] =
+ "a=extmap:1/sendrecv urn:ietf:params:rtp-hdrext:encrypt "
+ "http://example.com/082005/ext.htm#ttime a1 a2\r\n";
+
+static const uint8_t kIdentityDigest[] = {
+ 0x4A, 0xAD, 0xB9, 0xB1, 0x3F, 0x82, 0x18, 0x3B, 0x54, 0x02,
+ 0x12, 0xDF, 0x3E, 0x5D, 0x49, 0x6B, 0x19, 0xE5, 0x7C, 0xAB};
+
+static const char kDtlsSctp[] = "DTLS/SCTP";
+static const char kUdpDtlsSctp[] = "UDP/DTLS/SCTP";
+static const char kTcpDtlsSctp[] = "TCP/DTLS/SCTP";
+
// Plain aggregate bundling integer codec parameters used by tests that build
// and verify ptime/fmtp expectations for audio codecs. The field names appear
// to mirror the corresponding SDP/fmtp parameter names (e.g. "maxptime",
// "sprop-stereo", "useinbandfec", "maxaveragebitrate") — presumably each
// holds the integer value serialized for that parameter; confirm against the
// tests that populate and consume this struct.
struct CodecParams {
  int max_ptime;
  int ptime;
  int min_ptime;
  int sprop_stereo;
  int stereo;
  int useinband;
  int maxaveragebitrate;
};
+
+// TODO(deadbeef): In these reference strings, use "a=fingerprint" by default
+// instead of "a=crypto", and have an explicit test for adding "a=crypto".
+// Currently it's the other way around.
+
+// Reference sdp string
+static const char kSdpFullString[] =
+ "v=0\r\n"
+ "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=extmap-allow-mixed\r\n"
+ "a=msid-semantic: WMS local_stream_1\r\n"
+ "m=audio 2345 RTP/SAVPF 111 103 104\r\n"
+ "c=IN IP4 74.125.127.126\r\n"
+ "a=rtcp:2347 IN IP4 74.125.127.126\r\n"
+ "a=candidate:a0+B/1 1 udp 2130706432 192.168.1.5 1234 typ host "
+ "generation 2\r\n"
+ "a=candidate:a0+B/1 2 udp 2130706432 192.168.1.5 1235 typ host "
+ "generation 2\r\n"
+ "a=candidate:a0+B/2 1 udp 2130706432 ::1 1238 typ host "
+ "generation 2\r\n"
+ "a=candidate:a0+B/2 2 udp 2130706432 ::1 1239 typ host "
+ "generation 2\r\n"
+ "a=candidate:a0+B/3 1 udp 2130706432 74.125.127.126 2345 typ srflx "
+ "raddr 192.168.1.5 rport 2346 "
+ "generation 2\r\n"
+ "a=candidate:a0+B/3 2 udp 2130706432 74.125.127.126 2347 typ srflx "
+ "raddr 192.168.1.5 rport 2348 "
+ "generation 2\r\n"
+ "a=ice-ufrag:ufrag_voice\r\na=ice-pwd:pwd_voice\r\n"
+ "a=mid:audio_content_name\r\n"
+ "a=sendrecv\r\n"
+ "a=rtcp-mux\r\n"
+ "a=rtcp-rsize\r\n"
+ "a=crypto:1 AES_CM_128_HMAC_SHA1_32 "
+ "inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj|2^20|1:32 "
+ "dummy_session_params\r\n"
+ "a=rtpmap:111 opus/48000/2\r\n"
+ "a=rtpmap:103 ISAC/16000\r\n"
+ "a=rtpmap:104 ISAC/32000\r\n"
+ "a=ssrc:1 cname:stream_1_cname\r\n"
+ "a=ssrc:1 msid:local_stream_1 audio_track_id_1\r\n"
+ "m=video 3457 RTP/SAVPF 120\r\n"
+ "c=IN IP4 74.125.224.39\r\n"
+ "a=rtcp:3456 IN IP4 74.125.224.39\r\n"
+ "a=candidate:a0+B/1 2 udp 2130706432 192.168.1.5 1236 typ host "
+ "generation 2\r\n"
+ "a=candidate:a0+B/1 1 udp 2130706432 192.168.1.5 1237 typ host "
+ "generation 2\r\n"
+ "a=candidate:a0+B/2 2 udp 2130706432 ::1 1240 typ host "
+ "generation 2\r\n"
+ "a=candidate:a0+B/2 1 udp 2130706432 ::1 1241 typ host "
+ "generation 2\r\n"
+ "a=candidate:a0+B/4 2 udp 2130706432 74.125.224.39 3456 typ relay "
+ "generation 2\r\n"
+ "a=candidate:a0+B/4 1 udp 2130706432 74.125.224.39 3457 typ relay "
+ "generation 2\r\n"
+ "a=ice-ufrag:ufrag_video\r\na=ice-pwd:pwd_video\r\n"
+ "a=mid:video_content_name\r\n"
+ "a=sendrecv\r\n"
+ "a=crypto:1 AES_CM_128_HMAC_SHA1_80 "
+ "inline:d0RmdmcmVCspeEc3QGZiNWpVLFJhQX1cfHAwJSoj|2^20|1:32\r\n"
+ "a=rtpmap:120 VP8/90000\r\n"
+ "a=ssrc-group:FEC 2 3\r\n"
+ "a=ssrc:2 cname:stream_1_cname\r\n"
+ "a=ssrc:2 msid:local_stream_1 video_track_id_1\r\n"
+ "a=ssrc:3 cname:stream_1_cname\r\n"
+ "a=ssrc:3 msid:local_stream_1 video_track_id_1\r\n";
+
+// SDP reference string without the candidates.
+static const char kSdpString[] =
+ "v=0\r\n"
+ "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=extmap-allow-mixed\r\n"
+ "a=msid-semantic: WMS local_stream_1\r\n"
+ "m=audio 9 RTP/SAVPF 111 103 104\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtcp:9 IN IP4 0.0.0.0\r\n"
+ "a=ice-ufrag:ufrag_voice\r\na=ice-pwd:pwd_voice\r\n"
+ "a=mid:audio_content_name\r\n"
+ "a=sendrecv\r\n"
+ "a=rtcp-mux\r\n"
+ "a=rtcp-rsize\r\n"
+ "a=crypto:1 AES_CM_128_HMAC_SHA1_32 "
+ "inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj|2^20|1:32 "
+ "dummy_session_params\r\n"
+ "a=rtpmap:111 opus/48000/2\r\n"
+ "a=rtpmap:103 ISAC/16000\r\n"
+ "a=rtpmap:104 ISAC/32000\r\n"
+ "a=ssrc:1 cname:stream_1_cname\r\n"
+ "a=ssrc:1 msid:local_stream_1 audio_track_id_1\r\n"
+ "m=video 9 RTP/SAVPF 120\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtcp:9 IN IP4 0.0.0.0\r\n"
+ "a=ice-ufrag:ufrag_video\r\na=ice-pwd:pwd_video\r\n"
+ "a=mid:video_content_name\r\n"
+ "a=sendrecv\r\n"
+ "a=crypto:1 AES_CM_128_HMAC_SHA1_80 "
+ "inline:d0RmdmcmVCspeEc3QGZiNWpVLFJhQX1cfHAwJSoj|2^20|1:32\r\n"
+ "a=rtpmap:120 VP8/90000\r\n"
+ "a=ssrc-group:FEC 2 3\r\n"
+ "a=ssrc:2 cname:stream_1_cname\r\n"
+ "a=ssrc:2 msid:local_stream_1 video_track_id_1\r\n"
+ "a=ssrc:3 cname:stream_1_cname\r\n"
+ "a=ssrc:3 msid:local_stream_1 video_track_id_1\r\n";
+
+// draft-ietf-mmusic-sctp-sdp-03
+static const char kSdpSctpDataChannelString[] =
+ "m=application 9 UDP/DTLS/SCTP 5000\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=ice-ufrag:ufrag_data\r\n"
+ "a=ice-pwd:pwd_data\r\n"
+ "a=mid:data_content_name\r\n"
+ "a=sctpmap:5000 webrtc-datachannel 1024\r\n";
+
+// draft-ietf-mmusic-sctp-sdp-12
+// Note - this is invalid per draft-ietf-mmusic-sctp-sdp-26,
+// since the separator after "sctp-port" needs to be a colon.
+static const char kSdpSctpDataChannelStringWithSctpPort[] =
+ "m=application 9 UDP/DTLS/SCTP webrtc-datachannel\r\n"
+ "a=sctp-port 5000\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=ice-ufrag:ufrag_data\r\n"
+ "a=ice-pwd:pwd_data\r\n"
+ "a=mid:data_content_name\r\n";
+
+// draft-ietf-mmusic-sctp-sdp-26
+static const char kSdpSctpDataChannelStringWithSctpColonPort[] =
+ "m=application 9 UDP/DTLS/SCTP webrtc-datachannel\r\n"
+ "a=sctp-port:5000\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=ice-ufrag:ufrag_data\r\n"
+ "a=ice-pwd:pwd_data\r\n"
+ "a=mid:data_content_name\r\n";
+
+static const char kSdpSctpDataChannelWithCandidatesString[] =
+ "m=application 2345 UDP/DTLS/SCTP 5000\r\n"
+ "c=IN IP4 74.125.127.126\r\n"
+ "a=candidate:a0+B/1 1 udp 2130706432 192.168.1.5 1234 typ host "
+ "generation 2\r\n"
+ "a=candidate:a0+B/2 1 udp 2130706432 ::1 1238 typ host "
+ "generation 2\r\n"
+ "a=candidate:a0+B/3 1 udp 2130706432 74.125.127.126 2345 typ srflx "
+ "raddr 192.168.1.5 rport 2346 "
+ "generation 2\r\n"
+ "a=ice-ufrag:ufrag_data\r\n"
+ "a=ice-pwd:pwd_data\r\n"
+ "a=mid:data_content_name\r\n"
+ "a=sctpmap:5000 webrtc-datachannel 1024\r\n";
+
+static const char kSdpConferenceString[] =
+ "v=0\r\n"
+ "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=msid-semantic: WMS\r\n"
+ "m=audio 9 RTP/SAVPF 111 103 104\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=x-google-flag:conference\r\n"
+ "m=video 9 RTP/SAVPF 120\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=x-google-flag:conference\r\n";
+
+static const char kSdpSessionString[] =
+ "v=0\r\n"
+ "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=msid-semantic: WMS local_stream\r\n";
+
+static const char kSdpAudioString[] =
+ "m=audio 9 RTP/SAVPF 111\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtcp:9 IN IP4 0.0.0.0\r\n"
+ "a=ice-ufrag:ufrag_voice\r\na=ice-pwd:pwd_voice\r\n"
+ "a=mid:audio_content_name\r\n"
+ "a=sendrecv\r\n"
+ "a=rtpmap:111 opus/48000/2\r\n"
+ "a=ssrc:1 cname:stream_1_cname\r\n"
+ "a=ssrc:1 msid:local_stream audio_track_id_1\r\n";
+
+static const char kSdpVideoString[] =
+ "m=video 9 RTP/SAVPF 120\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtcp:9 IN IP4 0.0.0.0\r\n"
+ "a=ice-ufrag:ufrag_video\r\na=ice-pwd:pwd_video\r\n"
+ "a=mid:video_content_name\r\n"
+ "a=sendrecv\r\n"
+ "a=rtpmap:120 VP8/90000\r\n"
+ "a=ssrc:2 cname:stream_1_cname\r\n"
+ "a=ssrc:2 msid:local_stream video_track_id_1\r\n";
+
+// Reference sdp string using bundle-only.
+static const char kBundleOnlySdpFullString[] =
+ "v=0\r\n"
+ "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=extmap-allow-mixed\r\n"
+ "a=group:BUNDLE audio_content_name video_content_name\r\n"
+ "a=msid-semantic: WMS local_stream_1\r\n"
+ "m=audio 2345 RTP/SAVPF 111 103 104\r\n"
+ "c=IN IP4 74.125.127.126\r\n"
+ "a=rtcp:2347 IN IP4 74.125.127.126\r\n"
+ "a=candidate:a0+B/1 1 udp 2130706432 192.168.1.5 1234 typ host "
+ "generation 2\r\n"
+ "a=candidate:a0+B/1 2 udp 2130706432 192.168.1.5 1235 typ host "
+ "generation 2\r\n"
+ "a=candidate:a0+B/2 1 udp 2130706432 ::1 1238 typ host "
+ "generation 2\r\n"
+ "a=candidate:a0+B/2 2 udp 2130706432 ::1 1239 typ host "
+ "generation 2\r\n"
+ "a=candidate:a0+B/3 1 udp 2130706432 74.125.127.126 2345 typ srflx "
+ "raddr 192.168.1.5 rport 2346 "
+ "generation 2\r\n"
+ "a=candidate:a0+B/3 2 udp 2130706432 74.125.127.126 2347 typ srflx "
+ "raddr 192.168.1.5 rport 2348 "
+ "generation 2\r\n"
+ "a=ice-ufrag:ufrag_voice\r\na=ice-pwd:pwd_voice\r\n"
+ "a=mid:audio_content_name\r\n"
+ "a=sendrecv\r\n"
+ "a=rtcp-mux\r\n"
+ "a=rtcp-rsize\r\n"
+ "a=crypto:1 AES_CM_128_HMAC_SHA1_32 "
+ "inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj|2^20|1:32 "
+ "dummy_session_params\r\n"
+ "a=rtpmap:111 opus/48000/2\r\n"
+ "a=rtpmap:103 ISAC/16000\r\n"
+ "a=rtpmap:104 ISAC/32000\r\n"
+ "a=ssrc:1 cname:stream_1_cname\r\n"
+ "a=ssrc:1 msid:local_stream_1 audio_track_id_1\r\n"
+ "m=video 0 RTP/SAVPF 120\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtcp:9 IN IP4 0.0.0.0\r\n"
+ "a=bundle-only\r\n"
+ "a=mid:video_content_name\r\n"
+ "a=sendrecv\r\n"
+ "a=crypto:1 AES_CM_128_HMAC_SHA1_80 "
+ "inline:d0RmdmcmVCspeEc3QGZiNWpVLFJhQX1cfHAwJSoj|2^20|1:32\r\n"
+ "a=rtpmap:120 VP8/90000\r\n"
+ "a=ssrc-group:FEC 2 3\r\n"
+ "a=ssrc:2 cname:stream_1_cname\r\n"
+ "a=ssrc:2 msid:local_stream_1 video_track_id_1\r\n"
+ "a=ssrc:3 cname:stream_1_cname\r\n"
+ "a=ssrc:3 msid:local_stream_1 video_track_id_1\r\n";
+
+// Plan B SDP reference string, with 2 streams, 2 audio tracks and 3 video
+// tracks.
+static const char kPlanBSdpFullString[] =
+ "v=0\r\n"
+ "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=extmap-allow-mixed\r\n"
+ "a=msid-semantic: WMS local_stream_1 local_stream_2\r\n"
+ "m=audio 2345 RTP/SAVPF 111 103 104\r\n"
+ "c=IN IP4 74.125.127.126\r\n"
+ "a=rtcp:2347 IN IP4 74.125.127.126\r\n"
+ "a=candidate:a0+B/1 1 udp 2130706432 192.168.1.5 1234 typ host "
+ "generation 2\r\n"
+ "a=candidate:a0+B/1 2 udp 2130706432 192.168.1.5 1235 typ host "
+ "generation 2\r\n"
+ "a=candidate:a0+B/2 1 udp 2130706432 ::1 1238 typ host "
+ "generation 2\r\n"
+ "a=candidate:a0+B/2 2 udp 2130706432 ::1 1239 typ host "
+ "generation 2\r\n"
+ "a=candidate:a0+B/3 1 udp 2130706432 74.125.127.126 2345 typ srflx "
+ "raddr 192.168.1.5 rport 2346 "
+ "generation 2\r\n"
+ "a=candidate:a0+B/3 2 udp 2130706432 74.125.127.126 2347 typ srflx "
+ "raddr 192.168.1.5 rport 2348 "
+ "generation 2\r\n"
+ "a=ice-ufrag:ufrag_voice\r\na=ice-pwd:pwd_voice\r\n"
+ "a=mid:audio_content_name\r\n"
+ "a=sendrecv\r\n"
+ "a=rtcp-mux\r\n"
+ "a=rtcp-rsize\r\n"
+ "a=crypto:1 AES_CM_128_HMAC_SHA1_32 "
+ "inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj|2^20|1:32 "
+ "dummy_session_params\r\n"
+ "a=rtpmap:111 opus/48000/2\r\n"
+ "a=rtpmap:103 ISAC/16000\r\n"
+ "a=rtpmap:104 ISAC/32000\r\n"
+ "a=ssrc:1 cname:stream_1_cname\r\n"
+ "a=ssrc:1 msid:local_stream_1 audio_track_id_1\r\n"
+ "a=ssrc:4 cname:stream_2_cname\r\n"
+ "a=ssrc:4 msid:local_stream_2 audio_track_id_2\r\n"
+ "m=video 3457 RTP/SAVPF 120\r\n"
+ "c=IN IP4 74.125.224.39\r\n"
+ "a=rtcp:3456 IN IP4 74.125.224.39\r\n"
+ "a=candidate:a0+B/1 2 udp 2130706432 192.168.1.5 1236 typ host "
+ "generation 2\r\n"
+ "a=candidate:a0+B/1 1 udp 2130706432 192.168.1.5 1237 typ host "
+ "generation 2\r\n"
+ "a=candidate:a0+B/2 2 udp 2130706432 ::1 1240 typ host "
+ "generation 2\r\n"
+ "a=candidate:a0+B/2 1 udp 2130706432 ::1 1241 typ host "
+ "generation 2\r\n"
+ "a=candidate:a0+B/4 2 udp 2130706432 74.125.224.39 3456 typ relay "
+ "generation 2\r\n"
+ "a=candidate:a0+B/4 1 udp 2130706432 74.125.224.39 3457 typ relay "
+ "generation 2\r\n"
+ "a=ice-ufrag:ufrag_video\r\na=ice-pwd:pwd_video\r\n"
+ "a=mid:video_content_name\r\n"
+ "a=sendrecv\r\n"
+ "a=crypto:1 AES_CM_128_HMAC_SHA1_80 "
+ "inline:d0RmdmcmVCspeEc3QGZiNWpVLFJhQX1cfHAwJSoj|2^20|1:32\r\n"
+ "a=rtpmap:120 VP8/90000\r\n"
+ "a=ssrc-group:FEC 2 3\r\n"
+ "a=ssrc:2 cname:stream_1_cname\r\n"
+ "a=ssrc:2 msid:local_stream_1 video_track_id_1\r\n"
+ "a=ssrc:3 cname:stream_1_cname\r\n"
+ "a=ssrc:3 msid:local_stream_1 video_track_id_1\r\n"
+ "a=ssrc:5 cname:stream_2_cname\r\n"
+ "a=ssrc:5 msid:local_stream_2 video_track_id_2\r\n"
+ "a=ssrc:6 cname:stream_2_cname\r\n"
+ "a=ssrc:6 msid:local_stream_2 video_track_id_3\r\n";
+
+// Unified Plan SDP reference string, with 2 streams, 2 audio tracks and 3 video
+// tracks.
+static const char kUnifiedPlanSdpFullString[] =
+ "v=0\r\n"
+ "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=extmap-allow-mixed\r\n"
+ "a=msid-semantic: WMS local_stream_1\r\n"
+ // Audio track 1, stream 1 (with candidates).
+ "m=audio 2345 RTP/SAVPF 111 103 104\r\n"
+ "c=IN IP4 74.125.127.126\r\n"
+ "a=rtcp:2347 IN IP4 74.125.127.126\r\n"
+ "a=candidate:a0+B/1 1 udp 2130706432 192.168.1.5 1234 typ host "
+ "generation 2\r\n"
+ "a=candidate:a0+B/1 2 udp 2130706432 192.168.1.5 1235 typ host "
+ "generation 2\r\n"
+ "a=candidate:a0+B/2 1 udp 2130706432 ::1 1238 typ host "
+ "generation 2\r\n"
+ "a=candidate:a0+B/2 2 udp 2130706432 ::1 1239 typ host "
+ "generation 2\r\n"
+ "a=candidate:a0+B/3 1 udp 2130706432 74.125.127.126 2345 typ srflx "
+ "raddr 192.168.1.5 rport 2346 "
+ "generation 2\r\n"
+ "a=candidate:a0+B/3 2 udp 2130706432 74.125.127.126 2347 typ srflx "
+ "raddr 192.168.1.5 rport 2348 "
+ "generation 2\r\n"
+ "a=ice-ufrag:ufrag_voice\r\na=ice-pwd:pwd_voice\r\n"
+ "a=mid:audio_content_name\r\n"
+ "a=msid:local_stream_1 audio_track_id_1\r\n"
+ "a=sendrecv\r\n"
+ "a=rtcp-mux\r\n"
+ "a=rtcp-rsize\r\n"
+ "a=crypto:1 AES_CM_128_HMAC_SHA1_32 "
+ "inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj|2^20|1:32 "
+ "dummy_session_params\r\n"
+ "a=rtpmap:111 opus/48000/2\r\n"
+ "a=rtpmap:103 ISAC/16000\r\n"
+ "a=rtpmap:104 ISAC/32000\r\n"
+ "a=ssrc:1 cname:stream_1_cname\r\n"
+ // Video track 1, stream 1 (with candidates).
+ "m=video 3457 RTP/SAVPF 120\r\n"
+ "c=IN IP4 74.125.224.39\r\n"
+ "a=rtcp:3456 IN IP4 74.125.224.39\r\n"
+ "a=candidate:a0+B/1 2 udp 2130706432 192.168.1.5 1236 typ host "
+ "generation 2\r\n"
+ "a=candidate:a0+B/1 1 udp 2130706432 192.168.1.5 1237 typ host "
+ "generation 2\r\n"
+ "a=candidate:a0+B/2 2 udp 2130706432 ::1 1240 typ host "
+ "generation 2\r\n"
+ "a=candidate:a0+B/2 1 udp 2130706432 ::1 1241 typ host "
+ "generation 2\r\n"
+ "a=candidate:a0+B/4 2 udp 2130706432 74.125.224.39 3456 typ relay "
+ "generation 2\r\n"
+ "a=candidate:a0+B/4 1 udp 2130706432 74.125.224.39 3457 typ relay "
+ "generation 2\r\n"
+ "a=ice-ufrag:ufrag_video\r\na=ice-pwd:pwd_video\r\n"
+ "a=mid:video_content_name\r\n"
+ "a=msid:local_stream_1 video_track_id_1\r\n"
+ "a=sendrecv\r\n"
+ "a=crypto:1 AES_CM_128_HMAC_SHA1_80 "
+ "inline:d0RmdmcmVCspeEc3QGZiNWpVLFJhQX1cfHAwJSoj|2^20|1:32\r\n"
+ "a=rtpmap:120 VP8/90000\r\n"
+ "a=ssrc-group:FEC 2 3\r\n"
+ "a=ssrc:2 cname:stream_1_cname\r\n"
+ "a=ssrc:3 cname:stream_1_cname\r\n"
+ // Audio track 2, stream 2.
+ "m=audio 9 RTP/SAVPF 111 103 104\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtcp:9 IN IP4 0.0.0.0\r\n"
+ "a=ice-ufrag:ufrag_voice_2\r\na=ice-pwd:pwd_voice_2\r\n"
+ "a=mid:audio_content_name_2\r\n"
+ "a=msid:local_stream_2 audio_track_id_2\r\n"
+ "a=sendrecv\r\n"
+ "a=rtcp-mux\r\n"
+ "a=rtcp-rsize\r\n"
+ "a=crypto:1 AES_CM_128_HMAC_SHA1_32 "
+ "inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj|2^20|1:32 "
+ "dummy_session_params\r\n"
+ "a=rtpmap:111 opus/48000/2\r\n"
+ "a=rtpmap:103 ISAC/16000\r\n"
+ "a=rtpmap:104 ISAC/32000\r\n"
+ "a=ssrc:4 cname:stream_2_cname\r\n"
+ // Video track 2, stream 2.
+ "m=video 9 RTP/SAVPF 120\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtcp:9 IN IP4 0.0.0.0\r\n"
+ "a=ice-ufrag:ufrag_video_2\r\na=ice-pwd:pwd_video_2\r\n"
+ "a=mid:video_content_name_2\r\n"
+ "a=msid:local_stream_2 video_track_id_2\r\n"
+ "a=sendrecv\r\n"
+ "a=crypto:1 AES_CM_128_HMAC_SHA1_80 "
+ "inline:d0RmdmcmVCspeEc3QGZiNWpVLFJhQX1cfHAwJSoj|2^20|1:32\r\n"
+ "a=rtpmap:120 VP8/90000\r\n"
+ "a=ssrc:5 cname:stream_2_cname\r\n"
+ // Video track 3, stream 2.
+ "m=video 9 RTP/SAVPF 120\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtcp:9 IN IP4 0.0.0.0\r\n"
+ "a=ice-ufrag:ufrag_video_3\r\na=ice-pwd:pwd_video_3\r\n"
+ "a=mid:video_content_name_3\r\n"
+ "a=msid:local_stream_2 video_track_id_3\r\n"
+ "a=sendrecv\r\n"
+ "a=crypto:1 AES_CM_128_HMAC_SHA1_80 "
+ "inline:d0RmdmcmVCspeEc3QGZiNWpVLFJhQX1cfHAwJSoj|2^20|1:32\r\n"
+ "a=rtpmap:120 VP8/90000\r\n"
+ "a=ssrc:6 cname:stream_2_cname\r\n";
+
+// Unified Plan SDP reference string:
+// - audio track 1 has 1 a=msid lines
+// - audio track 2 has 2 a=msid lines
+// - audio track 3 has 1 a=msid line with the special "-" marker signifying that
+// there are 0 media stream ids.
+// This Unified Plan SDP represents a SDP that signals the msid using both
+// a=msid and a=ssrc msid semantics.
+static const char kUnifiedPlanSdpFullStringWithSpecialMsid[] =
+ "v=0\r\n"
+ "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=extmap-allow-mixed\r\n"
+ "a=msid-semantic: WMS local_stream_1\r\n"
+ // Audio track 1, with 1 stream id.
+ "m=audio 2345 RTP/SAVPF 111 103 104\r\n"
+ "c=IN IP4 74.125.127.126\r\n"
+ "a=rtcp:2347 IN IP4 74.125.127.126\r\n"
+ "a=candidate:a0+B/1 1 udp 2130706432 192.168.1.5 1234 typ host "
+ "generation 2\r\n"
+ "a=candidate:a0+B/1 2 udp 2130706432 192.168.1.5 1235 typ host "
+ "generation 2\r\n"
+ "a=candidate:a0+B/2 1 udp 2130706432 ::1 1238 typ host "
+ "generation 2\r\n"
+ "a=candidate:a0+B/2 2 udp 2130706432 ::1 1239 typ host "
+ "generation 2\r\n"
+ "a=candidate:a0+B/3 1 udp 2130706432 74.125.127.126 2345 typ srflx "
+ "raddr 192.168.1.5 rport 2346 "
+ "generation 2\r\n"
+ "a=candidate:a0+B/3 2 udp 2130706432 74.125.127.126 2347 typ srflx "
+ "raddr 192.168.1.5 rport 2348 "
+ "generation 2\r\n"
+ "a=ice-ufrag:ufrag_voice\r\na=ice-pwd:pwd_voice\r\n"
+ "a=mid:audio_content_name\r\n"
+ "a=sendrecv\r\n"
+ "a=msid:local_stream_1 audio_track_id_1\r\n"
+ "a=rtcp-mux\r\n"
+ "a=rtcp-rsize\r\n"
+ "a=crypto:1 AES_CM_128_HMAC_SHA1_32 "
+ "inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj|2^20|1:32 "
+ "dummy_session_params\r\n"
+ "a=rtpmap:111 opus/48000/2\r\n"
+ "a=rtpmap:103 ISAC/16000\r\n"
+ "a=rtpmap:104 ISAC/32000\r\n"
+ "a=ssrc:1 cname:stream_1_cname\r\n"
+ "a=ssrc:1 msid:local_stream_1 audio_track_id_1\r\n"
+ // Audio track 2, with two stream ids.
+ "m=audio 9 RTP/SAVPF 111 103 104\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtcp:9 IN IP4 0.0.0.0\r\n"
+ "a=ice-ufrag:ufrag_voice_2\r\na=ice-pwd:pwd_voice_2\r\n"
+ "a=mid:audio_content_name_2\r\n"
+ "a=sendrecv\r\n"
+ "a=msid:local_stream_1 audio_track_id_2\r\n"
+ "a=msid:local_stream_2 audio_track_id_2\r\n"
+ "a=rtcp-mux\r\n"
+ "a=rtcp-rsize\r\n"
+ "a=crypto:1 AES_CM_128_HMAC_SHA1_32 "
+ "inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj|2^20|1:32 "
+ "dummy_session_params\r\n"
+ "a=rtpmap:111 opus/48000/2\r\n"
+ "a=rtpmap:103 ISAC/16000\r\n"
+ "a=rtpmap:104 ISAC/32000\r\n"
+ "a=ssrc:4 cname:stream_1_cname\r\n"
+ // The support for Plan B msid signaling only includes the
+ // first media stream id "local_stream_1."
+ "a=ssrc:4 msid:local_stream_1 audio_track_id_2\r\n"
+ // Audio track 3, with no stream ids.
+ "m=audio 9 RTP/SAVPF 111 103 104\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtcp:9 IN IP4 0.0.0.0\r\n"
+ "a=ice-ufrag:ufrag_voice_3\r\na=ice-pwd:pwd_voice_3\r\n"
+ "a=mid:audio_content_name_3\r\n"
+ "a=sendrecv\r\n"
+ "a=msid:- audio_track_id_3\r\n"
+ "a=rtcp-mux\r\n"
+ "a=rtcp-rsize\r\n"
+ "a=crypto:1 AES_CM_128_HMAC_SHA1_32 "
+ "inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj|2^20|1:32 "
+ "dummy_session_params\r\n"
+ "a=rtpmap:111 opus/48000/2\r\n"
+ "a=rtpmap:103 ISAC/16000\r\n"
+ "a=rtpmap:104 ISAC/32000\r\n"
+ "a=ssrc:7 cname:stream_2_cname\r\n"
+ "a=ssrc:7 msid:- audio_track_id_3\r\n";
+
+// SDP string for unified plan without SSRCs
+static const char kUnifiedPlanSdpFullStringNoSsrc[] =
+ "v=0\r\n"
+ "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=msid-semantic: WMS local_stream_1\r\n"
+ // Audio track 1, stream 1 (with candidates).
+ "m=audio 2345 RTP/SAVPF 111 103 104\r\n"
+ "c=IN IP4 74.125.127.126\r\n"
+ "a=rtcp:2347 IN IP4 74.125.127.126\r\n"
+ "a=candidate:a0+B/1 1 udp 2130706432 192.168.1.5 1234 typ host "
+ "generation 2\r\n"
+ "a=candidate:a0+B/1 2 udp 2130706432 192.168.1.5 1235 typ host "
+ "generation 2\r\n"
+ "a=candidate:a0+B/2 1 udp 2130706432 ::1 1238 typ host "
+ "generation 2\r\n"
+ "a=candidate:a0+B/2 2 udp 2130706432 ::1 1239 typ host "
+ "generation 2\r\n"
+ "a=candidate:a0+B/3 1 udp 2130706432 74.125.127.126 2345 typ srflx "
+ "raddr 192.168.1.5 rport 2346 "
+ "generation 2\r\n"
+ "a=candidate:a0+B/3 2 udp 2130706432 74.125.127.126 2347 typ srflx "
+ "raddr 192.168.1.5 rport 2348 "
+ "generation 2\r\n"
+ "a=ice-ufrag:ufrag_voice\r\na=ice-pwd:pwd_voice\r\n"
+ "a=mid:audio_content_name\r\n"
+ "a=msid:local_stream_1 audio_track_id_1\r\n"
+ "a=sendrecv\r\n"
+ "a=rtcp-mux\r\n"
+ "a=rtcp-rsize\r\n"
+ "a=crypto:1 AES_CM_128_HMAC_SHA1_32 "
+ "inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj|2^20|1:32 "
+ "dummy_session_params\r\n"
+ "a=rtpmap:111 opus/48000/2\r\n"
+ "a=rtpmap:103 ISAC/16000\r\n"
+ "a=rtpmap:104 ISAC/32000\r\n"
+ // Video track 1, stream 1 (with candidates).
+ "m=video 3457 RTP/SAVPF 120\r\n"
+ "c=IN IP4 74.125.224.39\r\n"
+ "a=rtcp:3456 IN IP4 74.125.224.39\r\n"
+ "a=candidate:a0+B/1 2 udp 2130706432 192.168.1.5 1236 typ host "
+ "generation 2\r\n"
+ "a=candidate:a0+B/1 1 udp 2130706432 192.168.1.5 1237 typ host "
+ "generation 2\r\n"
+ "a=candidate:a0+B/2 2 udp 2130706432 ::1 1240 typ host "
+ "generation 2\r\n"
+ "a=candidate:a0+B/2 1 udp 2130706432 ::1 1241 typ host "
+ "generation 2\r\n"
+ "a=candidate:a0+B/4 2 udp 2130706432 74.125.224.39 3456 typ relay "
+ "generation 2\r\n"
+ "a=candidate:a0+B/4 1 udp 2130706432 74.125.224.39 3457 typ relay "
+ "generation 2\r\n"
+ "a=ice-ufrag:ufrag_video\r\na=ice-pwd:pwd_video\r\n"
+ "a=mid:video_content_name\r\n"
+ "a=msid:local_stream_1 video_track_id_1\r\n"
+ "a=sendrecv\r\n"
+ "a=crypto:1 AES_CM_128_HMAC_SHA1_80 "
+ "inline:d0RmdmcmVCspeEc3QGZiNWpVLFJhQX1cfHAwJSoj|2^20|1:32\r\n"
+ "a=rtpmap:120 VP8/90000\r\n"
+ // Audio track 2, stream 2.
+ "m=audio 9 RTP/SAVPF 111 103 104\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtcp:9 IN IP4 0.0.0.0\r\n"
+ "a=ice-ufrag:ufrag_voice_2\r\na=ice-pwd:pwd_voice_2\r\n"
+ "a=mid:audio_content_name_2\r\n"
+ "a=msid:local_stream_2 audio_track_id_2\r\n"
+ "a=sendrecv\r\n"
+ "a=rtcp-mux\r\n"
+ "a=rtcp-rsize\r\n"
+ "a=crypto:1 AES_CM_128_HMAC_SHA1_32 "
+ "inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj|2^20|1:32 "
+ "dummy_session_params\r\n"
+ "a=rtpmap:111 opus/48000/2\r\n"
+ "a=rtpmap:103 ISAC/16000\r\n"
+ "a=rtpmap:104 ISAC/32000\r\n"
+ // Video track 2, stream 2.
+ "m=video 9 RTP/SAVPF 120\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtcp:9 IN IP4 0.0.0.0\r\n"
+ "a=ice-ufrag:ufrag_video_2\r\na=ice-pwd:pwd_video_2\r\n"
+ "a=mid:video_content_name_2\r\n"
+ "a=msid:local_stream_2 video_track_id_2\r\n"
+ "a=sendrecv\r\n"
+ "a=crypto:1 AES_CM_128_HMAC_SHA1_80 "
+ "inline:d0RmdmcmVCspeEc3QGZiNWpVLFJhQX1cfHAwJSoj|2^20|1:32\r\n"
+ "a=rtpmap:120 VP8/90000\r\n"
+ // Video track 3, stream 2.
+ "m=video 9 RTP/SAVPF 120\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtcp:9 IN IP4 0.0.0.0\r\n"
+ "a=ice-ufrag:ufrag_video_3\r\na=ice-pwd:pwd_video_3\r\n"
+ "a=mid:video_content_name_3\r\n"
+ "a=msid:local_stream_2 video_track_id_3\r\n"
+ "a=sendrecv\r\n"
+ "a=crypto:1 AES_CM_128_HMAC_SHA1_80 "
+ "inline:d0RmdmcmVCspeEc3QGZiNWpVLFJhQX1cfHAwJSoj|2^20|1:32\r\n"
+ "a=rtpmap:120 VP8/90000\r\n";
+
+// One candidate reference string as per W3c spec.
+// candidate:<blah> not a=candidate:<blah>CRLF
+static const char kRawCandidate[] =
+ "candidate:a0+B/1 1 udp 2130706432 192.168.1.5 1234 typ host generation 2";
+// One candidate reference string.
+static const char kSdpOneCandidate[] =
+ "a=candidate:a0+B/1 1 udp 2130706432 192.168.1.5 1234 typ host "
+ "generation 2\r\n";
+
+static const char kSdpTcpActiveCandidate[] =
+ "candidate:a0+B/1 1 tcp 2130706432 192.168.1.5 9 typ host "
+ "tcptype active generation 2";
+static const char kSdpTcpPassiveCandidate[] =
+ "candidate:a0+B/1 1 tcp 2130706432 192.168.1.5 9 typ host "
+ "tcptype passive generation 2";
+static const char kSdpTcpSOCandidate[] =
+ "candidate:a0+B/1 1 tcp 2130706432 192.168.1.5 9 typ host "
+ "tcptype so generation 2";
+static const char kSdpTcpInvalidCandidate[] =
+ "candidate:a0+B/1 1 tcp 2130706432 192.168.1.5 9 typ host "
+ "tcptype invalid generation 2";
+
+// One candidate reference string with IPV6 address.
+static const char kRawIPV6Candidate[] =
+ "candidate:a0+B/1 1 udp 2130706432 "
+ "abcd:abcd:abcd:abcd:abcd:abcd:abcd:abcd 1234 typ host generation 2";
+
+// One candidate reference string.
+static const char kSdpOneCandidateWithUfragPwd[] =
+ "a=candidate:a0+B/1 1 udp 2130706432 192.168.1.5 1234 typ host network_name"
+ " eth0 ufrag user_rtp pwd password_rtp generation 2\r\n";
+
+static const char kRawHostnameCandidate[] =
+ "candidate:a0+B/1 1 udp 2130706432 a.test 1234 typ host generation 2";
+
+// Session id and version
+static const char kSessionId[] = "18446744069414584320";
+static const char kSessionVersion[] = "18446462598732840960";
+
+// ICE options.
+static const char kIceOption1[] = "iceoption1";
+static const char kIceOption2[] = "iceoption2";
+static const char kIceOption3[] = "iceoption3";
+
+// ICE ufrags/passwords.
+static const char kUfragVoice[] = "ufrag_voice";
+static const char kPwdVoice[] = "pwd_voice";
+static const char kUfragVideo[] = "ufrag_video";
+static const char kPwdVideo[] = "pwd_video";
+static const char kUfragData[] = "ufrag_data";
+static const char kPwdData[] = "pwd_data";
+
+// Extra ufrags/passwords for extra unified plan m= sections.
+static const char kUfragVoice2[] = "ufrag_voice_2";
+static const char kPwdVoice2[] = "pwd_voice_2";
+static const char kUfragVoice3[] = "ufrag_voice_3";
+static const char kPwdVoice3[] = "pwd_voice_3";
+static const char kUfragVideo2[] = "ufrag_video_2";
+static const char kPwdVideo2[] = "pwd_video_2";
+static const char kUfragVideo3[] = "ufrag_video_3";
+static const char kPwdVideo3[] = "pwd_video_3";
+
+// Content names for the reference description's m= sections.
+static const char kAudioContentName[] = "audio_content_name";
+static const char kVideoContentName[] = "video_content_name";
+static const char kDataContentName[] = "data_content_name";
+
+// Extra content names for extra unified plan m= sections.
+static const char kAudioContentName2[] = "audio_content_name_2";
+static const char kAudioContentName3[] = "audio_content_name_3";
+static const char kVideoContentName2[] = "video_content_name_2";
+static const char kVideoContentName3[] = "video_content_name_3";
+
+// MediaStream 1
+static const char kStreamId1[] = "local_stream_1";
+static const char kStream1Cname[] = "stream_1_cname";
+static const char kAudioTrackId1[] = "audio_track_id_1";
+static const uint32_t kAudioTrack1Ssrc = 1;
+static const char kVideoTrackId1[] = "video_track_id_1";
+static const uint32_t kVideoTrack1Ssrc1 = 2;
+static const uint32_t kVideoTrack1Ssrc2 = 3;
+
+// MediaStream 2
+static const char kStreamId2[] = "local_stream_2";
+static const char kStream2Cname[] = "stream_2_cname";
+static const char kAudioTrackId2[] = "audio_track_id_2";
+static const uint32_t kAudioTrack2Ssrc = 4;
+static const char kVideoTrackId2[] = "video_track_id_2";
+static const uint32_t kVideoTrack2Ssrc = 5;
+static const char kVideoTrackId3[] = "video_track_id_3";
+static const uint32_t kVideoTrack3Ssrc = 6;
+static const char kAudioTrackId3[] = "audio_track_id_3";
+static const uint32_t kAudioTrack3Ssrc = 7;
+
+// Candidate: mid / m-line index used for standalone candidate serialization.
+static const char kDummyMid[] = "dummy_mid";
+static const int kDummyIndex = 123;
+
+// Misc
+// Default SDP type used when constructing JsepSessionDescriptions here.
+// NOTE(review): not declared const; presumably never reassigned -- confirm.
+static SdpType kDummyType = SdpType::kOffer;
+
+// Helper functions
+
+static bool SdpDeserialize(const std::string& message,
+ JsepSessionDescription* jdesc) {
+ return webrtc::SdpDeserialize(message, jdesc, NULL);
+}
+
+// Deserializes a standalone candidate line into `candidate`, discarding
+// parse-error details. Returns true on success.
+static bool SdpDeserializeCandidate(const std::string& message,
+                                    JsepIceCandidate* candidate) {
+  // nullptr (not NULL) is the idiomatic null pointer in modern C++.
+  return webrtc::SdpDeserializeCandidate(message, candidate, nullptr);
+}
+
+// Add some extra `newlines` to the `message` after `line`.
+// Every occurrence of `line` in `message` is rewritten as `line + newlines`.
+static void InjectAfter(const std::string& line,
+                        const std::string& newlines,
+                        std::string* message) {
+  absl::StrReplaceAll({{line, line + newlines}}, message);
+}
+
+// Replaces every occurrence of `line` in `message` with `newlines`.
+static void Replace(const std::string& line,
+                    const std::string& newlines,
+                    std::string* message) {
+  absl::StrReplaceAll({{line, newlines}}, message);
+}
+
+// Expect a parse failure on the line containing `bad_part` when attempting to
+// parse `bad_sdp`.
+static void ExpectParseFailure(const std::string& bad_sdp,
+                               const std::string& bad_part) {
+  JsepSessionDescription desc(kDummyType);
+  SdpParseError error;
+  bool ret = webrtc::SdpDeserialize(bad_sdp, &desc, &error);
+  ASSERT_FALSE(ret);
+  // std::string::find accepts a std::string directly; the previous
+  // `.c_str()` forced a redundant conversion through const char*.
+  EXPECT_NE(std::string::npos, error.line.find(bad_part))
+      << "Did not find " << bad_part << " in " << error.line;
+}
+
+// Expect fail to parse kSdpFullString if replace `good_part` with `bad_part`.
+// `bad_part` must still appear verbatim on the failing line for the check in
+// the string overload above to locate it.
+static void ExpectParseFailure(const char* good_part, const char* bad_part) {
+  std::string bad_sdp = kSdpFullString;
+  Replace(good_part, bad_part, &bad_sdp);
+  ExpectParseFailure(bad_sdp, bad_part);
+}
+
+// Expect fail to parse kSdpFullString if add `newlines` after `injectpoint`.
+// `bad_part` identifies the line the parser is expected to report.
+static void ExpectParseFailureWithNewLines(const std::string& injectpoint,
+                                           const std::string& newlines,
+                                           const std::string& bad_part) {
+  std::string bad_sdp = kSdpFullString;
+  InjectAfter(injectpoint, newlines, &bad_sdp);
+  ExpectParseFailure(bad_sdp, bad_part);
+}
+
+// Rewrites every "a=sendrecv" attribute in `message` to the SDP direction
+// attribute corresponding to `direction`.
+static void ReplaceDirection(RtpTransceiverDirection direction,
+                             std::string* message) {
+  // Default to the reference direction; overwritten for the other cases.
+  const char* new_direction = "a=sendrecv";
+  switch (direction) {
+    case RtpTransceiverDirection::kInactive:
+      new_direction = "a=inactive";
+      break;
+    case RtpTransceiverDirection::kSendOnly:
+      new_direction = "a=sendonly";
+      break;
+    case RtpTransceiverDirection::kRecvOnly:
+      new_direction = "a=recvonly";
+      break;
+    case RtpTransceiverDirection::kSendRecv:
+      break;
+    case RtpTransceiverDirection::kStopped:
+    default:
+      // Not exercised by these tests; keep the reference direction.
+      RTC_DCHECK_NOTREACHED();
+      break;
+  }
+  Replace("a=sendrecv", new_direction, message);
+}
+
+// Marks the audio and/or video m= section of `message` as rejected by
+// setting its port to 0 and stripping its ICE credentials, mirroring what a
+// serializer produces for rejected contents.
+static void ReplaceRejected(bool audio_rejected,
+                            bool video_rejected,
+                            std::string* message) {
+  if (audio_rejected) {
+    Replace("m=audio 9", "m=audio 0", message);
+    Replace(kAttributeIceUfragVoice, "", message);
+    Replace(kAttributeIcePwdVoice, "", message);
+  }
+  if (video_rejected) {
+    Replace("m=video 9", "m=video 0", message);
+    Replace(kAttributeIceUfragVideo, "", message);
+    Replace(kAttributeIcePwdVideo, "", message);
+  }
+}
+
+// WebRtcSdpTest
+
+class WebRtcSdpTest : public ::testing::Test {
+ public:
+  // Builds the reference session description shared by most tests: one audio
+  // and one video m= section, each with a stream, a transport, and a full set
+  // of host (v4 + v6), server-reflexive and relay candidates.
+  WebRtcSdpTest() : jdesc_(kDummyType) {
+#ifdef WEBRTC_ANDROID
+    webrtc::InitializeAndroidObjects();
+#endif
+    // AudioContentDescription
+    audio_desc_ = CreateAudioContentDescription();
+    StreamParams audio_stream;
+    audio_stream.id = kAudioTrackId1;
+    audio_stream.cname = kStream1Cname;
+    audio_stream.set_stream_ids({kStreamId1});
+    audio_stream.ssrcs.push_back(kAudioTrack1Ssrc);
+    audio_desc_->AddStream(audio_stream);
+    rtc::SocketAddress audio_addr("74.125.127.126", 2345);
+    audio_desc_->set_connection_address(audio_addr);
+    // desc_ takes ownership; audio_desc_ remains a non-owning observer.
+    desc_.AddContent(kAudioContentName, MediaProtocolType::kRtp,
+                     absl::WrapUnique(audio_desc_));
+
+    // VideoContentDescription
+    video_desc_ = CreateVideoContentDescription();
+    StreamParams video_stream;
+    video_stream.id = kVideoTrackId1;
+    video_stream.cname = kStream1Cname;
+    video_stream.set_stream_ids({kStreamId1});
+    video_stream.ssrcs.push_back(kVideoTrack1Ssrc1);
+    video_stream.ssrcs.push_back(kVideoTrack1Ssrc2);
+    cricket::SsrcGroup ssrc_group(kFecSsrcGroupSemantics, video_stream.ssrcs);
+    video_stream.ssrc_groups.push_back(ssrc_group);
+    video_desc_->AddStream(video_stream);
+    rtc::SocketAddress video_addr("74.125.224.39", 3457);
+    video_desc_->set_connection_address(video_addr);
+    desc_.AddContent(kVideoContentName, MediaProtocolType::kRtp,
+                     absl::WrapUnique(video_desc_));
+
+    // TransportInfo
+    desc_.AddTransportInfo(TransportInfo(
+        kAudioContentName, TransportDescription(kUfragVoice, kPwdVoice)));
+    desc_.AddTransportInfo(TransportInfo(
+        kVideoContentName, TransportDescription(kUfragVideo, kPwdVideo)));
+
+    // v4 host
+    int port = 1234;
+    rtc::SocketAddress address("192.168.1.5", port++);
+    Candidate candidate1(ICE_CANDIDATE_COMPONENT_RTP, "udp", address,
+                         kCandidatePriority, "", "", LOCAL_PORT_TYPE,
+                         kCandidateGeneration, kCandidateFoundation1);
+    address.SetPort(port++);
+    Candidate candidate2(ICE_CANDIDATE_COMPONENT_RTCP, "udp", address,
+                         kCandidatePriority, "", "", LOCAL_PORT_TYPE,
+                         kCandidateGeneration, kCandidateFoundation1);
+    address.SetPort(port++);
+    Candidate candidate3(ICE_CANDIDATE_COMPONENT_RTCP, "udp", address,
+                         kCandidatePriority, "", "", LOCAL_PORT_TYPE,
+                         kCandidateGeneration, kCandidateFoundation1);
+    address.SetPort(port++);
+    Candidate candidate4(ICE_CANDIDATE_COMPONENT_RTP, "udp", address,
+                         kCandidatePriority, "", "", LOCAL_PORT_TYPE,
+                         kCandidateGeneration, kCandidateFoundation1);
+
+    // v6 host
+    rtc::SocketAddress v6_address("::1", port++);
+    cricket::Candidate candidate5(cricket::ICE_CANDIDATE_COMPONENT_RTP, "udp",
+                                  v6_address, kCandidatePriority, "", "",
+                                  cricket::LOCAL_PORT_TYPE,
+                                  kCandidateGeneration, kCandidateFoundation2);
+    v6_address.SetPort(port++);
+    cricket::Candidate candidate6(cricket::ICE_CANDIDATE_COMPONENT_RTCP, "udp",
+                                  v6_address, kCandidatePriority, "", "",
+                                  cricket::LOCAL_PORT_TYPE,
+                                  kCandidateGeneration, kCandidateFoundation2);
+    v6_address.SetPort(port++);
+    cricket::Candidate candidate7(cricket::ICE_CANDIDATE_COMPONENT_RTCP, "udp",
+                                  v6_address, kCandidatePriority, "", "",
+                                  cricket::LOCAL_PORT_TYPE,
+                                  kCandidateGeneration, kCandidateFoundation2);
+    v6_address.SetPort(port++);
+    cricket::Candidate candidate8(cricket::ICE_CANDIDATE_COMPONENT_RTP, "udp",
+                                  v6_address, kCandidatePriority, "", "",
+                                  cricket::LOCAL_PORT_TYPE,
+                                  kCandidateGeneration, kCandidateFoundation2);
+
+    // stun
+    int port_stun = 2345;
+    rtc::SocketAddress address_stun("74.125.127.126", port_stun++);
+    rtc::SocketAddress rel_address_stun("192.168.1.5", port_stun++);
+    cricket::Candidate candidate9(cricket::ICE_CANDIDATE_COMPONENT_RTP, "udp",
+                                  address_stun, kCandidatePriority, "", "",
+                                  STUN_PORT_TYPE, kCandidateGeneration,
+                                  kCandidateFoundation3);
+    candidate9.set_related_address(rel_address_stun);
+
+    address_stun.SetPort(port_stun++);
+    rel_address_stun.SetPort(port_stun++);
+    cricket::Candidate candidate10(cricket::ICE_CANDIDATE_COMPONENT_RTCP, "udp",
+                                   address_stun, kCandidatePriority, "", "",
+                                   STUN_PORT_TYPE, kCandidateGeneration,
+                                   kCandidateFoundation3);
+    candidate10.set_related_address(rel_address_stun);
+
+    // relay
+    int port_relay = 3456;
+    rtc::SocketAddress address_relay("74.125.224.39", port_relay++);
+    cricket::Candidate candidate11(cricket::ICE_CANDIDATE_COMPONENT_RTCP, "udp",
+                                   address_relay, kCandidatePriority, "", "",
+                                   cricket::RELAY_PORT_TYPE,
+                                   kCandidateGeneration, kCandidateFoundation4);
+    address_relay.SetPort(port_relay++);
+    cricket::Candidate candidate12(cricket::ICE_CANDIDATE_COMPONENT_RTP, "udp",
+                                   address_relay, kCandidatePriority, "", "",
+                                   RELAY_PORT_TYPE, kCandidateGeneration,
+                                   kCandidateFoundation4);
+
+    // voice
+    candidates_.push_back(candidate1);
+    candidates_.push_back(candidate2);
+    candidates_.push_back(candidate5);
+    candidates_.push_back(candidate6);
+    candidates_.push_back(candidate9);
+    candidates_.push_back(candidate10);
+
+    // video
+    candidates_.push_back(candidate3);
+    candidates_.push_back(candidate4);
+    candidates_.push_back(candidate7);
+    candidates_.push_back(candidate8);
+    candidates_.push_back(candidate11);
+    candidates_.push_back(candidate12);
+
+    // A standalone copy of the first audio candidate; the hard-coded mid
+    // matches kAudioContentName.
+    jcandidate_.reset(
+        new JsepIceCandidate(std::string("audio_content_name"), 0, candidate1));
+
+    // Set up JsepSessionDescription.
+    jdesc_.Initialize(desc_.Clone(), kSessionId, kSessionVersion);
+    std::string mline_id;
+    int mline_index = 0;
+    for (size_t i = 0; i < candidates_.size(); ++i) {
+      // In this test, the audio m line index will be 0, and the video m line
+      // will be 1.
+      bool is_video = (i > 5);
+      mline_id = is_video ? "video_content_name" : "audio_content_name";
+      mline_index = is_video ? 1 : 0;
+      JsepIceCandidate jice(mline_id, mline_index, candidates_.at(i));
+      jdesc_.AddCandidate(&jice);
+    }
+  }
+
+  // Strips every candidate belonging to the video m= section (m-line 1)
+  // from jdesc_.
+  void RemoveVideoCandidates() {
+    const IceCandidateCollection* collection = jdesc_.candidates(1);
+    ASSERT_NE(nullptr, collection);
+    std::vector<cricket::Candidate> to_remove;
+    to_remove.reserve(collection->count());
+    for (size_t i = 0; i < collection->count(); ++i) {
+      cricket::Candidate candidate = collection->at(i)->candidate();
+      // RemoveCandidates matches on transport name, which the stored
+      // candidates do not carry yet.
+      candidate.set_transport_name("video_content_name");
+      to_remove.push_back(candidate);
+    }
+    jdesc_.RemoveCandidates(to_remove);
+  }
+
+  // Turns the existing reference description into a description using
+  // a=bundle-only. This means no transport attributes and a 0 port value on
+  // the m= sections not associated with the BUNDLE-tag.
+  void MakeBundleOnlyDescription() {
+    RemoveVideoCandidates();
+
+    // And the rest of the transport attributes.
+    // Index 1 is the video section (audio is 0) in the reference description.
+    desc_.transport_infos()[1].description.ice_ufrag.clear();
+    desc_.transport_infos()[1].description.ice_pwd.clear();
+    desc_.transport_infos()[1].description.connection_role =
+        cricket::CONNECTIONROLE_NONE;
+
+    // Set bundle-only flag.
+    desc_.contents()[1].bundle_only = true;
+
+    // Add BUNDLE group.
+    ContentGroup group(cricket::GROUP_TYPE_BUNDLE);
+    group.AddContentName(kAudioContentName);
+    group.AddContentName(kVideoContentName);
+    desc_.AddGroup(group);
+
+    // Re-initialize jdesc_ from the modified desc_.
+    ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(),
+                                  jdesc_.session_version()));
+  }
+
+  // Turns the existing reference description into a plan B description,
+  // with 2 audio tracks and 3 video tracks.
+  void MakePlanBDescription() {
+    // Copy first: RemoveContentByName below destroys the descriptions that
+    // desc_ currently owns, so fresh heap copies must replace the observers.
+    audio_desc_ = new AudioContentDescription(*audio_desc_);
+    video_desc_ = new VideoContentDescription(*video_desc_);
+
+    StreamParams audio_track_2;
+    audio_track_2.id = kAudioTrackId2;
+    audio_track_2.cname = kStream2Cname;
+    audio_track_2.set_stream_ids({kStreamId2});
+    audio_track_2.ssrcs.push_back(kAudioTrack2Ssrc);
+    audio_desc_->AddStream(audio_track_2);
+
+    StreamParams video_track_2;
+    video_track_2.id = kVideoTrackId2;
+    video_track_2.cname = kStream2Cname;
+    video_track_2.set_stream_ids({kStreamId2});
+    video_track_2.ssrcs.push_back(kVideoTrack2Ssrc);
+    video_desc_->AddStream(video_track_2);
+
+    StreamParams video_track_3;
+    video_track_3.id = kVideoTrackId3;
+    video_track_3.cname = kStream2Cname;
+    video_track_3.set_stream_ids({kStreamId2});
+    video_track_3.ssrcs.push_back(kVideoTrack3Ssrc);
+    video_desc_->AddStream(video_track_3);
+
+    desc_.RemoveContentByName(kAudioContentName);
+    desc_.RemoveContentByName(kVideoContentName);
+    desc_.AddContent(kAudioContentName, MediaProtocolType::kRtp,
+                     absl::WrapUnique(audio_desc_));
+    desc_.AddContent(kVideoContentName, MediaProtocolType::kRtp,
+                     absl::WrapUnique(video_desc_));
+
+    ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(),
+                                  jdesc_.session_version()));
+  }
+
+  // Turns the existing reference description into a unified plan description,
+  // with 2 audio tracks and 3 video tracks. When `use_ssrcs` is false, the
+  // added tracks carry no ssrc/cname, exercising a=msid-only signaling.
+  void MakeUnifiedPlanDescription(bool use_ssrcs = true) {
+    // Audio track 2.
+    AudioContentDescription* audio_desc_2 = CreateAudioContentDescription();
+    StreamParams audio_track_2;
+    audio_track_2.id = kAudioTrackId2;
+    audio_track_2.set_stream_ids({kStreamId2});
+    if (use_ssrcs) {
+      audio_track_2.cname = kStream2Cname;
+      audio_track_2.ssrcs.push_back(kAudioTrack2Ssrc);
+    }
+    audio_desc_2->AddStream(audio_track_2);
+    desc_.AddContent(kAudioContentName2, MediaProtocolType::kRtp,
+                     absl::WrapUnique(audio_desc_2));
+    desc_.AddTransportInfo(TransportInfo(
+        kAudioContentName2, TransportDescription(kUfragVoice2, kPwdVoice2)));
+    // Video track 2, in stream 2.
+    VideoContentDescription* video_desc_2 = CreateVideoContentDescription();
+    StreamParams video_track_2;
+    video_track_2.id = kVideoTrackId2;
+    video_track_2.set_stream_ids({kStreamId2});
+    if (use_ssrcs) {
+      video_track_2.cname = kStream2Cname;
+      video_track_2.ssrcs.push_back(kVideoTrack2Ssrc);
+    }
+    video_desc_2->AddStream(video_track_2);
+    desc_.AddContent(kVideoContentName2, MediaProtocolType::kRtp,
+                     absl::WrapUnique(video_desc_2));
+    desc_.AddTransportInfo(TransportInfo(
+        kVideoContentName2, TransportDescription(kUfragVideo2, kPwdVideo2)));
+
+    // Video track 3, in stream 2.
+    VideoContentDescription* video_desc_3 = CreateVideoContentDescription();
+    StreamParams video_track_3;
+    video_track_3.id = kVideoTrackId3;
+    video_track_3.set_stream_ids({kStreamId2});
+    if (use_ssrcs) {
+      video_track_3.cname = kStream2Cname;
+      video_track_3.ssrcs.push_back(kVideoTrack3Ssrc);
+    }
+    video_desc_3->AddStream(video_track_3);
+    desc_.AddContent(kVideoContentName3, MediaProtocolType::kRtp,
+                     absl::WrapUnique(video_desc_3));
+    desc_.AddTransportInfo(TransportInfo(
+        kVideoContentName3, TransportDescription(kUfragVideo3, kPwdVideo3)));
+    // Unified plan signals msid at the media-section level.
+    desc_.set_msid_signaling(cricket::kMsidSignalingMediaSection);
+
+    ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(),
+                                  jdesc_.session_version()));
+  }
+
+  // Creates an audio content description with no streams, and some default
+  // configuration: rtcp-mux + reduced-size RTCP, one SDES crypto suite, the
+  // SAVPF profile, and the opus/ISAC codec set. Caller takes ownership of
+  // the returned raw pointer (typically handed to desc_.AddContent).
+  AudioContentDescription* CreateAudioContentDescription() {
+    AudioContentDescription* audio = new AudioContentDescription();
+    audio->set_rtcp_mux(true);
+    audio->set_rtcp_reduced_size(true);
+    audio->AddCrypto(CryptoParams(
+        1, "AES_CM_128_HMAC_SHA1_32",
+        "inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj|2^20|1:32",
+        "dummy_session_params"));
+    audio->set_protocol(cricket::kMediaProtocolSavpf);
+    audio->AddCodec(cricket::CreateAudioCodec(111, "opus", 48000, 2));
+    audio->AddCodec(cricket::CreateAudioCodec(103, "ISAC", 16000, 1));
+    audio->AddCodec(cricket::CreateAudioCodec(104, "ISAC", 32000, 1));
+    return audio;
+  }
+
+  // Turns the existing reference description into a unified plan description,
+  // with 3 audio MediaContentDescriptions with special StreamParams that
+  // contain 0 or multiple stream ids:
+  //  - audio track 1 has 1 media stream id,
+  //  - audio track 2 has 2 media stream ids,
+  //  - audio track 3 has 0 media stream ids.
+  void MakeUnifiedPlanDescriptionMultipleStreamIds(const int msid_signaling) {
+    // Drop the video section entirely; only audio is relevant here.
+    desc_.RemoveContentByName(kVideoContentName);
+    desc_.RemoveTransportInfoByName(kVideoContentName);
+    RemoveVideoCandidates();
+
+    // Audio track 2 has 2 media stream ids.
+    AudioContentDescription* audio_desc_2 = CreateAudioContentDescription();
+    StreamParams audio_track_2;
+    audio_track_2.id = kAudioTrackId2;
+    audio_track_2.cname = kStream1Cname;
+    audio_track_2.set_stream_ids({kStreamId1, kStreamId2});
+    audio_track_2.ssrcs.push_back(kAudioTrack2Ssrc);
+    audio_desc_2->AddStream(audio_track_2);
+    desc_.AddContent(kAudioContentName2, MediaProtocolType::kRtp,
+                     absl::WrapUnique(audio_desc_2));
+    desc_.AddTransportInfo(TransportInfo(
+        kAudioContentName2, TransportDescription(kUfragVoice2, kPwdVoice2)));
+
+    // Audio track 3 has no stream ids.
+    AudioContentDescription* audio_desc_3 = CreateAudioContentDescription();
+    StreamParams audio_track_3;
+    audio_track_3.id = kAudioTrackId3;
+    audio_track_3.cname = kStream2Cname;
+    audio_track_3.set_stream_ids({});
+    audio_track_3.ssrcs.push_back(kAudioTrack3Ssrc);
+    audio_desc_3->AddStream(audio_track_3);
+    desc_.AddContent(kAudioContentName3, MediaProtocolType::kRtp,
+                     absl::WrapUnique(audio_desc_3));
+    desc_.AddTransportInfo(TransportInfo(
+        kAudioContentName3, TransportDescription(kUfragVoice3, kPwdVoice3)));
+    desc_.set_msid_signaling(msid_signaling);
+    ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(),
+                                  jdesc_.session_version()));
+  }
+
+  // Turns the existing reference description into a unified plan description
+  // with one audio MediaContentDescription that contains one StreamParams with
+  // 0 ssrcs.
+  void MakeUnifiedPlanDescriptionNoSsrcSignaling() {
+    desc_.RemoveContentByName(kVideoContentName);
+    desc_.RemoveContentByName(kAudioContentName);
+    desc_.RemoveTransportInfoByName(kVideoContentName);
+    RemoveVideoCandidates();
+
+    // Rebuild the audio section without ssrcs/cname on its stream.
+    // The audio TransportInfo added in the constructor is intentionally kept.
+    AudioContentDescription* audio_desc = CreateAudioContentDescription();
+    StreamParams audio_track;
+    audio_track.id = kAudioTrackId1;
+    audio_track.set_stream_ids({kStreamId1});
+    audio_desc->AddStream(audio_track);
+    desc_.AddContent(kAudioContentName, MediaProtocolType::kRtp,
+                     absl::WrapUnique(audio_desc));
+
+    // Enable signaling a=msid lines.
+    desc_.set_msid_signaling(cricket::kMsidSignalingMediaSection);
+    ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(),
+                                  jdesc_.session_version()));
+  }
+
+  // Creates a video content description with no streams, and some default
+  // configuration: one SDES crypto suite, the SAVPF profile, and a single
+  // VP8 codec. Caller takes ownership of the returned raw pointer.
+  VideoContentDescription* CreateVideoContentDescription() {
+    VideoContentDescription* video = new VideoContentDescription();
+    video->AddCrypto(CryptoParams(
+        1, "AES_CM_128_HMAC_SHA1_80",
+        "inline:d0RmdmcmVCspeEc3QGZiNWpVLFJhQX1cfHAwJSoj|2^20|1:32", ""));
+    video->set_protocol(cricket::kMediaProtocolSavpf);
+    video->AddCodec(cricket::CreateVideoCodec(120, "VP8"));
+    return video;
+  }
+
+  // Field-by-field comparison of two media content descriptions of the same
+  // concrete type MCD. Protocol strings are compared up to the DTLS/SCTP
+  // equivalence class so old and new spellings are considered equal.
+  template <class MCD>
+  void CompareMediaContentDescription(const MCD* cd1, const MCD* cd2) {
+    // type
+    EXPECT_EQ(cd1->type(), cd2->type());
+
+    // content direction
+    EXPECT_EQ(cd1->direction(), cd2->direction());
+
+    // rtcp_mux
+    EXPECT_EQ(cd1->rtcp_mux(), cd2->rtcp_mux());
+
+    // rtcp_reduced_size
+    EXPECT_EQ(cd1->rtcp_reduced_size(), cd2->rtcp_reduced_size());
+
+    // cryptos
+    EXPECT_EQ(cd1->cryptos().size(), cd2->cryptos().size());
+    if (cd1->cryptos().size() != cd2->cryptos().size()) {
+      ADD_FAILURE();
+      return;
+    }
+    for (size_t i = 0; i < cd1->cryptos().size(); ++i) {
+      // Bind by const reference; the previous code copied each CryptoParams.
+      const CryptoParams& c1 = cd1->cryptos().at(i);
+      const CryptoParams& c2 = cd2->cryptos().at(i);
+      EXPECT_TRUE(c1.Matches(c2));
+      EXPECT_EQ(c1.key_params, c2.key_params);
+      EXPECT_EQ(c1.session_params, c2.session_params);
+    }
+
+    // protocol
+    // Use an equivalence class here, for old and new versions of the
+    // protocol description.
+    if (cd1->protocol() == cricket::kMediaProtocolDtlsSctp ||
+        cd1->protocol() == cricket::kMediaProtocolUdpDtlsSctp ||
+        cd1->protocol() == cricket::kMediaProtocolTcpDtlsSctp) {
+      const bool cd2_is_also_dtls_sctp =
+          cd2->protocol() == cricket::kMediaProtocolDtlsSctp ||
+          cd2->protocol() == cricket::kMediaProtocolUdpDtlsSctp ||
+          cd2->protocol() == cricket::kMediaProtocolTcpDtlsSctp;
+      EXPECT_TRUE(cd2_is_also_dtls_sctp);
+    } else {
+      EXPECT_EQ(cd1->protocol(), cd2->protocol());
+    }
+
+    // codecs
+    EXPECT_EQ(cd1->codecs(), cd2->codecs());
+
+    // bandwidth
+    EXPECT_EQ(cd1->bandwidth(), cd2->bandwidth());
+
+    // streams
+    EXPECT_EQ(cd1->streams(), cd2->streams());
+
+    // extmap-allow-mixed
+    EXPECT_EQ(cd1->extmap_allow_mixed_enum(), cd2->extmap_allow_mixed_enum());
+
+    // extmap
+    ASSERT_EQ(cd1->rtp_header_extensions().size(),
+              cd2->rtp_header_extensions().size());
+    for (size_t i = 0; i < cd1->rtp_header_extensions().size(); ++i) {
+      // Bind by const reference; the previous code copied each RtpExtension.
+      const RtpExtension& ext1 = cd1->rtp_header_extensions().at(i);
+      const RtpExtension& ext2 = cd2->rtp_header_extensions().at(i);
+      EXPECT_EQ(ext1.uri, ext2.uri);
+      EXPECT_EQ(ext1.id, ext2.id);
+      EXPECT_EQ(ext1.encrypt, ext2.encrypt);
+    }
+  }
+
+  // Verifies that `rids` and `ids` describe the same set of rid values.
+  // Order of elements does not matter, only equivalence of sets.
+  void CompareRidDescriptionIds(const std::vector<RidDescription>& rids,
+                                const std::vector<std::string>& ids) {
+    EXPECT_EQ(rids.size(), ids.size());
+    for (const std::string& expected_id : ids) {
+      // Capture by reference to avoid copying the id into the closure.
+      auto matches_id = [&expected_id](const RidDescription& rid) {
+        return rid.rid == expected_id;
+      };
+      EXPECT_EQ(1l, absl::c_count_if(rids, matches_id));
+    }
+  }
+
+  // Shallow comparison of two simulcast descriptions: only the layer counts
+  // are checked, not the layer contents.
+  void CompareSimulcastDescription(const SimulcastDescription& simulcast1,
+                                   const SimulcastDescription& simulcast2) {
+    EXPECT_EQ(simulcast1.send_layers().size(), simulcast2.send_layers().size());
+    EXPECT_EQ(simulcast1.receive_layers().size(),
+              simulcast2.receive_layers().size());
+  }
+
+  // Compares the SCTP-specific fields of two data content descriptions.
+  void CompareSctpDataContentDescription(
+      const SctpDataContentDescription* dcd1,
+      const SctpDataContentDescription* dcd2) {
+    EXPECT_EQ(dcd1->use_sctpmap(), dcd2->use_sctpmap());
+    EXPECT_EQ(dcd1->port(), dcd2->port());
+    EXPECT_EQ(dcd1->max_message_size(), dcd2->max_message_size());
+  }
+
+  // Deep comparison of two SessionDescriptions: contents (including their
+  // media descriptions), content groups, transport infos, and the global
+  // msid/extmap attributes. Mismatches are reported through gtest failures.
+  void CompareSessionDescription(const SessionDescription& desc1,
+                                 const SessionDescription& desc2) {
+    // Compare content descriptions.
+    if (desc1.contents().size() != desc2.contents().size()) {
+      ADD_FAILURE();
+      return;
+    }
+    for (size_t i = 0; i < desc1.contents().size(); ++i) {
+      const cricket::ContentInfo& c1 = desc1.contents().at(i);
+      const cricket::ContentInfo& c2 = desc2.contents().at(i);
+      // ContentInfo properties.
+      EXPECT_EQ(c1.name, c2.name);
+      EXPECT_EQ(c1.type, c2.type);
+      EXPECT_EQ(c1.rejected, c2.rejected);
+      EXPECT_EQ(c1.bundle_only, c2.bundle_only);
+
+      ASSERT_EQ(IsAudioContent(&c1), IsAudioContent(&c2));
+      if (IsAudioContent(&c1)) {
+        const AudioContentDescription* acd1 =
+            c1.media_description()->as_audio();
+        const AudioContentDescription* acd2 =
+            c2.media_description()->as_audio();
+        CompareMediaContentDescription<AudioContentDescription>(acd1, acd2);
+      }
+
+      ASSERT_EQ(IsVideoContent(&c1), IsVideoContent(&c2));
+      if (IsVideoContent(&c1)) {
+        const VideoContentDescription* vcd1 =
+            c1.media_description()->as_video();
+        const VideoContentDescription* vcd2 =
+            c2.media_description()->as_video();
+        CompareMediaContentDescription<VideoContentDescription>(vcd1, vcd2);
+      }
+
+      ASSERT_EQ(IsDataContent(&c1), IsDataContent(&c2));
+      if (c1.media_description()->as_sctp()) {
+        ASSERT_TRUE(c2.media_description()->as_sctp());
+        const SctpDataContentDescription* scd1 =
+            c1.media_description()->as_sctp();
+        const SctpDataContentDescription* scd2 =
+            c2.media_description()->as_sctp();
+        CompareSctpDataContentDescription(scd1, scd2);
+      }
+
+      CompareSimulcastDescription(
+          c1.media_description()->simulcast_description(),
+          c2.media_description()->simulcast_description());
+    }
+
+    // group
+    // Bind by const reference; the previous code copied both vectors.
+    const cricket::ContentGroups& groups1 = desc1.groups();
+    const cricket::ContentGroups& groups2 = desc2.groups();
+    // Bug fix: this used to compare groups1.size() against itself, so a
+    // group-count mismatch was only caught by the if-guard below.
+    EXPECT_EQ(groups1.size(), groups2.size());
+    if (groups1.size() != groups2.size()) {
+      ADD_FAILURE();
+      return;
+    }
+    for (size_t i = 0; i < groups1.size(); ++i) {
+      const cricket::ContentGroup& group1 = groups1.at(i);
+      const cricket::ContentGroup& group2 = groups2.at(i);
+      EXPECT_EQ(group1.semantics(), group2.semantics());
+      const cricket::ContentNames& names1 = group1.content_names();
+      const cricket::ContentNames& names2 = group2.content_names();
+      EXPECT_EQ(names1.size(), names2.size());
+      if (names1.size() != names2.size()) {
+        ADD_FAILURE();
+        return;
+      }
+      cricket::ContentNames::const_iterator iter1 = names1.begin();
+      cricket::ContentNames::const_iterator iter2 = names2.begin();
+      while (iter1 != names1.end()) {
+        EXPECT_EQ(*iter1++, *iter2++);
+      }
+    }
+
+    // transport info
+    const cricket::TransportInfos& transports1 = desc1.transport_infos();
+    const cricket::TransportInfos& transports2 = desc2.transport_infos();
+    EXPECT_EQ(transports1.size(), transports2.size());
+    if (transports1.size() != transports2.size()) {
+      ADD_FAILURE();
+      return;
+    }
+    for (size_t i = 0; i < transports1.size(); ++i) {
+      const cricket::TransportInfo& transport1 = transports1.at(i);
+      const cricket::TransportInfo& transport2 = transports2.at(i);
+      EXPECT_EQ(transport1.content_name, transport2.content_name);
+      EXPECT_EQ(transport1.description.ice_ufrag,
+                transport2.description.ice_ufrag);
+      EXPECT_EQ(transport1.description.ice_pwd, transport2.description.ice_pwd);
+      EXPECT_EQ(transport1.description.ice_mode,
+                transport2.description.ice_mode);
+      if (transport1.description.identity_fingerprint) {
+        EXPECT_EQ(*transport1.description.identity_fingerprint,
+                  *transport2.description.identity_fingerprint);
+      } else {
+        EXPECT_EQ(transport1.description.identity_fingerprint.get(),
+                  transport2.description.identity_fingerprint.get());
+      }
+      EXPECT_EQ(transport1.description.transport_options,
+                transport2.description.transport_options);
+    }
+
+    // global attributes
+    EXPECT_EQ(desc1.msid_supported(), desc2.msid_supported());
+    EXPECT_EQ(desc1.extmap_allow_mixed(), desc2.extmap_allow_mixed());
+  }
+
+  // Compares two JsepSessionDescriptions, including their candidate
+  // collections. Returns false only on a structural (section/candidate
+  // count) mismatch; field mismatches are reported via EXPECT failures.
+  bool CompareSessionDescription(const JsepSessionDescription& desc1,
+                                 const JsepSessionDescription& desc2) {
+    EXPECT_EQ(desc1.session_id(), desc2.session_id());
+    EXPECT_EQ(desc1.session_version(), desc2.session_version());
+    CompareSessionDescription(*desc1.description(), *desc2.description());
+    if (desc1.number_of_mediasections() != desc2.number_of_mediasections())
+      return false;
+    for (size_t i = 0; i < desc1.number_of_mediasections(); ++i) {
+      const IceCandidateCollection* cc1 = desc1.candidates(i);
+      const IceCandidateCollection* cc2 = desc2.candidates(i);
+      if (cc1->count() != cc2->count()) {
+        ADD_FAILURE();
+        return false;
+      }
+      for (size_t j = 0; j < cc1->count(); ++j) {
+        const IceCandidateInterface* c1 = cc1->at(j);
+        const IceCandidateInterface* c2 = cc2->at(j);
+        EXPECT_EQ(c1->sdp_mid(), c2->sdp_mid());
+        EXPECT_EQ(c1->sdp_mline_index(), c2->sdp_mline_index());
+        EXPECT_TRUE(c1->candidate().IsEquivalent(c2->candidate()));
+      }
+    }
+    return true;
+  }
+
+  // Disable the ice-ufrag and ice-pwd in given `sdp` message by replacing
+  // them with invalid keywords so that the parser will just ignore them.
+  // Always returns true (kept for call-site symmetry).
+  bool RemoveCandidateUfragPwd(std::string* sdp) {
+    absl::StrReplaceAll(
+        {{"a=ice-ufrag", "a=xice-ufrag"}, {"a=ice-pwd", "a=xice-pwd"}}, sdp);
+    return true;
+  }
+
+  // Update the candidates in `jdesc` to use the given `ufrag` and `pwd`.
+  // Replaces the transport info of the section at `mline_index` (0 = audio,
+  // 1 = video) and rewrites the credentials on the matching candidates.
+  bool UpdateCandidateUfragPwd(JsepSessionDescription* jdesc,
+                               int mline_index,
+                               const std::string& ufrag,
+                               const std::string& pwd) {
+    std::string content_name;
+    if (mline_index == 0) {
+      content_name = kAudioContentName;
+    } else if (mline_index == 1) {
+      content_name = kVideoContentName;
+    } else {
+      RTC_DCHECK_NOTREACHED();
+    }
+    TransportInfo transport_info(content_name,
+                                 TransportDescription(ufrag, pwd));
+    SessionDescription* desc =
+        const_cast<SessionDescription*>(jdesc->description());
+    desc->RemoveTransportInfoByName(content_name);
+    desc->AddTransportInfo(transport_info);
+    // NOTE(review): the loop below iterates the fixture member `jdesc_`, not
+    // the `jdesc` parameter updated above -- presumably callers always pass
+    // descriptions sharing jdesc_'s candidates; confirm before changing.
+    for (size_t i = 0; i < jdesc_.number_of_mediasections(); ++i) {
+      const IceCandidateCollection* cc = jdesc_.candidates(i);
+      for (size_t j = 0; j < cc->count(); ++j) {
+        if (cc->at(j)->sdp_mline_index() == mline_index) {
+          const_cast<Candidate&>(cc->at(j)->candidate()).set_username(ufrag);
+          const_cast<Candidate&>(cc->at(j)->candidate()).set_password(pwd);
+        }
+      }
+    }
+    return true;
+  }
+
+  // Replaces the transport options of the transport named `content_name`
+  // with `transport_options`.
+  void AddIceOptions(const std::string& content_name,
+                     const std::vector<std::string>& transport_options) {
+    // Look up once and copy before removal; the previous code used NULL and
+    // performed the lookup twice.
+    const cricket::TransportInfo* info =
+        desc_.GetTransportInfoByName(content_name);
+    ASSERT_TRUE(info != nullptr);
+    cricket::TransportInfo transport_info = *info;
+    desc_.RemoveTransportInfoByName(content_name);
+    transport_info.description.transport_options = transport_options;
+    desc_.AddTransportInfo(transport_info);
+  }
+
+  // Replaces the ICE ufrag/pwd of the transport named `content_name`.
+  void SetIceUfragPwd(const std::string& content_name,
+                      const std::string& ice_ufrag,
+                      const std::string& ice_pwd) {
+    // Look up once and copy before removal; the previous code used NULL and
+    // performed the lookup twice.
+    const cricket::TransportInfo* info =
+        desc_.GetTransportInfoByName(content_name);
+    ASSERT_TRUE(info != nullptr);
+    cricket::TransportInfo transport_info = *info;
+    desc_.RemoveTransportInfoByName(content_name);
+    transport_info.description.ice_ufrag = ice_ufrag;
+    transport_info.description.ice_pwd = ice_pwd;
+    desc_.AddTransportInfo(transport_info);
+  }
+
+  // Replaces both reference transports with versions that carry a DTLS
+  // fingerprint (SHA-1 over kIdentityDigest) so a=fingerprint is serialized.
+  void AddFingerprint() {
+    desc_.RemoveTransportInfoByName(kAudioContentName);
+    desc_.RemoveTransportInfoByName(kVideoContentName);
+    rtc::SSLFingerprint fingerprint(rtc::DIGEST_SHA_1, kIdentityDigest);
+    desc_.AddTransportInfo(TransportInfo(
+        kAudioContentName,
+        TransportDescription(std::vector<std::string>(), kUfragVoice, kPwdVoice,
+                             cricket::ICEMODE_FULL,
+                             cricket::CONNECTIONROLE_NONE, &fingerprint)));
+    desc_.AddTransportInfo(TransportInfo(
+        kVideoContentName,
+        TransportDescription(std::vector<std::string>(), kUfragVideo, kPwdVideo,
+                             cricket::ICEMODE_FULL,
+                             cricket::CONNECTIONROLE_NONE, &fingerprint)));
+  }
+
+  // Adds one RTP header extension (kExtmapUri/kExtmapId, optionally
+  // encrypted) to both media sections. Fresh copies replace the old
+  // descriptions because RemoveContentByName destroys the owned originals.
+  void AddExtmap(bool encrypted) {
+    audio_desc_ = new AudioContentDescription(*audio_desc_);
+    video_desc_ = new VideoContentDescription(*video_desc_);
+    audio_desc_->AddRtpHeaderExtension(
+        RtpExtension(kExtmapUri, kExtmapId, encrypted));
+    video_desc_->AddRtpHeaderExtension(
+        RtpExtension(kExtmapUri, kExtmapId, encrypted));
+    desc_.RemoveContentByName(kAudioContentName);
+    desc_.RemoveContentByName(kVideoContentName);
+    desc_.AddContent(kAudioContentName, MediaProtocolType::kRtp,
+                     absl::WrapUnique(audio_desc_));
+    desc_.AddContent(kVideoContentName, MediaProtocolType::kRtp,
+                     absl::WrapUnique(video_desc_));
+  }
+
+  // Clears all a=crypto parameters from the reference audio and video
+  // descriptions.
+  void RemoveCryptos() {
+    audio_desc_->set_cryptos({});
+    video_desc_->set_cryptos({});
+  }
+
+  // Removes everything in StreamParams from the session description that is
+  // used for a=ssrc lines.
+  void RemoveSsrcSignalingFromStreamParams() {
+    for (cricket::ContentInfo& content_info :
+         jdesc_.description()->contents()) {
+      // With Unified Plan there should be one StreamParams per m= section.
+      // NOTE(review): indexes [0] unconditionally -- assumes every section
+      // has at least one stream; confirm for callers.
+      StreamParams& stream =
+          content_info.media_description()->mutable_streams()[0];
+      stream.ssrcs.clear();
+      stream.ssrc_groups.clear();
+      stream.cname.clear();
+    }
+  }
+
+  // Removes all a=ssrc lines from the SDP string, except for the
+  // "a=ssrc:... cname:..." lines.
+  void RemoveSsrcMsidLinesFromSdpString(std::string* sdp_string) {
+    const char kAttributeSsrc[] = "a=ssrc";
+    const char kAttributeCname[] = "cname";
+    size_t ssrc_line_pos = sdp_string->find(kAttributeSsrc);
+    while (ssrc_line_pos != std::string::npos) {
+      // NOTE(review): assumes a newline precedes every a=ssrc line (true for
+      // well-formed SDP, where v= comes first); rfind==npos would misbehave.
+      size_t beg_line_pos = sdp_string->rfind('\n', ssrc_line_pos);
+      size_t end_line_pos = sdp_string->find('\n', ssrc_line_pos);
+      size_t cname_pos = sdp_string->find(kAttributeCname, ssrc_line_pos);
+      if (cname_pos == std::string::npos || cname_pos > end_line_pos) {
+        // Only erase a=ssrc lines that don't contain "cname".
+        sdp_string->erase(beg_line_pos, end_line_pos - beg_line_pos);
+        ssrc_line_pos = sdp_string->find(kAttributeSsrc, beg_line_pos);
+      } else {
+        // Skip the "a=ssrc:... cname" line and find the next "a=ssrc" line.
+        ssrc_line_pos = sdp_string->find(kAttributeSsrc, end_line_pos);
+      }
+    }
+  }
+
+  // Removes all a=ssrc lines from the SDP string.
+  // Assumes a newline precedes every a=ssrc line (true for well-formed SDP).
+  void RemoveSsrcLinesFromSdpString(std::string* sdp_string) {
+    const char kAttributeSsrc[] = "a=ssrc";
+    // Search once per iteration and resume from the erase point; the
+    // previous code called find() twice and rescanned from the beginning.
+    size_t pos_ssrc_attribute = sdp_string->find(kAttributeSsrc);
+    while (pos_ssrc_attribute != std::string::npos) {
+      size_t beg_line_pos = sdp_string->rfind('\n', pos_ssrc_attribute);
+      size_t end_line_pos = sdp_string->find('\n', pos_ssrc_attribute);
+      sdp_string->erase(beg_line_pos, end_line_pos - beg_line_pos);
+      pos_ssrc_attribute = sdp_string->find(kAttributeSsrc, beg_line_pos);
+    }
+  }
+
+  // Sets `direction` on both reference media descriptions, serializes, and
+  // checks the result against kSdpFullString with its direction attributes
+  // rewritten accordingly. Returns false only if re-initialization fails.
+  bool TestSerializeDirection(RtpTransceiverDirection direction) {
+    audio_desc_->set_direction(direction);
+    video_desc_->set_direction(direction);
+    std::string new_sdp = kSdpFullString;
+    ReplaceDirection(direction, &new_sdp);
+
+    if (!jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(),
+                           jdesc_.session_version())) {
+      return false;
+    }
+    std::string message = webrtc::SdpSerialize(jdesc_);
+    EXPECT_EQ(new_sdp, message);
+    return true;
+  }
+
  // Serializes the description with the audio and/or video m= section marked
  // rejected and checks the output matches kSdpString with the rejection
  // substituted in (via ReplaceRejected).
  bool TestSerializeRejected(bool audio_rejected, bool video_rejected) {
    // Fresh copies are made because the current content objects are removed
    // from desc_ below, and AddContent takes ownership of the copies via
    // absl::WrapUnique.
    audio_desc_ = new AudioContentDescription(*audio_desc_);
    video_desc_ = new VideoContentDescription(*video_desc_);

    desc_.RemoveContentByName(kAudioContentName);
    desc_.RemoveContentByName(kVideoContentName);
    desc_.AddContent(kAudioContentName, MediaProtocolType::kRtp, audio_rejected,
                     absl::WrapUnique(audio_desc_));
    desc_.AddContent(kVideoContentName, MediaProtocolType::kRtp, video_rejected,
                     absl::WrapUnique(video_desc_));
    // Rejected sections carry empty ICE credentials.
    SetIceUfragPwd(kAudioContentName, audio_rejected ? "" : kUfragVoice,
                   audio_rejected ? "" : kPwdVoice);
    SetIceUfragPwd(kVideoContentName, video_rejected ? "" : kUfragVideo,
                   video_rejected ? "" : kPwdVideo);

    std::string new_sdp = kSdpString;
    ReplaceRejected(audio_rejected, video_rejected, &new_sdp);

    JsepSessionDescription jdesc_no_candidates(kDummyType);
    MakeDescriptionWithoutCandidates(&jdesc_no_candidates);
    std::string message = webrtc::SdpSerialize(jdesc_no_candidates);
    EXPECT_EQ(new_sdp, message);
    return true;
  }
+
+ void AddSctpDataChannel(bool use_sctpmap) {
+ std::unique_ptr<SctpDataContentDescription> data(
+ new SctpDataContentDescription());
+ sctp_desc_ = data.get();
+ sctp_desc_->set_use_sctpmap(use_sctpmap);
+ sctp_desc_->set_protocol(cricket::kMediaProtocolUdpDtlsSctp);
+ sctp_desc_->set_port(kDefaultSctpPort);
+ desc_.AddContent(kDataContentName, MediaProtocolType::kSctp,
+ std::move(data));
+ desc_.AddTransportInfo(TransportInfo(
+ kDataContentName, TransportDescription(kUfragData, kPwdData)));
+ }
+
+ bool TestDeserializeDirection(RtpTransceiverDirection direction) {
+ std::string new_sdp = kSdpFullString;
+ ReplaceDirection(direction, &new_sdp);
+ JsepSessionDescription new_jdesc(kDummyType);
+
+ EXPECT_TRUE(SdpDeserialize(new_sdp, &new_jdesc));
+
+ audio_desc_->set_direction(direction);
+ video_desc_->set_direction(direction);
+ if (!jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(),
+ jdesc_.session_version())) {
+ return false;
+ }
+ EXPECT_TRUE(CompareSessionDescription(jdesc_, new_jdesc));
+ return true;
+ }
+
  // Deserializes kSdpString with the audio and/or video m= section marked
  // rejected and verifies the result against a reference description built
  // directly with the same rejection flags.
  bool TestDeserializeRejected(bool audio_rejected, bool video_rejected) {
    std::string new_sdp = kSdpString;
    ReplaceRejected(audio_rejected, video_rejected, &new_sdp);
    JsepSessionDescription new_jdesc(SdpType::kOffer);
    EXPECT_TRUE(SdpDeserialize(new_sdp, &new_jdesc));

    // Fresh copies are made because the current content objects are removed
    // from desc_ below, and AddContent takes ownership of the copies via
    // absl::WrapUnique.
    audio_desc_ = new AudioContentDescription(*audio_desc_);
    video_desc_ = new VideoContentDescription(*video_desc_);
    desc_.RemoveContentByName(kAudioContentName);
    desc_.RemoveContentByName(kVideoContentName);
    desc_.AddContent(kAudioContentName, MediaProtocolType::kRtp, audio_rejected,
                     absl::WrapUnique(audio_desc_));
    desc_.AddContent(kVideoContentName, MediaProtocolType::kRtp, video_rejected,
                     absl::WrapUnique(video_desc_));
    // Rejected sections carry empty ICE credentials.
    SetIceUfragPwd(kAudioContentName, audio_rejected ? "" : kUfragVoice,
                   audio_rejected ? "" : kPwdVoice);
    SetIceUfragPwd(kVideoContentName, video_rejected ? "" : kUfragVideo,
                   video_rejected ? "" : kPwdVideo);
    JsepSessionDescription jdesc_no_candidates(kDummyType);
    if (!jdesc_no_candidates.Initialize(desc_.Clone(), jdesc_.session_id(),
                                        jdesc_.session_version())) {
      return false;
    }
    EXPECT_TRUE(CompareSessionDescription(jdesc_no_candidates, new_jdesc));
    return true;
  }
+
  // Deserializes kSdpString with an a=extmap attribute injected at session
  // and/or media level. Injecting it at both levels simultaneously must be a
  // parse error; otherwise the result must match a reference description
  // built via AddExtmap().
  void TestDeserializeExtmap(bool session_level,
                             bool media_level,
                             bool encrypted) {
    AddExtmap(encrypted);
    JsepSessionDescription new_jdesc(SdpType::kOffer);
    ASSERT_TRUE(new_jdesc.Initialize(desc_.Clone(), jdesc_.session_id(),
                                     jdesc_.session_version()));
    JsepSessionDescription jdesc_with_extmap(SdpType::kOffer);
    std::string sdp_with_extmap = kSdpString;
    if (session_level) {
      InjectAfter(kSessionTime,
                  encrypted ? kExtmapWithDirectionAndAttributeEncrypted
                            : kExtmapWithDirectionAndAttribute,
                  &sdp_with_extmap);
    }
    if (media_level) {
      // Media-level extmap goes into both the audio and the video section.
      InjectAfter(kAttributeIcePwdVoice,
                  encrypted ? kExtmapWithDirectionAndAttributeEncrypted
                            : kExtmapWithDirectionAndAttribute,
                  &sdp_with_extmap);
      InjectAfter(kAttributeIcePwdVideo,
                  encrypted ? kExtmapWithDirectionAndAttributeEncrypted
                            : kExtmapWithDirectionAndAttribute,
                  &sdp_with_extmap);
    }
    // The extmap can't be present at the same time in both session level and
    // media level.
    if (session_level && media_level) {
      SdpParseError error;
      EXPECT_FALSE(
          webrtc::SdpDeserialize(sdp_with_extmap, &jdesc_with_extmap, &error));
      // The parse error must point at the offending a=extmap attribute.
      EXPECT_NE(std::string::npos, error.description.find("a=extmap"));
    } else {
      EXPECT_TRUE(SdpDeserialize(sdp_with_extmap, &jdesc_with_extmap));
      EXPECT_TRUE(CompareSessionDescription(jdesc_with_extmap, new_jdesc));
    }
  }
+
+ void VerifyCodecParameter(const cricket::CodecParameterMap& params,
+ const std::string& name,
+ int expected_value) {
+ cricket::CodecParameterMap::const_iterator found = params.find(name);
+ ASSERT_TRUE(found != params.end());
+ EXPECT_EQ(found->second, rtc::ToString(expected_value));
+ }
+
  // Builds an audio+video SDP embedding the fmtp/ptime values from `params`,
  // deserializes it, and verifies the resulting codec entries and their
  // parameter maps. Also checks the ordering rule: codecs follow the m= line
  // <fmt> list, not the order of the a=rtpmap lines.
  void TestDeserializeCodecParams(const CodecParams& params,
                                  JsepSessionDescription* jdesc_output) {
    std::string sdp =
        "v=0\r\n"
        "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
        "s=-\r\n"
        "t=0 0\r\n"
        // Include semantics for WebRTC Media Streams since it is supported by
        // this parser, and will be added to the SDP when serializing a session
        // description.
        "a=msid-semantic: WMS\r\n"
        // Pl type 111 preferred.
        "m=audio 9 RTP/SAVPF 111 104 103 105\r\n"
        // Pltype 111 listed before 103 and 104 in the map.
        "a=rtpmap:111 opus/48000/2\r\n"
        // Pltype 103 listed before 104.
        "a=rtpmap:103 ISAC/16000\r\n"
        "a=rtpmap:104 ISAC/32000\r\n"
        "a=rtpmap:105 telephone-event/8000\r\n"
        "a=fmtp:105 0-15,66,70\r\n"
        "a=fmtp:111 ";
    std::ostringstream os;
    os << "minptime=" << params.min_ptime << "; stereo=" << params.stereo
       << "; sprop-stereo=" << params.sprop_stereo
       << "; useinbandfec=" << params.useinband
       << "; maxaveragebitrate=" << params.maxaveragebitrate
       << "\r\n"
          "a=ptime:"
       << params.ptime
       << "\r\n"
          "a=maxptime:"
       << params.max_ptime << "\r\n";
    sdp += os.str();

    os.clear();
    os.str("");
    // Pl type 99 preferred (first in the m= line <fmt> list).
    os << "m=video 9 RTP/SAVPF 99 95 96\r\n"
          "a=rtpmap:96 VP9/90000\r\n"  // out-of-order wrt the m= line.
          "a=rtpmap:99 VP8/90000\r\n"
          "a=rtpmap:95 RTX/90000\r\n"
          "a=fmtp:95 apt=99;\r\n";
    sdp += os.str();

    // Deserialize
    SdpParseError error;
    EXPECT_TRUE(webrtc::SdpDeserialize(sdp, jdesc_output, &error));

    const AudioContentDescription* acd =
        GetFirstAudioContentDescription(jdesc_output->description());
    ASSERT_TRUE(acd);
    ASSERT_FALSE(acd->codecs().empty());
    cricket::Codec opus = acd->codecs()[0];
    EXPECT_EQ("opus", opus.name);
    EXPECT_EQ(111, opus.id);
    VerifyCodecParameter(opus.params, "minptime", params.min_ptime);
    VerifyCodecParameter(opus.params, "stereo", params.stereo);
    VerifyCodecParameter(opus.params, "sprop-stereo", params.sprop_stereo);
    VerifyCodecParameter(opus.params, "useinbandfec", params.useinband);
    VerifyCodecParameter(opus.params, "maxaveragebitrate",
                         params.maxaveragebitrate);
    // a=ptime / a=maxptime are media-level, so they are expected on every
    // audio codec, not just opus.
    for (const auto& codec : acd->codecs()) {
      VerifyCodecParameter(codec.params, "ptime", params.ptime);
      VerifyCodecParameter(codec.params, "maxptime", params.max_ptime);
    }

    cricket::Codec dtmf = acd->codecs()[3];
    EXPECT_EQ("telephone-event", dtmf.name);
    EXPECT_EQ(105, dtmf.id);
    EXPECT_EQ(3u,
              dtmf.params.size());  // ptime and max_ptime count as parameters.
    // The valueless fmtp payload ("0-15,66,70") is stored under an empty key.
    EXPECT_EQ(dtmf.params.begin()->first, "");
    EXPECT_EQ(dtmf.params.begin()->second, "0-15,66,70");

    const VideoContentDescription* vcd =
        GetFirstVideoContentDescription(jdesc_output->description());
    ASSERT_TRUE(vcd);
    ASSERT_FALSE(vcd->codecs().empty());
    cricket::VideoCodec vp8 = vcd->codecs()[0];
    EXPECT_EQ("VP8", vp8.name);
    EXPECT_EQ(99, vp8.id);
    cricket::VideoCodec rtx = vcd->codecs()[1];
    EXPECT_EQ("RTX", rtx.name);
    EXPECT_EQ(95, rtx.id);
    VerifyCodecParameter(rtx.params, "apt", vp8.id);
    // VP9 is listed last in the m= line so should come after VP8 and RTX.
    cricket::VideoCodec vp9 = vcd->codecs()[2];
    EXPECT_EQ("VP9", vp9.name);
    EXPECT_EQ(96, vp9.id);
  }
+
  // Deserializes an audio+video SDP carrying a=rtcp-fb attributes (optionally
  // using the "*" payload-type wildcard) and verifies each feedback parameter
  // is attached to the expected codec.
  void TestDeserializeRtcpFb(JsepSessionDescription* jdesc_output,
                             bool use_wildcard) {
    std::string sdp_session_and_audio =
        "v=0\r\n"
        "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
        "s=-\r\n"
        "t=0 0\r\n"
        // Include semantics for WebRTC Media Streams since it is supported by
        // this parser, and will be added to the SDP when serializing a session
        // description.
        "a=msid-semantic: WMS\r\n"
        "m=audio 9 RTP/SAVPF 111\r\n"
        "a=rtpmap:111 opus/48000/2\r\n";
    std::string sdp_video =
        "m=video 3457 RTP/SAVPF 101\r\n"
        "a=rtpmap:101 VP8/90000\r\n"
        "a=rtcp-fb:101 goog-lntf\r\n"
        "a=rtcp-fb:101 nack\r\n"
        "a=rtcp-fb:101 nack pli\r\n"
        "a=rtcp-fb:101 goog-remb\r\n";
    std::ostringstream os;
    os << sdp_session_and_audio;
    // Either an explicit payload type or the "*" wildcard, per `use_wildcard`.
    os << "a=rtcp-fb:" << (use_wildcard ? "*" : "111") << " nack\r\n";
    os << sdp_video;
    os << "a=rtcp-fb:" << (use_wildcard ? "*" : "101") << " ccm fir\r\n";
    std::string sdp = os.str();
    // Deserialize
    SdpParseError error;
    EXPECT_TRUE(webrtc::SdpDeserialize(sdp, jdesc_output, &error));
    const AudioContentDescription* acd =
        GetFirstAudioContentDescription(jdesc_output->description());
    ASSERT_TRUE(acd);
    ASSERT_FALSE(acd->codecs().empty());
    cricket::Codec opus = acd->codecs()[0];
    EXPECT_EQ(111, opus.id);
    EXPECT_TRUE(opus.HasFeedbackParam(cricket::FeedbackParam(
        cricket::kRtcpFbParamNack, cricket::kParamValueEmpty)));

    const VideoContentDescription* vcd =
        GetFirstVideoContentDescription(jdesc_output->description());
    ASSERT_TRUE(vcd);
    ASSERT_FALSE(vcd->codecs().empty());
    cricket::VideoCodec vp8 = vcd->codecs()[0];
    EXPECT_EQ(vp8.name, "VP8");
    EXPECT_EQ(101, vp8.id);
    EXPECT_TRUE(vp8.HasFeedbackParam(cricket::FeedbackParam(
        cricket::kRtcpFbParamLntf, cricket::kParamValueEmpty)));
    EXPECT_TRUE(vp8.HasFeedbackParam(cricket::FeedbackParam(
        cricket::kRtcpFbParamNack, cricket::kParamValueEmpty)));
    EXPECT_TRUE(vp8.HasFeedbackParam(cricket::FeedbackParam(
        cricket::kRtcpFbParamNack, cricket::kRtcpFbNackParamPli)));
    EXPECT_TRUE(vp8.HasFeedbackParam(cricket::FeedbackParam(
        cricket::kRtcpFbParamRemb, cricket::kParamValueEmpty)));
    EXPECT_TRUE(vp8.HasFeedbackParam(cricket::FeedbackParam(
        cricket::kRtcpFbParamCcm, cricket::kRtcpFbCcmParamFir)));
  }
+
+ // Two SDP messages can mean the same thing but be different strings, e.g.
+ // some of the lines can be serialized in different order.
+ // However, a deserialized description can be compared field by field and has
+ // no order. If deserializer has already been tested, serializing then
+ // deserializing and comparing JsepSessionDescription will test
+ // the serializer sufficiently.
+ void TestSerialize(const JsepSessionDescription& jdesc) {
+ std::string message = webrtc::SdpSerialize(jdesc);
+ JsepSessionDescription jdesc_output_des(kDummyType);
+ SdpParseError error;
+ EXPECT_TRUE(webrtc::SdpDeserialize(message, &jdesc_output_des, &error));
+ EXPECT_TRUE(CompareSessionDescription(jdesc, jdesc_output_des));
+ }
+
+ // Calling 'Initialize' with a copy of the inner SessionDescription will
+ // create a copy of the JsepSessionDescription without candidates. The
+ // 'connection address' field, previously set from the candidates, must also
+ // be reset.
+ void MakeDescriptionWithoutCandidates(JsepSessionDescription* jdesc) {
+ rtc::SocketAddress audio_addr("0.0.0.0", 9);
+ rtc::SocketAddress video_addr("0.0.0.0", 9);
+ audio_desc_->set_connection_address(audio_addr);
+ video_desc_->set_connection_address(video_addr);
+ ASSERT_TRUE(jdesc->Initialize(desc_.Clone(), kSessionId, kSessionVersion));
+ }
+
+ protected:
+ SessionDescription desc_;
+ AudioContentDescription* audio_desc_;
+ VideoContentDescription* video_desc_;
+ SctpDataContentDescription* sctp_desc_;
+ std::vector<Candidate> candidates_;
+ std::unique_ptr<IceCandidateInterface> jcandidate_;
+ JsepSessionDescription jdesc_;
+};
+
+void TestMismatch(const std::string& string1, const std::string& string2) {
+ int position = 0;
+ for (size_t i = 0; i < string1.length() && i < string2.length(); ++i) {
+ if (string1.c_str()[i] != string2.c_str()[i]) {
+ position = static_cast<int>(i);
+ break;
+ }
+ }
+ EXPECT_EQ(0, position) << "Strings mismatch at the " << position
+ << " character\n"
+ " 1: "
+ << string1.substr(position, 20)
+ << "\n"
+ " 2: "
+ << string2.substr(position, 20) << "\n";
+}
+
+TEST_F(WebRtcSdpTest, SerializeSessionDescription) {
+ // SessionDescription with desc and candidates.
+ std::string message = webrtc::SdpSerialize(jdesc_);
+ TestMismatch(std::string(kSdpFullString), message);
+}
+
// An uninitialized JsepSessionDescription serializes to an empty string.
TEST_F(WebRtcSdpTest, SerializeSessionDescriptionEmpty) {
  JsepSessionDescription jdesc_empty(kDummyType);
  EXPECT_EQ("", webrtc::SdpSerialize(jdesc_empty));
}
+
+// This tests serialization of SDP with a=crypto and a=fingerprint, as would be
+// the case in a DTLS offer.
+TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithFingerprint) {
+ AddFingerprint();
+ JsepSessionDescription jdesc_with_fingerprint(kDummyType);
+ MakeDescriptionWithoutCandidates(&jdesc_with_fingerprint);
+ std::string message = webrtc::SdpSerialize(jdesc_with_fingerprint);
+
+ std::string sdp_with_fingerprint = kSdpString;
+ InjectAfter(kAttributeIcePwdVoice, kFingerprint, &sdp_with_fingerprint);
+ InjectAfter(kAttributeIcePwdVideo, kFingerprint, &sdp_with_fingerprint);
+
+ EXPECT_EQ(sdp_with_fingerprint, message);
+}
+
+// This tests serialization of SDP with a=fingerprint with no a=crypto, as would
+// be the case in a DTLS answer.
+TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithFingerprintNoCryptos) {
+ AddFingerprint();
+ RemoveCryptos();
+ JsepSessionDescription jdesc_with_fingerprint(kDummyType);
+ MakeDescriptionWithoutCandidates(&jdesc_with_fingerprint);
+ std::string message = webrtc::SdpSerialize(jdesc_with_fingerprint);
+
+ std::string sdp_with_fingerprint = kSdpString;
+ Replace(kAttributeCryptoVoice, "", &sdp_with_fingerprint);
+ Replace(kAttributeCryptoVideo, "", &sdp_with_fingerprint);
+ InjectAfter(kAttributeIcePwdVoice, kFingerprint, &sdp_with_fingerprint);
+ InjectAfter(kAttributeIcePwdVideo, kFingerprint, &sdp_with_fingerprint);
+
+ EXPECT_EQ(sdp_with_fingerprint, message);
+}
+
+TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithoutCandidates) {
+ // JsepSessionDescription with desc but without candidates.
+ JsepSessionDescription jdesc_no_candidates(kDummyType);
+ MakeDescriptionWithoutCandidates(&jdesc_no_candidates);
+ std::string message = webrtc::SdpSerialize(jdesc_no_candidates);
+ EXPECT_EQ(std::string(kSdpString), message);
+}
+
+TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithBundles) {
+ ContentGroup group1(cricket::GROUP_TYPE_BUNDLE);
+ group1.AddContentName(kAudioContentName);
+ group1.AddContentName(kVideoContentName);
+ desc_.AddGroup(group1);
+ ContentGroup group2(cricket::GROUP_TYPE_BUNDLE);
+ group2.AddContentName(kAudioContentName2);
+ desc_.AddGroup(group2);
+ ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(),
+ jdesc_.session_version()));
+ std::string message = webrtc::SdpSerialize(jdesc_);
+ std::string sdp_with_bundle = kSdpFullString;
+ InjectAfter(kSessionTime,
+ "a=group:BUNDLE audio_content_name video_content_name\r\n"
+ "a=group:BUNDLE audio_content_name_2\r\n",
+ &sdp_with_bundle);
+ EXPECT_EQ(sdp_with_bundle, message);
+}
+
+TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithBandwidth) {
+ VideoContentDescription* vcd = GetFirstVideoContentDescription(&desc_);
+ vcd->set_bandwidth(100 * 1000 + 755); // Integer division will drop the 755.
+ vcd->set_bandwidth_type("AS");
+ AudioContentDescription* acd = GetFirstAudioContentDescription(&desc_);
+ acd->set_bandwidth(555);
+ acd->set_bandwidth_type("TIAS");
+ ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(),
+ jdesc_.session_version()));
+ std::string message = webrtc::SdpSerialize(jdesc_);
+ std::string sdp_with_bandwidth = kSdpFullString;
+ InjectAfter("c=IN IP4 74.125.224.39\r\n", "b=AS:100\r\n",
+ &sdp_with_bandwidth);
+ InjectAfter("c=IN IP4 74.125.127.126\r\n", "b=TIAS:555\r\n",
+ &sdp_with_bandwidth);
+ EXPECT_EQ(sdp_with_bandwidth, message);
+}
+
+// Should default to b=AS if bandwidth_type isn't set.
+TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithMissingBandwidthType) {
+ VideoContentDescription* vcd = GetFirstVideoContentDescription(&desc_);
+ vcd->set_bandwidth(100 * 1000);
+ ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(),
+ jdesc_.session_version()));
+ std::string message = webrtc::SdpSerialize(jdesc_);
+ std::string sdp_with_bandwidth = kSdpFullString;
+ InjectAfter("c=IN IP4 74.125.224.39\r\n", "b=AS:100\r\n",
+ &sdp_with_bandwidth);
+ EXPECT_EQ(sdp_with_bandwidth, message);
+}
+
+TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithIceOptions) {
+ std::vector<std::string> transport_options;
+ transport_options.push_back(kIceOption1);
+ transport_options.push_back(kIceOption3);
+ AddIceOptions(kAudioContentName, transport_options);
+ transport_options.clear();
+ transport_options.push_back(kIceOption2);
+ transport_options.push_back(kIceOption3);
+ AddIceOptions(kVideoContentName, transport_options);
+ ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(),
+ jdesc_.session_version()));
+ std::string message = webrtc::SdpSerialize(jdesc_);
+ std::string sdp_with_ice_options = kSdpFullString;
+ InjectAfter(kAttributeIcePwdVoice, "a=ice-options:iceoption1 iceoption3\r\n",
+ &sdp_with_ice_options);
+ InjectAfter(kAttributeIcePwdVideo, "a=ice-options:iceoption2 iceoption3\r\n",
+ &sdp_with_ice_options);
+ EXPECT_EQ(sdp_with_ice_options, message);
+}
+
// Serialization check for the a=recvonly direction attribute.
TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithRecvOnlyContent) {
  EXPECT_TRUE(TestSerializeDirection(RtpTransceiverDirection::kRecvOnly));
}
+
// Serialization check for the a=sendonly direction attribute.
TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithSendOnlyContent) {
  EXPECT_TRUE(TestSerializeDirection(RtpTransceiverDirection::kSendOnly));
}
+
// Serialization check for the a=inactive direction attribute.
TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithInactiveContent) {
  EXPECT_TRUE(TestSerializeDirection(RtpTransceiverDirection::kInactive));
}
+
// Serialization check with only the audio m= section rejected.
TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithAudioRejected) {
  EXPECT_TRUE(TestSerializeRejected(true, false));
}
+
// Serialization check with only the video m= section rejected.
TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithVideoRejected) {
  EXPECT_TRUE(TestSerializeRejected(false, true));
}
+
// Serialization check with both the audio and video m= sections rejected.
TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithAudioVideoRejected) {
  EXPECT_TRUE(TestSerializeRejected(true, true));
}
+
+TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithSctpDataChannel) {
+ bool use_sctpmap = true;
+ AddSctpDataChannel(use_sctpmap);
+ JsepSessionDescription jsep_desc(kDummyType);
+
+ MakeDescriptionWithoutCandidates(&jsep_desc);
+ std::string message = webrtc::SdpSerialize(jsep_desc);
+
+ std::string expected_sdp = kSdpString;
+ expected_sdp.append(kSdpSctpDataChannelString);
+ EXPECT_EQ(message, expected_sdp);
+}
+
+void MutateJsepSctpPort(JsepSessionDescription* jdesc,
+ const SessionDescription& desc,
+ int port) {
+ // Take our pre-built session description and change the SCTP port.
+ std::unique_ptr<cricket::SessionDescription> mutant = desc.Clone();
+ SctpDataContentDescription* dcdesc =
+ mutant->GetContentDescriptionByName(kDataContentName)->as_sctp();
+ dcdesc->set_port(port);
+ ASSERT_TRUE(
+ jdesc->Initialize(std::move(mutant), kSessionId, kSessionVersion));
+}
+
+TEST_F(WebRtcSdpTest, SerializeWithSctpDataChannelAndNewPort) {
+ bool use_sctpmap = true;
+ AddSctpDataChannel(use_sctpmap);
+ JsepSessionDescription jsep_desc(kDummyType);
+ MakeDescriptionWithoutCandidates(&jsep_desc);
+
+ const int kNewPort = 1234;
+ MutateJsepSctpPort(&jsep_desc, desc_, kNewPort);
+
+ std::string message = webrtc::SdpSerialize(jsep_desc);
+
+ std::string expected_sdp = kSdpString;
+ expected_sdp.append(kSdpSctpDataChannelString);
+
+ absl::StrReplaceAll(
+ {{rtc::ToString(kDefaultSctpPort), rtc::ToString(kNewPort)}},
+ &expected_sdp);
+
+ EXPECT_EQ(expected_sdp, message);
+}
+
// Session-level extmap-allow-mixed must survive a serialize/deserialize
// round trip.
TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithExtmapAllowMixed) {
  jdesc_.description()->set_extmap_allow_mixed(true);
  TestSerialize(jdesc_);
}
+
+TEST_F(WebRtcSdpTest, SerializeMediaContentDescriptionWithExtmapAllowMixed) {
+ cricket::MediaContentDescription* video_desc =
+ jdesc_.description()->GetContentDescriptionByName(kVideoContentName);
+ ASSERT_TRUE(video_desc);
+ cricket::MediaContentDescription* audio_desc =
+ jdesc_.description()->GetContentDescriptionByName(kAudioContentName);
+ ASSERT_TRUE(audio_desc);
+ video_desc->set_extmap_allow_mixed_enum(
+ cricket::MediaContentDescription::kMedia);
+ audio_desc->set_extmap_allow_mixed_enum(
+ cricket::MediaContentDescription::kMedia);
+ TestSerialize(jdesc_);
+}
+
+TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithExtmap) {
+ bool encrypted = false;
+ AddExtmap(encrypted);
+ JsepSessionDescription desc_with_extmap(kDummyType);
+ MakeDescriptionWithoutCandidates(&desc_with_extmap);
+ std::string message = webrtc::SdpSerialize(desc_with_extmap);
+
+ std::string sdp_with_extmap = kSdpString;
+ InjectAfter("a=mid:audio_content_name\r\n", kExtmap, &sdp_with_extmap);
+ InjectAfter("a=mid:video_content_name\r\n", kExtmap, &sdp_with_extmap);
+
+ EXPECT_EQ(sdp_with_extmap, message);
+}
+
// Encrypted extmap extensions must survive a serialize/deserialize round
// trip.
TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithExtmapEncrypted) {
  bool encrypted = true;
  AddExtmap(encrypted);
  JsepSessionDescription desc_with_extmap(kDummyType);
  ASSERT_TRUE(
      desc_with_extmap.Initialize(desc_.Clone(), kSessionId, kSessionVersion));
  TestSerialize(desc_with_extmap);
}
+
// Verifies candidate serialization, including the optional trailing
// extensions (ufrag, network-id, network-cost).
TEST_F(WebRtcSdpTest, SerializeCandidates) {
  std::string message = webrtc::SdpSerializeCandidate(*jcandidate_);
  EXPECT_EQ(std::string(kRawCandidate), message);

  // A candidate with a username serializes with a trailing "ufrag" extension.
  Candidate candidate_with_ufrag(candidates_.front());
  candidate_with_ufrag.set_username("ABC");
  jcandidate_.reset(new JsepIceCandidate(std::string("audio_content_name"), 0,
                                         candidate_with_ufrag));
  message = webrtc::SdpSerializeCandidate(*jcandidate_);
  EXPECT_EQ(std::string(kRawCandidate) + " ufrag ABC", message);

  // "network-id" and, when set, "network-cost" are appended in that order.
  Candidate candidate_with_network_info(candidates_.front());
  candidate_with_network_info.set_network_id(1);
  jcandidate_.reset(new JsepIceCandidate(std::string("audio"), 0,
                                         candidate_with_network_info));
  message = webrtc::SdpSerializeCandidate(*jcandidate_);
  EXPECT_EQ(std::string(kRawCandidate) + " network-id 1", message);
  candidate_with_network_info.set_network_cost(999);
  jcandidate_.reset(new JsepIceCandidate(std::string("audio"), 0,
                                         candidate_with_network_info));
  message = webrtc::SdpSerializeCandidate(*jcandidate_);
  EXPECT_EQ(std::string(kRawCandidate) + " network-id 1 network-cost 999",
            message);
}
+
+TEST_F(WebRtcSdpTest, SerializeHostnameCandidate) {
+ rtc::SocketAddress address("a.test", 1234);
+ cricket::Candidate candidate(
+ cricket::ICE_CANDIDATE_COMPONENT_RTP, "udp", address, kCandidatePriority,
+ "", "", LOCAL_PORT_TYPE, kCandidateGeneration, kCandidateFoundation1);
+ JsepIceCandidate jcandidate(std::string("audio_content_name"), 0, candidate);
+ std::string message = webrtc::SdpSerializeCandidate(jcandidate);
+ EXPECT_EQ(std::string(kRawHostnameCandidate), message);
+}
+
+TEST_F(WebRtcSdpTest, SerializeTcpCandidates) {
+ Candidate candidate(ICE_CANDIDATE_COMPONENT_RTP, "tcp",
+ rtc::SocketAddress("192.168.1.5", 9), kCandidatePriority,
+ "", "", LOCAL_PORT_TYPE, kCandidateGeneration,
+ kCandidateFoundation1);
+ candidate.set_tcptype(cricket::TCPTYPE_ACTIVE_STR);
+ std::unique_ptr<IceCandidateInterface> jcandidate(
+ new JsepIceCandidate(std::string("audio_content_name"), 0, candidate));
+
+ std::string message = webrtc::SdpSerializeCandidate(*jcandidate);
+ EXPECT_EQ(std::string(kSdpTcpActiveCandidate), message);
+}
+
// Test serializing a TCP candidate that came in with a missing tcptype. This
// shouldn't happen according to the spec, but our implementation has been
// accepting this for quite some time, treating it as a passive candidate.
//
// So, we should be able to at least convert such candidates to and from SDP.
// See: bugs.webrtc.org/11423
TEST_F(WebRtcSdpTest, ParseTcpCandidateWithoutTcptype) {
  std::string missing_tcptype =
      "candidate:a0+B/1 1 tcp 2130706432 192.168.1.5 9999 typ host";
  JsepIceCandidate jcandidate(kDummyMid, kDummyIndex);
  EXPECT_TRUE(SdpDeserializeCandidate(missing_tcptype, &jcandidate));

  // A missing tcptype is interpreted as passive.
  EXPECT_EQ(std::string(cricket::TCPTYPE_PASSIVE_STR),
            jcandidate.candidate().tcptype());
}
+
+TEST_F(WebRtcSdpTest, ParseSslTcpCandidate) {
+ std::string ssltcp =
+ "candidate:a0+B/1 1 ssltcp 2130706432 192.168.1.5 9999 typ host tcptype "
+ "passive";
+ JsepIceCandidate jcandidate(kDummyMid, kDummyIndex);
+ EXPECT_TRUE(SdpDeserializeCandidate(ssltcp, &jcandidate));
+
+ EXPECT_EQ(std::string("ssltcp"), jcandidate.candidate().protocol());
+}
+
+TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithH264) {
+ cricket::VideoCodec h264_codec = cricket::CreateVideoCodec("H264");
+ h264_codec.SetParam("profile-level-id", "42e01f");
+ h264_codec.SetParam("level-asymmetry-allowed", "1");
+ h264_codec.SetParam("packetization-mode", "1");
+ video_desc_->AddCodec(h264_codec);
+
+ jdesc_.Initialize(desc_.Clone(), kSessionId, kSessionVersion);
+
+ std::string message = webrtc::SdpSerialize(jdesc_);
+ size_t after_pt = message.find(" H264/90000");
+ ASSERT_NE(after_pt, std::string::npos);
+ size_t before_pt = message.rfind("a=rtpmap:", after_pt);
+ ASSERT_NE(before_pt, std::string::npos);
+ before_pt += strlen("a=rtpmap:");
+ std::string pt = message.substr(before_pt, after_pt - before_pt);
+ // TODO(hta): Check if payload type `pt` occurs in the m=video line.
+ std::string to_find = "a=fmtp:" + pt + " ";
+ size_t fmtp_pos = message.find(to_find);
+ ASSERT_NE(std::string::npos, fmtp_pos) << "Failed to find " << to_find;
+ size_t fmtp_endpos = message.find('\n', fmtp_pos);
+ ASSERT_NE(std::string::npos, fmtp_endpos);
+ std::string fmtp_value = message.substr(fmtp_pos, fmtp_endpos);
+ EXPECT_NE(std::string::npos, fmtp_value.find("level-asymmetry-allowed=1"));
+ EXPECT_NE(std::string::npos, fmtp_value.find("packetization-mode=1"));
+ EXPECT_NE(std::string::npos, fmtp_value.find("profile-level-id=42e01f"));
+ // Check that there are no spaces after semicolons.
+ // https://bugs.webrtc.org/5793
+ EXPECT_EQ(std::string::npos, fmtp_value.find("; "));
+}
+
// Deserializing the canonical full SDP string must reproduce the fixture's
// reference description.
TEST_F(WebRtcSdpTest, DeserializeSessionDescription) {
  JsepSessionDescription jdesc(kDummyType);
  // Deserialize
  EXPECT_TRUE(SdpDeserialize(kSdpFullString, &jdesc));
  // Verify
  EXPECT_TRUE(CompareSessionDescription(jdesc_, jdesc));
}
+
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithoutMline) {
+ JsepSessionDescription jdesc(kDummyType);
+ const char kSdpWithoutMline[] =
+ "v=0\r\n"
+ "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=msid-semantic: WMS local_stream_1 local_stream_2\r\n";
+ // Deserialize
+ EXPECT_TRUE(SdpDeserialize(kSdpWithoutMline, &jdesc));
+ EXPECT_EQ(0u, jdesc.description()->contents().size());
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithoutCarriageReturn) {
+ JsepSessionDescription jdesc(kDummyType);
+ std::string sdp_without_carriage_return = kSdpFullString;
+ Replace("\r\n", "\n", &sdp_without_carriage_return);
+ // Deserialize
+ EXPECT_TRUE(SdpDeserialize(sdp_without_carriage_return, &jdesc));
+ // Verify
+ EXPECT_TRUE(CompareSessionDescription(jdesc_, jdesc));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithoutCandidates) {
+ // SessionDescription with desc but without candidates.
+ JsepSessionDescription jdesc_no_candidates(kDummyType);
+ ASSERT_TRUE(jdesc_no_candidates.Initialize(desc_.Clone(), kSessionId,
+ kSessionVersion));
+ JsepSessionDescription new_jdesc(kDummyType);
+ EXPECT_TRUE(SdpDeserialize(kSdpString, &new_jdesc));
+ EXPECT_TRUE(CompareSessionDescription(jdesc_no_candidates, new_jdesc));
+}
+
// Static payload types may omit their a=rtpmap lines: the parser fills in the
// well-known codec (e.g. 0 -> PCMU) and keeps the m= line <fmt> order.
TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithoutRtpmap) {
  static const char kSdpNoRtpmapString[] =
      "v=0\r\n"
      "o=- 11 22 IN IP4 127.0.0.1\r\n"
      "s=-\r\n"
      "t=0 0\r\n"
      "m=audio 49232 RTP/AVP 0 18 103\r\n"
      // Codec that doesn't appear in the m= line will be ignored.
      "a=rtpmap:104 ISAC/32000\r\n"
      // The rtpmap line for static payload codec is optional.
      "a=rtpmap:18 G729/8000\r\n"
      "a=rtpmap:103 ISAC/16000\r\n";

  JsepSessionDescription jdesc(kDummyType);
  EXPECT_TRUE(SdpDeserialize(kSdpNoRtpmapString, &jdesc));
  cricket::AudioContentDescription* audio =
      cricket::GetFirstAudioContentDescription(jdesc.description());
  cricket::AudioCodecs ref_codecs;
  // The codecs in the AudioContentDescription should be in the same order as
  // the payload types (<fmt>s) on the m= line.
  ref_codecs.push_back(cricket::CreateAudioCodec(0, "PCMU", 8000, 1));
  ref_codecs.push_back(cricket::CreateAudioCodec(18, "G729", 8000, 1));
  ref_codecs.push_back(cricket::CreateAudioCodec(103, "ISAC", 16000, 1));
  EXPECT_EQ(ref_codecs, audio->codecs());
}
+
// An a=fmtp line may reference a static payload type that has no a=rtpmap
// line; its parameters must attach to the implied codec (18 -> G729).
TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithoutRtpmapButWithFmtp) {
  static const char kSdpNoRtpmapString[] =
      "v=0\r\n"
      "o=- 11 22 IN IP4 127.0.0.1\r\n"
      "s=-\r\n"
      "t=0 0\r\n"
      "m=audio 49232 RTP/AVP 18 103\r\n"
      "a=fmtp:18 annexb=yes\r\n"
      "a=rtpmap:103 ISAC/16000\r\n";

  JsepSessionDescription jdesc(kDummyType);
  EXPECT_TRUE(SdpDeserialize(kSdpNoRtpmapString, &jdesc));
  cricket::AudioContentDescription* audio =
      cricket::GetFirstAudioContentDescription(jdesc.description());

  cricket::Codec g729 = audio->codecs()[0];
  EXPECT_EQ("G729", g729.name);
  EXPECT_EQ(8000, g729.clockrate);
  EXPECT_EQ(18, g729.id);
  // The fmtp parameter must be present on the inferred codec.
  cricket::CodecParameterMap::iterator found = g729.params.find("annexb");
  ASSERT_TRUE(found != g729.params.end());
  EXPECT_EQ(found->second, "yes");

  cricket::Codec isac = audio->codecs()[1];
  EXPECT_EQ("ISAC", isac.name);
  EXPECT_EQ(103, isac.id);
  EXPECT_EQ(16000, isac.clockrate);
}
+
+// Ensure that we can deserialize SDP with a=fingerprint properly.
+TEST_F(WebRtcSdpTest, DeserializeJsepSessionDescriptionWithFingerprint) {
+ // Add a DTLS a=fingerprint attribute to our session description.
+ AddFingerprint();
+ JsepSessionDescription new_jdesc(kDummyType);
+ ASSERT_TRUE(new_jdesc.Initialize(desc_.Clone(), jdesc_.session_id(),
+ jdesc_.session_version()));
+
+ JsepSessionDescription jdesc_with_fingerprint(kDummyType);
+ std::string sdp_with_fingerprint = kSdpString;
+ InjectAfter(kAttributeIcePwdVoice, kFingerprint, &sdp_with_fingerprint);
+ InjectAfter(kAttributeIcePwdVideo, kFingerprint, &sdp_with_fingerprint);
+ EXPECT_TRUE(SdpDeserialize(sdp_with_fingerprint, &jdesc_with_fingerprint));
+ EXPECT_TRUE(CompareSessionDescription(jdesc_with_fingerprint, new_jdesc));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithBundle) {
+ JsepSessionDescription jdesc_with_bundle(kDummyType);
+ std::string sdp_with_bundle = kSdpFullString;
+ InjectAfter(kSessionTime,
+ "a=group:BUNDLE audio_content_name video_content_name\r\n",
+ &sdp_with_bundle);
+ EXPECT_TRUE(SdpDeserialize(sdp_with_bundle, &jdesc_with_bundle));
+ ContentGroup group(cricket::GROUP_TYPE_BUNDLE);
+ group.AddContentName(kAudioContentName);
+ group.AddContentName(kVideoContentName);
+ desc_.AddGroup(group);
+ ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(),
+ jdesc_.session_version()));
+ EXPECT_TRUE(CompareSessionDescription(jdesc_, jdesc_with_bundle));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithBandwidth) {
+ JsepSessionDescription jdesc_with_bandwidth(kDummyType);
+ std::string sdp_with_bandwidth = kSdpFullString;
+ InjectAfter("a=mid:video_content_name\r\na=sendrecv\r\n", "b=AS:100\r\n",
+ &sdp_with_bandwidth);
+ InjectAfter("a=mid:audio_content_name\r\na=sendrecv\r\n", "b=AS:50\r\n",
+ &sdp_with_bandwidth);
+ EXPECT_TRUE(SdpDeserialize(sdp_with_bandwidth, &jdesc_with_bandwidth));
+ VideoContentDescription* vcd = GetFirstVideoContentDescription(&desc_);
+ vcd->set_bandwidth(100 * 1000);
+ AudioContentDescription* acd = GetFirstAudioContentDescription(&desc_);
+ acd->set_bandwidth(50 * 1000);
+ ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(),
+ jdesc_.session_version()));
+ EXPECT_TRUE(CompareSessionDescription(jdesc_, jdesc_with_bandwidth));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithTiasBandwidth) {
+ JsepSessionDescription jdesc_with_bandwidth(kDummyType);
+ std::string sdp_with_bandwidth = kSdpFullString;
+ InjectAfter("a=mid:video_content_name\r\na=sendrecv\r\n", "b=TIAS:100000\r\n",
+ &sdp_with_bandwidth);
+ InjectAfter("a=mid:audio_content_name\r\na=sendrecv\r\n", "b=TIAS:50000\r\n",
+ &sdp_with_bandwidth);
+ EXPECT_TRUE(SdpDeserialize(sdp_with_bandwidth, &jdesc_with_bandwidth));
+ VideoContentDescription* vcd = GetFirstVideoContentDescription(&desc_);
+ vcd->set_bandwidth(100 * 1000);
+ AudioContentDescription* acd = GetFirstAudioContentDescription(&desc_);
+ acd->set_bandwidth(50 * 1000);
+ ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(),
+ jdesc_.session_version()));
+ EXPECT_TRUE(CompareSessionDescription(jdesc_, jdesc_with_bandwidth));
+}
+
+TEST_F(WebRtcSdpTest,
+ DeserializeSessionDescriptionWithUnknownBandwidthModifier) {
+ JsepSessionDescription jdesc_with_bandwidth(kDummyType);
+ std::string sdp_with_bandwidth = kSdpFullString;
+ InjectAfter("a=mid:video_content_name\r\na=sendrecv\r\n",
+ "b=unknown:100000\r\n", &sdp_with_bandwidth);
+ InjectAfter("a=mid:audio_content_name\r\na=sendrecv\r\n",
+ "b=unknown:50000\r\n", &sdp_with_bandwidth);
+ EXPECT_TRUE(SdpDeserialize(sdp_with_bandwidth, &jdesc_with_bandwidth));
+ VideoContentDescription* vcd = GetFirstVideoContentDescription(&desc_);
+ vcd->set_bandwidth(-1);
+ AudioContentDescription* acd = GetFirstAudioContentDescription(&desc_);
+ acd->set_bandwidth(-1);
+ ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(),
+ jdesc_.session_version()));
+ EXPECT_TRUE(CompareSessionDescription(jdesc_, jdesc_with_bandwidth));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithIceOptions) {
+ JsepSessionDescription jdesc_with_ice_options(kDummyType);
+ std::string sdp_with_ice_options = kSdpFullString;
+ InjectAfter(kSessionTime, "a=ice-options:iceoption3\r\n",
+ &sdp_with_ice_options);
+ InjectAfter(kAttributeIcePwdVoice, "a=ice-options:iceoption1\r\n",
+ &sdp_with_ice_options);
+ InjectAfter(kAttributeIcePwdVideo, "a=ice-options:iceoption2\r\n",
+ &sdp_with_ice_options);
+ EXPECT_TRUE(SdpDeserialize(sdp_with_ice_options, &jdesc_with_ice_options));
+ std::vector<std::string> transport_options;
+ transport_options.push_back(kIceOption3);
+ transport_options.push_back(kIceOption1);
+ AddIceOptions(kAudioContentName, transport_options);
+ transport_options.clear();
+ transport_options.push_back(kIceOption3);
+ transport_options.push_back(kIceOption2);
+ AddIceOptions(kVideoContentName, transport_options);
+ ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(),
+ jdesc_.session_version()));
+ EXPECT_TRUE(CompareSessionDescription(jdesc_, jdesc_with_ice_options));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithUfragPwd) {
+ // Remove the original ice-ufrag and ice-pwd
+ JsepSessionDescription jdesc_with_ufrag_pwd(kDummyType);
+ std::string sdp_with_ufrag_pwd = kSdpFullString;
+ EXPECT_TRUE(RemoveCandidateUfragPwd(&sdp_with_ufrag_pwd));
+ // Add session level ufrag and pwd
+ InjectAfter(kSessionTime,
+ "a=ice-pwd:session+level+icepwd\r\n"
+ "a=ice-ufrag:session+level+iceufrag\r\n",
+ &sdp_with_ufrag_pwd);
+ // Add media level ufrag and pwd for audio
+ InjectAfter(
+ "a=mid:audio_content_name\r\n",
+ "a=ice-pwd:media+level+icepwd\r\na=ice-ufrag:media+level+iceufrag\r\n",
+ &sdp_with_ufrag_pwd);
+ // Update the candidate ufrag and pwd to the expected ones.
+ EXPECT_TRUE(UpdateCandidateUfragPwd(&jdesc_, 0, "media+level+iceufrag",
+ "media+level+icepwd"));
+ EXPECT_TRUE(UpdateCandidateUfragPwd(&jdesc_, 1, "session+level+iceufrag",
+ "session+level+icepwd"));
+ EXPECT_TRUE(SdpDeserialize(sdp_with_ufrag_pwd, &jdesc_with_ufrag_pwd));
+ EXPECT_TRUE(CompareSessionDescription(jdesc_, jdesc_with_ufrag_pwd));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithRecvOnlyContent) {
+ EXPECT_TRUE(TestDeserializeDirection(RtpTransceiverDirection::kRecvOnly));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithSendOnlyContent) {
+ EXPECT_TRUE(TestDeserializeDirection(RtpTransceiverDirection::kSendOnly));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithInactiveContent) {
+ EXPECT_TRUE(TestDeserializeDirection(RtpTransceiverDirection::kInactive));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithRejectedAudio) {
+ EXPECT_TRUE(TestDeserializeRejected(true, false));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithRejectedVideo) {
+ EXPECT_TRUE(TestDeserializeRejected(false, true));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithRejectedAudioVideo) {
+ EXPECT_TRUE(TestDeserializeRejected(true, true));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithExtmapAllowMixed) {
+ jdesc_.description()->set_extmap_allow_mixed(true);
+ std::string sdp_with_extmap_allow_mixed = kSdpFullString;
+ // Deserialize
+ JsepSessionDescription jdesc_deserialized(kDummyType);
+ ASSERT_TRUE(SdpDeserialize(sdp_with_extmap_allow_mixed, &jdesc_deserialized));
+ // Verify
+ EXPECT_TRUE(CompareSessionDescription(jdesc_, jdesc_deserialized));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithoutExtmapAllowMixed) {
+ jdesc_.description()->set_extmap_allow_mixed(false);
+ std::string sdp_without_extmap_allow_mixed = kSdpFullString;
+ Replace(kExtmapAllowMixed, "", &sdp_without_extmap_allow_mixed);
+ // Deserialize
+ JsepSessionDescription jdesc_deserialized(kDummyType);
+ ASSERT_TRUE(
+ SdpDeserialize(sdp_without_extmap_allow_mixed, &jdesc_deserialized));
+ // Verify
+ EXPECT_TRUE(CompareSessionDescription(jdesc_, jdesc_deserialized));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeMediaContentDescriptionWithExtmapAllowMixed) {
+ cricket::MediaContentDescription* video_desc =
+ jdesc_.description()->GetContentDescriptionByName(kVideoContentName);
+ ASSERT_TRUE(video_desc);
+ cricket::MediaContentDescription* audio_desc =
+ jdesc_.description()->GetContentDescriptionByName(kAudioContentName);
+ ASSERT_TRUE(audio_desc);
+ video_desc->set_extmap_allow_mixed_enum(
+ cricket::MediaContentDescription::kMedia);
+ audio_desc->set_extmap_allow_mixed_enum(
+ cricket::MediaContentDescription::kMedia);
+
+ std::string sdp_with_extmap_allow_mixed = kSdpFullString;
+ InjectAfter("a=mid:audio_content_name\r\n", kExtmapAllowMixed,
+ &sdp_with_extmap_allow_mixed);
+ InjectAfter("a=mid:video_content_name\r\n", kExtmapAllowMixed,
+ &sdp_with_extmap_allow_mixed);
+
+ // Deserialize
+ JsepSessionDescription jdesc_deserialized(kDummyType);
+ EXPECT_TRUE(SdpDeserialize(sdp_with_extmap_allow_mixed, &jdesc_deserialized));
+ // Verify
+ EXPECT_TRUE(CompareSessionDescription(jdesc_, jdesc_deserialized));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeCandidate) {
+ JsepIceCandidate jcandidate(kDummyMid, kDummyIndex);
+
+ std::string sdp = kSdpOneCandidate;
+ EXPECT_TRUE(SdpDeserializeCandidate(sdp, &jcandidate));
+ EXPECT_EQ(kDummyMid, jcandidate.sdp_mid());
+ EXPECT_EQ(kDummyIndex, jcandidate.sdp_mline_index());
+ EXPECT_TRUE(jcandidate.candidate().IsEquivalent(jcandidate_->candidate()));
+ EXPECT_EQ(0, jcandidate.candidate().network_cost());
+
+ // Candidate line without generation extension.
+ sdp = kSdpOneCandidate;
+ Replace(" generation 2", "", &sdp);
+ EXPECT_TRUE(SdpDeserializeCandidate(sdp, &jcandidate));
+ EXPECT_EQ(kDummyMid, jcandidate.sdp_mid());
+ EXPECT_EQ(kDummyIndex, jcandidate.sdp_mline_index());
+ Candidate expected = jcandidate_->candidate();
+ expected.set_generation(0);
+ EXPECT_TRUE(jcandidate.candidate().IsEquivalent(expected));
+
+ // Candidate with network id and/or cost.
+ sdp = kSdpOneCandidate;
+ Replace(" generation 2", " generation 2 network-id 2", &sdp);
+ EXPECT_TRUE(SdpDeserializeCandidate(sdp, &jcandidate));
+ EXPECT_EQ(kDummyMid, jcandidate.sdp_mid());
+ EXPECT_EQ(kDummyIndex, jcandidate.sdp_mline_index());
+ expected = jcandidate_->candidate();
+ expected.set_network_id(2);
+ EXPECT_TRUE(jcandidate.candidate().IsEquivalent(expected));
+ EXPECT_EQ(0, jcandidate.candidate().network_cost());
+ // Add network cost
+ Replace(" network-id 2", " network-id 2 network-cost 9", &sdp);
+ EXPECT_TRUE(SdpDeserializeCandidate(sdp, &jcandidate));
+ EXPECT_TRUE(jcandidate.candidate().IsEquivalent(expected));
+ EXPECT_EQ(9, jcandidate.candidate().network_cost());
+
+ sdp = kSdpTcpActiveCandidate;
+ EXPECT_TRUE(SdpDeserializeCandidate(sdp, &jcandidate));
+ // Make a cricket::Candidate equivalent to kSdpTcpCandidate string.
+ Candidate candidate(ICE_CANDIDATE_COMPONENT_RTP, "tcp",
+ rtc::SocketAddress("192.168.1.5", 9), kCandidatePriority,
+ "", "", LOCAL_PORT_TYPE, kCandidateGeneration,
+ kCandidateFoundation1);
+ std::unique_ptr<IceCandidateInterface> jcandidate_template(
+ new JsepIceCandidate(std::string("audio_content_name"), 0, candidate));
+ EXPECT_TRUE(
+ jcandidate.candidate().IsEquivalent(jcandidate_template->candidate()));
+ sdp = kSdpTcpPassiveCandidate;
+ EXPECT_TRUE(SdpDeserializeCandidate(sdp, &jcandidate));
+ sdp = kSdpTcpSOCandidate;
+ EXPECT_TRUE(SdpDeserializeCandidate(sdp, &jcandidate));
+}
+
+// This test verifies the deserialization of candidate-attribute
+// as per RFC 5245. Candidate-attribute will be of the format
+// candidate:<blah>. This format will be used when candidates
+// are trickled.
+TEST_F(WebRtcSdpTest, DeserializeRawCandidateAttribute) {
+ JsepIceCandidate jcandidate(kDummyMid, kDummyIndex);
+
+ std::string candidate_attribute = kRawCandidate;
+ EXPECT_TRUE(SdpDeserializeCandidate(candidate_attribute, &jcandidate));
+ EXPECT_EQ(kDummyMid, jcandidate.sdp_mid());
+ EXPECT_EQ(kDummyIndex, jcandidate.sdp_mline_index());
+ EXPECT_TRUE(jcandidate.candidate().IsEquivalent(jcandidate_->candidate()));
+ EXPECT_EQ(2u, jcandidate.candidate().generation());
+
+ // Candidate line without generation extension.
+ candidate_attribute = kRawCandidate;
+ Replace(" generation 2", "", &candidate_attribute);
+ EXPECT_TRUE(SdpDeserializeCandidate(candidate_attribute, &jcandidate));
+ EXPECT_EQ(kDummyMid, jcandidate.sdp_mid());
+ EXPECT_EQ(kDummyIndex, jcandidate.sdp_mline_index());
+ Candidate expected = jcandidate_->candidate();
+ expected.set_generation(0);
+ EXPECT_TRUE(jcandidate.candidate().IsEquivalent(expected));
+
+ // Candidate line without candidate:
+ candidate_attribute = kRawCandidate;
+ Replace("candidate:", "", &candidate_attribute);
+ EXPECT_FALSE(SdpDeserializeCandidate(candidate_attribute, &jcandidate));
+
+ // Candidate line with IPV6 address.
+ EXPECT_TRUE(SdpDeserializeCandidate(kRawIPV6Candidate, &jcandidate));
+
+ // Candidate line with hostname address.
+ EXPECT_TRUE(SdpDeserializeCandidate(kRawHostnameCandidate, &jcandidate));
+}
+
+// This test verifies that the deserialization of an invalid candidate string
+// fails.
+TEST_F(WebRtcSdpTest, DeserializeInvalidCandidiate) {
+ JsepIceCandidate jcandidate(kDummyMid, kDummyIndex);
+
+ std::string candidate_attribute = kRawCandidate;
+ candidate_attribute.replace(0, 1, "x");
+ EXPECT_FALSE(SdpDeserializeCandidate(candidate_attribute, &jcandidate));
+
+ candidate_attribute = kSdpOneCandidate;
+ candidate_attribute.replace(0, 1, "x");
+ EXPECT_FALSE(SdpDeserializeCandidate(candidate_attribute, &jcandidate));
+
+ candidate_attribute = kRawCandidate;
+ candidate_attribute.append("\r\n");
+ candidate_attribute.append(kRawCandidate);
+ EXPECT_FALSE(SdpDeserializeCandidate(candidate_attribute, &jcandidate));
+
+ EXPECT_FALSE(SdpDeserializeCandidate(kSdpTcpInvalidCandidate, &jcandidate));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSdpWithSctpDataChannels) {
+ bool use_sctpmap = true;
+ AddSctpDataChannel(use_sctpmap);
+ JsepSessionDescription jdesc(kDummyType);
+ ASSERT_TRUE(jdesc.Initialize(desc_.Clone(), kSessionId, kSessionVersion));
+
+ std::string sdp_with_data = kSdpString;
+ sdp_with_data.append(kSdpSctpDataChannelString);
+ JsepSessionDescription jdesc_output(kDummyType);
+
+ // Verify with UDP/DTLS/SCTP (already in kSdpSctpDataChannelString).
+ EXPECT_TRUE(SdpDeserialize(sdp_with_data, &jdesc_output));
+ EXPECT_TRUE(CompareSessionDescription(jdesc, jdesc_output));
+
+ // Verify with DTLS/SCTP.
+ sdp_with_data.replace(sdp_with_data.find(kUdpDtlsSctp), strlen(kUdpDtlsSctp),
+ kDtlsSctp);
+ EXPECT_TRUE(SdpDeserialize(sdp_with_data, &jdesc_output));
+ EXPECT_TRUE(CompareSessionDescription(jdesc, jdesc_output));
+
+ // Verify with TCP/DTLS/SCTP.
+ sdp_with_data.replace(sdp_with_data.find(kDtlsSctp), strlen(kDtlsSctp),
+ kTcpDtlsSctp);
+ EXPECT_TRUE(SdpDeserialize(sdp_with_data, &jdesc_output));
+ EXPECT_TRUE(CompareSessionDescription(jdesc, jdesc_output));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSdpWithSctpDataChannelsWithSctpPort) {
+ bool use_sctpmap = false;
+ AddSctpDataChannel(use_sctpmap);
+ JsepSessionDescription jdesc(kDummyType);
+ ASSERT_TRUE(jdesc.Initialize(desc_.Clone(), kSessionId, kSessionVersion));
+
+ std::string sdp_with_data = kSdpString;
+ sdp_with_data.append(kSdpSctpDataChannelStringWithSctpPort);
+ JsepSessionDescription jdesc_output(kDummyType);
+
+ EXPECT_TRUE(SdpDeserialize(sdp_with_data, &jdesc_output));
+ EXPECT_TRUE(CompareSessionDescription(jdesc, jdesc_output));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSdpWithSctpDataChannelsWithSctpColonPort) {
+ bool use_sctpmap = false;
+ AddSctpDataChannel(use_sctpmap);
+ JsepSessionDescription jdesc(kDummyType);
+ ASSERT_TRUE(jdesc.Initialize(desc_.Clone(), kSessionId, kSessionVersion));
+
+ std::string sdp_with_data = kSdpString;
+ sdp_with_data.append(kSdpSctpDataChannelStringWithSctpColonPort);
+ JsepSessionDescription jdesc_output(kDummyType);
+
+ EXPECT_TRUE(SdpDeserialize(sdp_with_data, &jdesc_output));
+ EXPECT_TRUE(CompareSessionDescription(jdesc, jdesc_output));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSdpWithSctpDataChannelsButWrongMediaType) {
+ bool use_sctpmap = true;
+ AddSctpDataChannel(use_sctpmap);
+ JsepSessionDescription jdesc(kDummyType);
+ ASSERT_TRUE(jdesc.Initialize(desc_.Clone(), kSessionId, kSessionVersion));
+
+ std::string sdp = kSdpSessionString;
+ sdp += kSdpSctpDataChannelString;
+
+ const char needle[] = "m=application ";
+ sdp.replace(sdp.find(needle), strlen(needle), "m=application:bogus ");
+
+ JsepSessionDescription jdesc_output(kDummyType);
+ EXPECT_TRUE(SdpDeserialize(sdp, &jdesc_output));
+
+ EXPECT_EQ(1u, jdesc_output.description()->contents().size());
+ EXPECT_TRUE(jdesc_output.description()->contents()[0].rejected);
+}
+
+// Helper function to set the max-message-size parameter in the
+// SCTP data codec.
+void MutateJsepSctpMaxMessageSize(const SessionDescription& desc,
+ int new_value,
+ JsepSessionDescription* jdesc) {
+ std::unique_ptr<cricket::SessionDescription> mutant = desc.Clone();
+ SctpDataContentDescription* dcdesc =
+ mutant->GetContentDescriptionByName(kDataContentName)->as_sctp();
+ dcdesc->set_max_message_size(new_value);
+ jdesc->Initialize(std::move(mutant), kSessionId, kSessionVersion);
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSdpWithSctpDataChannelsWithMaxMessageSize) {
+ bool use_sctpmap = false;
+ AddSctpDataChannel(use_sctpmap);
+ JsepSessionDescription jdesc(kDummyType);
+ std::string sdp_with_data = kSdpString;
+
+ sdp_with_data.append(kSdpSctpDataChannelStringWithSctpColonPort);
+ sdp_with_data.append("a=max-message-size:12345\r\n");
+ MutateJsepSctpMaxMessageSize(desc_, 12345, &jdesc);
+ JsepSessionDescription jdesc_output(kDummyType);
+
+ EXPECT_TRUE(SdpDeserialize(sdp_with_data, &jdesc_output));
+ EXPECT_TRUE(CompareSessionDescription(jdesc, jdesc_output));
+}
+
+TEST_F(WebRtcSdpTest, SerializeSdpWithSctpDataChannelWithMaxMessageSize) {
+ bool use_sctpmap = false;
+ AddSctpDataChannel(use_sctpmap);
+ JsepSessionDescription jdesc(kDummyType);
+ MutateJsepSctpMaxMessageSize(desc_, 12345, &jdesc);
+ std::string message = webrtc::SdpSerialize(jdesc);
+ EXPECT_NE(std::string::npos,
+ message.find("\r\na=max-message-size:12345\r\n"));
+ JsepSessionDescription jdesc_output(kDummyType);
+ EXPECT_TRUE(SdpDeserialize(message, &jdesc_output));
+ EXPECT_TRUE(CompareSessionDescription(jdesc, jdesc_output));
+}
+
+TEST_F(WebRtcSdpTest,
+ SerializeSdpWithSctpDataChannelWithDefaultMaxMessageSize) {
+ // https://tools.ietf.org/html/draft-ietf-mmusic-sctp-sdp-26#section-6
+ // The default max message size is 64K.
+ bool use_sctpmap = false;
+ AddSctpDataChannel(use_sctpmap);
+ JsepSessionDescription jdesc(kDummyType);
+ MutateJsepSctpMaxMessageSize(desc_, 65536, &jdesc);
+ std::string message = webrtc::SdpSerialize(jdesc);
+ EXPECT_EQ(std::string::npos, message.find("\r\na=max-message-size:"));
+ JsepSessionDescription jdesc_output(kDummyType);
+ EXPECT_TRUE(SdpDeserialize(message, &jdesc_output));
+ EXPECT_TRUE(CompareSessionDescription(jdesc, jdesc_output));
+}
+
+// Test to check the behaviour if sctp-port is specified
+// on the m= line and in a=sctp-port.
+TEST_F(WebRtcSdpTest, DeserializeSdpWithMultiSctpPort) {
+ bool use_sctpmap = true;
+ AddSctpDataChannel(use_sctpmap);
+ JsepSessionDescription jdesc(kDummyType);
+ ASSERT_TRUE(jdesc.Initialize(desc_.Clone(), kSessionId, kSessionVersion));
+
+ std::string sdp_with_data = kSdpString;
+ // Append m= attributes
+ sdp_with_data.append(kSdpSctpDataChannelString);
+ // Append a=sctp-port attribute
+ sdp_with_data.append("a=sctp-port 5000\r\n");
+ JsepSessionDescription jdesc_output(kDummyType);
+
+ EXPECT_FALSE(SdpDeserialize(sdp_with_data, &jdesc_output));
+}
+
+// Test behavior if a=rtpmap occurs in an SCTP section.
+TEST_F(WebRtcSdpTest, DeserializeSdpWithRtpmapAttribute) {
+ std::string sdp_with_data = kSdpString;
+ // Append m= attributes
+ sdp_with_data.append(kSdpSctpDataChannelString);
+ // Append a=rtpmap attribute
+ sdp_with_data.append("a=rtpmap:111 opus/48000/2\r\n");
+ JsepSessionDescription jdesc_output(kDummyType);
+ // Correct behavior is to ignore the extra attribute.
+ EXPECT_TRUE(SdpDeserialize(sdp_with_data, &jdesc_output));
+}
+
+// For crbug/344475.
+TEST_F(WebRtcSdpTest, DeserializeSdpWithCorruptedSctpDataChannels) {
+ std::string sdp_with_data = kSdpString;
+ sdp_with_data.append(kSdpSctpDataChannelString);
+ // Remove the "\n" at the end.
+ sdp_with_data = sdp_with_data.substr(0, sdp_with_data.size() - 1);
+ JsepSessionDescription jdesc_output(kDummyType);
+
+ EXPECT_FALSE(SdpDeserialize(sdp_with_data, &jdesc_output));
+ // No crash is a pass.
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSdpWithSctpDataChannelAndUnusualPort) {
+ bool use_sctpmap = true;
+ AddSctpDataChannel(use_sctpmap);
+
+ // First setup the expected JsepSessionDescription.
+ JsepSessionDescription jdesc(kDummyType);
+ MutateJsepSctpPort(&jdesc, desc_, kUnusualSctpPort);
+
+ // Then get the deserialized JsepSessionDescription.
+ std::string sdp_with_data = kSdpString;
+ sdp_with_data.append(kSdpSctpDataChannelString);
+ absl::StrReplaceAll(
+ {{rtc::ToString(kDefaultSctpPort), rtc::ToString(kUnusualSctpPort)}},
+ &sdp_with_data);
+ JsepSessionDescription jdesc_output(kDummyType);
+
+ EXPECT_TRUE(SdpDeserialize(sdp_with_data, &jdesc_output));
+ EXPECT_TRUE(CompareSessionDescription(jdesc, jdesc_output));
+}
+
+TEST_F(WebRtcSdpTest,
+ DeserializeSdpWithSctpDataChannelAndUnusualPortInAttribute) {
+ bool use_sctpmap = false;
+ AddSctpDataChannel(use_sctpmap);
+
+ JsepSessionDescription jdesc(kDummyType);
+ MutateJsepSctpPort(&jdesc, desc_, kUnusualSctpPort);
+
+ // We need to test the deserialized JsepSessionDescription from
+ // kSdpSctpDataChannelStringWithSctpPort for
+ // draft-ietf-mmusic-sctp-sdp-07
+ // a=sctp-port
+ std::string sdp_with_data = kSdpString;
+ sdp_with_data.append(kSdpSctpDataChannelStringWithSctpPort);
+ absl::StrReplaceAll(
+ {{rtc::ToString(kDefaultSctpPort), rtc::ToString(kUnusualSctpPort)}},
+ &sdp_with_data);
+ JsepSessionDescription jdesc_output(kDummyType);
+
+ EXPECT_TRUE(SdpDeserialize(sdp_with_data, &jdesc_output));
+ EXPECT_TRUE(CompareSessionDescription(jdesc, jdesc_output));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSdpWithSctpDataChannelsAndBandwidth) {
+ bool use_sctpmap = true;
+ AddSctpDataChannel(use_sctpmap);
+ JsepSessionDescription jdesc(kDummyType);
+ SctpDataContentDescription* dcd = GetFirstSctpDataContentDescription(&desc_);
+ dcd->set_bandwidth(100 * 1000);
+ ASSERT_TRUE(jdesc.Initialize(desc_.Clone(), kSessionId, kSessionVersion));
+
+ std::string sdp_with_bandwidth = kSdpString;
+ sdp_with_bandwidth.append(kSdpSctpDataChannelString);
+ InjectAfter("a=mid:data_content_name\r\n", "b=AS:100\r\n",
+ &sdp_with_bandwidth);
+ JsepSessionDescription jdesc_with_bandwidth(kDummyType);
+
+ // SCTP has congestion control, so we shouldn't limit the bandwidth
+ // as we do for RTP.
+ EXPECT_TRUE(SdpDeserialize(sdp_with_bandwidth, &jdesc_with_bandwidth));
+ EXPECT_TRUE(CompareSessionDescription(jdesc, jdesc_with_bandwidth));
+}
+
+class WebRtcSdpExtmapTest : public WebRtcSdpTest,
+ public ::testing::WithParamInterface<bool> {};
+
+TEST_P(WebRtcSdpExtmapTest,
+ DeserializeSessionDescriptionWithSessionLevelExtmap) {
+ bool encrypted = GetParam();
+ TestDeserializeExtmap(true, false, encrypted);
+}
+
+TEST_P(WebRtcSdpExtmapTest, DeserializeSessionDescriptionWithMediaLevelExtmap) {
+ bool encrypted = GetParam();
+ TestDeserializeExtmap(false, true, encrypted);
+}
+
+TEST_P(WebRtcSdpExtmapTest, DeserializeSessionDescriptionWithInvalidExtmap) {
+ bool encrypted = GetParam();
+ TestDeserializeExtmap(true, true, encrypted);
+}
+
+INSTANTIATE_TEST_SUITE_P(Encrypted,
+ WebRtcSdpExtmapTest,
+ ::testing::Values(false, true));
+
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithoutEndLineBreak) {
+ JsepSessionDescription jdesc(kDummyType);
+ std::string sdp = kSdpFullString;
+ sdp = sdp.substr(0, sdp.size() - 2); // Remove \r\n at the end.
+ // Deserialize
+ SdpParseError error;
+ EXPECT_FALSE(webrtc::SdpDeserialize(sdp, &jdesc, &error));
+ const std::string lastline = "a=ssrc:3 msid:local_stream_1 video_track_id_1";
+ EXPECT_EQ(lastline, error.line);
+ EXPECT_EQ("Invalid SDP line.", error.description);
+}
+
+TEST_F(WebRtcSdpTest, DeserializeCandidateWithDifferentTransport) {
+ JsepIceCandidate jcandidate(kDummyMid, kDummyIndex);
+ std::string new_sdp = kSdpOneCandidate;
+ Replace("udp", "unsupported_transport", &new_sdp);
+ EXPECT_FALSE(SdpDeserializeCandidate(new_sdp, &jcandidate));
+ new_sdp = kSdpOneCandidate;
+ Replace("udp", "uDP", &new_sdp);
+ EXPECT_TRUE(SdpDeserializeCandidate(new_sdp, &jcandidate));
+ EXPECT_EQ(kDummyMid, jcandidate.sdp_mid());
+ EXPECT_EQ(kDummyIndex, jcandidate.sdp_mline_index());
+ EXPECT_TRUE(jcandidate.candidate().IsEquivalent(jcandidate_->candidate()));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeCandidateWithUfragPwd) {
+ JsepIceCandidate jcandidate(kDummyMid, kDummyIndex);
+ EXPECT_TRUE(
+ SdpDeserializeCandidate(kSdpOneCandidateWithUfragPwd, &jcandidate));
+ EXPECT_EQ(kDummyMid, jcandidate.sdp_mid());
+ EXPECT_EQ(kDummyIndex, jcandidate.sdp_mline_index());
+ Candidate ref_candidate = jcandidate_->candidate();
+ ref_candidate.set_username("user_rtp");
+ ref_candidate.set_password("password_rtp");
+ EXPECT_TRUE(jcandidate.candidate().IsEquivalent(ref_candidate));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSdpWithConferenceFlag) {
+ JsepSessionDescription jdesc(kDummyType);
+
+ // Deserialize
+ EXPECT_TRUE(SdpDeserialize(kSdpConferenceString, &jdesc));
+
+ // Verify
+ cricket::AudioContentDescription* audio =
+ cricket::GetFirstAudioContentDescription(jdesc.description());
+ EXPECT_TRUE(audio->conference_mode());
+
+ cricket::VideoContentDescription* video =
+ cricket::GetFirstVideoContentDescription(jdesc.description());
+ EXPECT_TRUE(video->conference_mode());
+}
+
+TEST_F(WebRtcSdpTest, SerializeSdpWithConferenceFlag) {
+ JsepSessionDescription jdesc(kDummyType);
+
+ // We tested deserialization already above, so just test that if we serialize
+ // and deserialize the flag doesn't disappear.
+ EXPECT_TRUE(SdpDeserialize(kSdpConferenceString, &jdesc));
+ std::string reserialized = webrtc::SdpSerialize(jdesc);
+ EXPECT_TRUE(SdpDeserialize(reserialized, &jdesc));
+
+ // Verify.
+ cricket::AudioContentDescription* audio =
+ cricket::GetFirstAudioContentDescription(jdesc.description());
+ EXPECT_TRUE(audio->conference_mode());
+
+ cricket::VideoContentDescription* video =
+ cricket::GetFirstVideoContentDescription(jdesc.description());
+ EXPECT_TRUE(video->conference_mode());
+}
+
+TEST_F(WebRtcSdpTest, SerializeAndDeserializeRemoteNetEstimate) {
+ {
+ // By default remote estimates are disabled.
+ JsepSessionDescription dst(kDummyType);
+ SdpDeserialize(webrtc::SdpSerialize(jdesc_), &dst);
+ EXPECT_FALSE(cricket::GetFirstVideoContentDescription(dst.description())
+ ->remote_estimate());
+ }
+ {
+ // When remote estimate is enabled, the setting is propagated via SDP.
+ cricket::GetFirstVideoContentDescription(jdesc_.description())
+ ->set_remote_estimate(true);
+ JsepSessionDescription dst(kDummyType);
+ SdpDeserialize(webrtc::SdpSerialize(jdesc_), &dst);
+ EXPECT_TRUE(cricket::GetFirstVideoContentDescription(dst.description())
+ ->remote_estimate());
+ }
+}
+
+TEST_F(WebRtcSdpTest, DeserializeBrokenSdp) {
+ const char kSdpDestroyer[] = "!@#$%^&";
+ const char kSdpEmptyType[] = " =candidate";
+ const char kSdpEqualAsPlus[] = "a+candidate";
+ const char kSdpSpaceAfterEqual[] = "a= candidate";
+ const char kSdpUpperType[] = "A=candidate";
+ const char kSdpEmptyLine[] = "";
+ const char kSdpMissingValue[] = "a=";
+
+ const char kSdpBrokenFingerprint[] =
+ "a=fingerprint:sha-1 "
+ "4AAD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB";
+ const char kSdpExtraField[] =
+ "a=fingerprint:sha-1 "
+ "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB XXX";
+ const char kSdpMissingSpace[] =
+ "a=fingerprint:sha-1"
+ "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB";
+ // MD5 is not allowed in fingerprints.
+ const char kSdpMd5[] =
+ "a=fingerprint:md5 "
+ "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B";
+
+ // Broken session description
+ ExpectParseFailure("v=", kSdpDestroyer);
+ ExpectParseFailure("o=", kSdpDestroyer);
+ ExpectParseFailure("s=-", kSdpDestroyer);
+ // Broken time description
+ ExpectParseFailure("t=", kSdpDestroyer);
+
+ // Broken media description
+ ExpectParseFailure("m=audio", "c=IN IP4 74.125.224.39");
+ ExpectParseFailure("m=video", kSdpDestroyer);
+ ExpectParseFailure("m=", "c=IN IP4 74.125.224.39");
+
+ // Invalid lines
+ ExpectParseFailure("a=candidate", kSdpEmptyType);
+ ExpectParseFailure("a=candidate", kSdpEqualAsPlus);
+ ExpectParseFailure("a=candidate", kSdpSpaceAfterEqual);
+ ExpectParseFailure("a=candidate", kSdpUpperType);
+
+ // Bogus fingerprint replacing a=sendrev. We selected this attribute
+ // because it's orthogonal to what we are replacing and hence
+ // safe.
+ ExpectParseFailure("a=sendrecv", kSdpBrokenFingerprint);
+ ExpectParseFailure("a=sendrecv", kSdpExtraField);
+ ExpectParseFailure("a=sendrecv", kSdpMissingSpace);
+ ExpectParseFailure("a=sendrecv", kSdpMd5);
+
+ // Empty Line
+ ExpectParseFailure("a=rtcp:2347 IN IP4 74.125.127.126", kSdpEmptyLine);
+ ExpectParseFailure("a=rtcp:2347 IN IP4 74.125.127.126", kSdpMissingValue);
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSdpWithInvalidAttributeValue) {
+ // ssrc
+ ExpectParseFailure("a=ssrc:1", "a=ssrc:badvalue");
+ ExpectParseFailure("a=ssrc-group:FEC 2 3", "a=ssrc-group:FEC badvalue 3");
+ // crypto
+ ExpectParseFailure("a=crypto:1 ", "a=crypto:badvalue ");
+ // rtpmap
+ ExpectParseFailure("a=rtpmap:111 ", "a=rtpmap:badvalue ");
+ ExpectParseFailure("opus/48000/2", "opus/badvalue/2");
+ ExpectParseFailure("opus/48000/2", "opus/48000/badvalue");
+ // candidate
+ ExpectParseFailure("1 udp 2130706432", "badvalue udp 2130706432");
+ ExpectParseFailure("1 udp 2130706432", "1 udp badvalue");
+ ExpectParseFailure("192.168.1.5 1234", "192.168.1.5 badvalue");
+ ExpectParseFailure("rport 2346", "rport badvalue");
+ ExpectParseFailure("rport 2346 generation 2",
+ "rport 2346 generation badvalue");
+ // m line
+ ExpectParseFailure("m=audio 2345 RTP/SAVPF 111 103 104",
+ "m=audio 2345 RTP/SAVPF 111 badvalue 104");
+
+ // bandwidth
+ ExpectParseFailureWithNewLines("a=mid:video_content_name\r\n",
+ "b=AS:badvalue\r\n", "b=AS:badvalue");
+ ExpectParseFailureWithNewLines("a=mid:video_content_name\r\n", "b=AS\r\n",
+ "b=AS");
+ ExpectParseFailureWithNewLines("a=mid:video_content_name\r\n", "b=AS:\r\n",
+ "b=AS:");
+ ExpectParseFailureWithNewLines("a=mid:video_content_name\r\n",
+ "b=AS:12:34\r\n", "b=AS:12:34");
+
+ // rtcp-fb
+ ExpectParseFailureWithNewLines("a=mid:video_content_name\r\n",
+ "a=rtcp-fb:badvalue nack\r\n",
+ "a=rtcp-fb:badvalue nack");
+ // extmap
+ ExpectParseFailureWithNewLines("a=mid:video_content_name\r\n",
+ "a=extmap:badvalue http://example.com\r\n",
+ "a=extmap:badvalue http://example.com");
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSdpWithReorderedPltypes) {
+ JsepSessionDescription jdesc_output(kDummyType);
+
+ const char kSdpWithReorderedPlTypesString[] =
+ "v=0\r\n"
+ "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "m=audio 9 RTP/SAVPF 104 103\r\n" // Pl type 104 preferred.
+ "a=rtpmap:111 opus/48000/2\r\n" // Pltype 111 listed before 103 and 104
+ // in the map.
+ "a=rtpmap:103 ISAC/16000\r\n" // Pltype 103 listed before 104 in the map.
+ "a=rtpmap:104 ISAC/32000\r\n";
+
+ // Deserialize
+ EXPECT_TRUE(SdpDeserialize(kSdpWithReorderedPlTypesString, &jdesc_output));
+
+ const AudioContentDescription* acd =
+ GetFirstAudioContentDescription(jdesc_output.description());
+ ASSERT_TRUE(acd);
+ ASSERT_FALSE(acd->codecs().empty());
+ EXPECT_EQ("ISAC", acd->codecs()[0].name);
+ EXPECT_EQ(32000, acd->codecs()[0].clockrate);
+ EXPECT_EQ(104, acd->codecs()[0].id);
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSerializeCodecParams) {
+ JsepSessionDescription jdesc_output(kDummyType);
+ CodecParams params;
+ params.max_ptime = 40;
+ params.ptime = 30;
+ params.min_ptime = 10;
+ params.sprop_stereo = 1;
+ params.stereo = 1;
+ params.useinband = 1;
+ params.maxaveragebitrate = 128000;
+ TestDeserializeCodecParams(params, &jdesc_output);
+ TestSerialize(jdesc_output);
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSerializeRtcpFb) {
+ const bool kUseWildcard = false;
+ JsepSessionDescription jdesc_output(kDummyType);
+ TestDeserializeRtcpFb(&jdesc_output, kUseWildcard);
+ TestSerialize(jdesc_output);
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSerializeRtcpFbWildcard) {
+ const bool kUseWildcard = true;
+ JsepSessionDescription jdesc_output(kDummyType);
+ TestDeserializeRtcpFb(&jdesc_output, kUseWildcard);
+ TestSerialize(jdesc_output);
+}
+
+TEST_F(WebRtcSdpTest, DeserializeVideoFmtp) {
+ JsepSessionDescription jdesc_output(kDummyType);
+
+ const char kSdpWithFmtpString[] =
+ "v=0\r\n"
+ "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "m=video 3457 RTP/SAVPF 120\r\n"
+ "a=rtpmap:120 VP8/90000\r\n"
+ "a=fmtp:120 x-google-min-bitrate=10;x-google-max-quantization=40\r\n";
+
+ // Deserialize
+ SdpParseError error;
+ EXPECT_TRUE(
+ webrtc::SdpDeserialize(kSdpWithFmtpString, &jdesc_output, &error));
+
+ const VideoContentDescription* vcd =
+ GetFirstVideoContentDescription(jdesc_output.description());
+ ASSERT_TRUE(vcd);
+ ASSERT_FALSE(vcd->codecs().empty());
+ cricket::VideoCodec vp8 = vcd->codecs()[0];
+ EXPECT_EQ("VP8", vp8.name);
+ EXPECT_EQ(120, vp8.id);
+ cricket::CodecParameterMap::iterator found =
+ vp8.params.find("x-google-min-bitrate");
+ ASSERT_TRUE(found != vp8.params.end());
+ EXPECT_EQ(found->second, "10");
+ found = vp8.params.find("x-google-max-quantization");
+ ASSERT_TRUE(found != vp8.params.end());
+ EXPECT_EQ(found->second, "40");
+}
+
+TEST_F(WebRtcSdpTest, DeserializeVideoFmtpWithSprops) {
+ JsepSessionDescription jdesc_output(kDummyType);
+
+ const char kSdpWithFmtpString[] =
+ "v=0\r\n"
+ "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "m=video 49170 RTP/AVP 98\r\n"
+ "a=rtpmap:98 H264/90000\r\n"
+ "a=fmtp:98 profile-level-id=42A01E; "
+ "sprop-parameter-sets=Z0IACpZTBYmI,aMljiA==\r\n";
+
+ // Deserialize.
+ SdpParseError error;
+ EXPECT_TRUE(
+ webrtc::SdpDeserialize(kSdpWithFmtpString, &jdesc_output, &error));
+
+ const VideoContentDescription* vcd =
+ GetFirstVideoContentDescription(jdesc_output.description());
+ ASSERT_TRUE(vcd);
+ ASSERT_FALSE(vcd->codecs().empty());
+ cricket::VideoCodec h264 = vcd->codecs()[0];
+ EXPECT_EQ("H264", h264.name);
+ EXPECT_EQ(98, h264.id);
+ cricket::CodecParameterMap::const_iterator found =
+ h264.params.find("profile-level-id");
+ ASSERT_TRUE(found != h264.params.end());
+ EXPECT_EQ(found->second, "42A01E");
+ found = h264.params.find("sprop-parameter-sets");
+ ASSERT_TRUE(found != h264.params.end());
+ EXPECT_EQ(found->second, "Z0IACpZTBYmI,aMljiA==");
+}
+
+TEST_F(WebRtcSdpTest, DeserializeVideoFmtpWithSpace) {
+ JsepSessionDescription jdesc_output(kDummyType);
+
+ const char kSdpWithFmtpString[] =
+ "v=0\r\n"
+ "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "m=video 3457 RTP/SAVPF 120\r\n"
+ "a=rtpmap:120 VP8/90000\r\n"
+ "a=fmtp:120 x-google-min-bitrate=10; x-google-max-quantization=40\r\n";
+
+ // Deserialize
+ SdpParseError error;
+ EXPECT_TRUE(
+ webrtc::SdpDeserialize(kSdpWithFmtpString, &jdesc_output, &error));
+
+ const VideoContentDescription* vcd =
+ GetFirstVideoContentDescription(jdesc_output.description());
+ ASSERT_TRUE(vcd);
+ ASSERT_FALSE(vcd->codecs().empty());
+ cricket::VideoCodec vp8 = vcd->codecs()[0];
+ EXPECT_EQ("VP8", vp8.name);
+ EXPECT_EQ(120, vp8.id);
+ cricket::CodecParameterMap::iterator found =
+ vp8.params.find("x-google-min-bitrate");
+ ASSERT_TRUE(found != vp8.params.end());
+ EXPECT_EQ(found->second, "10");
+ found = vp8.params.find("x-google-max-quantization");
+ ASSERT_TRUE(found != vp8.params.end());
+ EXPECT_EQ(found->second, "40");
+}
+
+TEST_F(WebRtcSdpTest, DeserializePacketizationAttributeWithIllegalValue) {
+ JsepSessionDescription jdesc_output(kDummyType);
+
+ const char kSdpWithPacketizationString[] =
+ "v=0\r\n"
+ "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "m=audio 9 RTP/SAVPF 111\r\n"
+ "a=rtpmap:111 opus/48000/2\r\n"
+ "a=packetization:111 unknownpacketizationattributeforaudio\r\n"
+ "m=video 3457 RTP/SAVPF 120 121 122\r\n"
+ "a=rtpmap:120 VP8/90000\r\n"
+ "a=packetization:120 raw\r\n"
+ "a=rtpmap:121 VP9/90000\r\n"
+ "a=rtpmap:122 H264/90000\r\n"
+ "a=packetization:122 unknownpacketizationattributevalue\r\n";
+
+ SdpParseError error;
+ EXPECT_TRUE(webrtc::SdpDeserialize(kSdpWithPacketizationString, &jdesc_output,
+ &error));
+
+ AudioContentDescription* acd =
+ GetFirstAudioContentDescription(jdesc_output.description());
+ ASSERT_TRUE(acd);
+ ASSERT_THAT(acd->codecs(), testing::SizeIs(1));
+ cricket::Codec opus = acd->codecs()[0];
+ EXPECT_EQ(opus.name, "opus");
+ EXPECT_EQ(opus.id, 111);
+
+ const VideoContentDescription* vcd =
+ GetFirstVideoContentDescription(jdesc_output.description());
+ ASSERT_TRUE(vcd);
+ ASSERT_THAT(vcd->codecs(), testing::SizeIs(3));
+ cricket::VideoCodec vp8 = vcd->codecs()[0];
+ EXPECT_EQ(vp8.name, "VP8");
+ EXPECT_EQ(vp8.id, 120);
+ EXPECT_EQ(vp8.packetization, "raw");
+ cricket::VideoCodec vp9 = vcd->codecs()[1];
+ EXPECT_EQ(vp9.name, "VP9");
+ EXPECT_EQ(vp9.id, 121);
+ EXPECT_EQ(vp9.packetization, absl::nullopt);
+ cricket::VideoCodec h264 = vcd->codecs()[2];
+ EXPECT_EQ(h264.name, "H264");
+ EXPECT_EQ(h264.id, 122);
+ EXPECT_EQ(h264.packetization, absl::nullopt);
+}
+
+TEST_F(WebRtcSdpTest, SerializeAudioFmtpWithUnknownParameter) {
+ AudioContentDescription* acd = GetFirstAudioContentDescription(&desc_);
+
+ cricket::AudioCodecs codecs = acd->codecs();
+ codecs[0].params["unknown-future-parameter"] = "SomeFutureValue";
+ acd->set_codecs(codecs);
+
+ ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(),
+ jdesc_.session_version()));
+ std::string message = webrtc::SdpSerialize(jdesc_);
+ std::string sdp_with_fmtp = kSdpFullString;
+ InjectAfter("a=rtpmap:111 opus/48000/2\r\n",
+ "a=fmtp:111 unknown-future-parameter=SomeFutureValue\r\n",
+ &sdp_with_fmtp);
+ EXPECT_EQ(sdp_with_fmtp, message);
+}
+
+TEST_F(WebRtcSdpTest, SerializeAudioFmtpWithKnownFmtpParameter) {
+ AudioContentDescription* acd = GetFirstAudioContentDescription(&desc_);
+
+ cricket::AudioCodecs codecs = acd->codecs();
+ codecs[0].params["stereo"] = "1";
+ acd->set_codecs(codecs);
+
+ ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(),
+ jdesc_.session_version()));
+ std::string message = webrtc::SdpSerialize(jdesc_);
+ std::string sdp_with_fmtp = kSdpFullString;
+ InjectAfter("a=rtpmap:111 opus/48000/2\r\n", "a=fmtp:111 stereo=1\r\n",
+ &sdp_with_fmtp);
+ EXPECT_EQ(sdp_with_fmtp, message);
+}
+
+TEST_F(WebRtcSdpTest, SerializeAudioFmtpWithPTimeAndMaxPTime) {
+ AudioContentDescription* acd = GetFirstAudioContentDescription(&desc_);
+
+ cricket::AudioCodecs codecs = acd->codecs();
+ codecs[0].params["ptime"] = "20";
+ codecs[0].params["maxptime"] = "120";
+ acd->set_codecs(codecs);
+
+ ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(),
+ jdesc_.session_version()));
+ std::string message = webrtc::SdpSerialize(jdesc_);
+ std::string sdp_with_fmtp = kSdpFullString;
+ InjectAfter("a=rtpmap:104 ISAC/32000\r\n",
+ "a=maxptime:120\r\n" // No comma here. String merging!
+ "a=ptime:20\r\n",
+ &sdp_with_fmtp);
+ EXPECT_EQ(sdp_with_fmtp, message);
+}
+
+TEST_F(WebRtcSdpTest, SerializeAudioFmtpWithTelephoneEvent) {
+ AudioContentDescription* acd = GetFirstAudioContentDescription(&desc_);
+
+ cricket::AudioCodecs codecs = acd->codecs();
+ cricket::Codec dtmf =
+ cricket::CreateAudioCodec(105, "telephone-event", 8000, 1);
+ dtmf.params[""] = "0-15";
+ codecs.push_back(dtmf);
+ acd->set_codecs(codecs);
+
+ ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(),
+ jdesc_.session_version()));
+ std::string message = webrtc::SdpSerialize(jdesc_);
+ std::string sdp_with_fmtp = kSdpFullString;
+ InjectAfter("m=audio 2345 RTP/SAVPF 111 103 104", " 105", &sdp_with_fmtp);
+ InjectAfter(
+ "a=rtpmap:104 ISAC/32000\r\n",
+ "a=rtpmap:105 telephone-event/8000\r\n" // No comma here. String merging!
+ "a=fmtp:105 0-15\r\n",
+ &sdp_with_fmtp);
+ EXPECT_EQ(sdp_with_fmtp, message);
+}
+
+TEST_F(WebRtcSdpTest, SerializeVideoFmtp) {
+ VideoContentDescription* vcd = GetFirstVideoContentDescription(&desc_);
+
+ cricket::VideoCodecs codecs = vcd->codecs();
+ codecs[0].params["x-google-min-bitrate"] = "10";
+ vcd->set_codecs(codecs);
+
+ ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(),
+ jdesc_.session_version()));
+ std::string message = webrtc::SdpSerialize(jdesc_);
+ std::string sdp_with_fmtp = kSdpFullString;
+ InjectAfter("a=rtpmap:120 VP8/90000\r\n",
+ "a=fmtp:120 x-google-min-bitrate=10\r\n", &sdp_with_fmtp);
+ EXPECT_EQ(sdp_with_fmtp, message);
+}
+
+TEST_F(WebRtcSdpTest, SerializeVideoPacketizationAttribute) {
+ VideoContentDescription* vcd = GetFirstVideoContentDescription(&desc_);
+
+ cricket::VideoCodecs codecs = vcd->codecs();
+ codecs[0].packetization = "raw";
+ vcd->set_codecs(codecs);
+
+ ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(),
+ jdesc_.session_version()));
+ std::string message = webrtc::SdpSerialize(jdesc_);
+ std::string sdp_with_packetization = kSdpFullString;
+ InjectAfter("a=rtpmap:120 VP8/90000\r\n", "a=packetization:120 raw\r\n",
+ &sdp_with_packetization);
+ EXPECT_EQ(sdp_with_packetization, message);
+}
+
+TEST_F(WebRtcSdpTest, DeserializeAndSerializeSdpWithIceLite) {
+ // Deserialize the baseline description, making sure it's ICE full.
+ JsepSessionDescription jdesc_with_icelite(kDummyType);
+ std::string sdp_with_icelite = kSdpFullString;
+ EXPECT_TRUE(SdpDeserialize(sdp_with_icelite, &jdesc_with_icelite));
+ cricket::SessionDescription* desc = jdesc_with_icelite.description();
+ const cricket::TransportInfo* tinfo1 =
+ desc->GetTransportInfoByName("audio_content_name");
+ EXPECT_EQ(cricket::ICEMODE_FULL, tinfo1->description.ice_mode);
+ const cricket::TransportInfo* tinfo2 =
+ desc->GetTransportInfoByName("video_content_name");
+ EXPECT_EQ(cricket::ICEMODE_FULL, tinfo2->description.ice_mode);
+
+ // Add "a=ice-lite" and deserialize, making sure it's ICE lite.
+ InjectAfter(kSessionTime, "a=ice-lite\r\n", &sdp_with_icelite);
+ EXPECT_TRUE(SdpDeserialize(sdp_with_icelite, &jdesc_with_icelite));
+ desc = jdesc_with_icelite.description();
+ const cricket::TransportInfo* atinfo =
+ desc->GetTransportInfoByName("audio_content_name");
+ EXPECT_EQ(cricket::ICEMODE_LITE, atinfo->description.ice_mode);
+ const cricket::TransportInfo* vtinfo =
+ desc->GetTransportInfoByName("video_content_name");
+ EXPECT_EQ(cricket::ICEMODE_LITE, vtinfo->description.ice_mode);
+
+ // Now that we know deserialization works, we can use TestSerialize to test
+ // serialization.
+ TestSerialize(jdesc_with_icelite);
+}
+
+// Verifies that the candidates in the input SDP are parsed and serialized
+// correctly in the output SDP.
+TEST_F(WebRtcSdpTest, RoundTripSdpWithSctpDataChannelsWithCandidates) {
+ std::string sdp_with_data = kSdpString;
+ sdp_with_data.append(kSdpSctpDataChannelWithCandidatesString);
+ JsepSessionDescription jdesc_output(kDummyType);
+
+ EXPECT_TRUE(SdpDeserialize(sdp_with_data, &jdesc_output));
+ EXPECT_EQ(sdp_with_data, webrtc::SdpSerialize(jdesc_output));
+}
+
+TEST_F(WebRtcSdpTest, SerializeDtlsSetupAttribute) {
+ AddFingerprint();
+ TransportInfo audio_transport_info =
+ *(desc_.GetTransportInfoByName(kAudioContentName));
+ EXPECT_EQ(cricket::CONNECTIONROLE_NONE,
+ audio_transport_info.description.connection_role);
+ audio_transport_info.description.connection_role =
+ cricket::CONNECTIONROLE_ACTIVE;
+
+ TransportInfo video_transport_info =
+ *(desc_.GetTransportInfoByName(kVideoContentName));
+ EXPECT_EQ(cricket::CONNECTIONROLE_NONE,
+ video_transport_info.description.connection_role);
+ video_transport_info.description.connection_role =
+ cricket::CONNECTIONROLE_ACTIVE;
+
+ desc_.RemoveTransportInfoByName(kAudioContentName);
+ desc_.RemoveTransportInfoByName(kVideoContentName);
+
+ desc_.AddTransportInfo(audio_transport_info);
+ desc_.AddTransportInfo(video_transport_info);
+
+ ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(),
+ jdesc_.session_version()));
+ std::string message = webrtc::SdpSerialize(jdesc_);
+ std::string sdp_with_dtlssetup = kSdpFullString;
+
+ // Fingerprint attribute is necessary to add DTLS setup attribute.
+ InjectAfter(kAttributeIcePwdVoice, kFingerprint, &sdp_with_dtlssetup);
+ InjectAfter(kAttributeIcePwdVideo, kFingerprint, &sdp_with_dtlssetup);
+ // Now adding `setup` attribute.
+ InjectAfter(kFingerprint, "a=setup:active\r\n", &sdp_with_dtlssetup);
+ EXPECT_EQ(sdp_with_dtlssetup, message);
+}
+
+TEST_F(WebRtcSdpTest, DeserializeDtlsSetupAttributeActpass) {
+ JsepSessionDescription jdesc_with_dtlssetup(kDummyType);
+ std::string sdp_with_dtlssetup = kSdpFullString;
+ InjectAfter(kSessionTime, "a=setup:actpass\r\n", &sdp_with_dtlssetup);
+ EXPECT_TRUE(SdpDeserialize(sdp_with_dtlssetup, &jdesc_with_dtlssetup));
+ cricket::SessionDescription* desc = jdesc_with_dtlssetup.description();
+ const cricket::TransportInfo* atinfo =
+ desc->GetTransportInfoByName("audio_content_name");
+ EXPECT_EQ(cricket::CONNECTIONROLE_ACTPASS,
+ atinfo->description.connection_role);
+ const cricket::TransportInfo* vtinfo =
+ desc->GetTransportInfoByName("video_content_name");
+ EXPECT_EQ(cricket::CONNECTIONROLE_ACTPASS,
+ vtinfo->description.connection_role);
+}
+
+TEST_F(WebRtcSdpTest, DeserializeDtlsSetupAttributeActive) {
+ JsepSessionDescription jdesc_with_dtlssetup(kDummyType);
+ std::string sdp_with_dtlssetup = kSdpFullString;
+ InjectAfter(kSessionTime, "a=setup:active\r\n", &sdp_with_dtlssetup);
+ EXPECT_TRUE(SdpDeserialize(sdp_with_dtlssetup, &jdesc_with_dtlssetup));
+ cricket::SessionDescription* desc = jdesc_with_dtlssetup.description();
+ const cricket::TransportInfo* atinfo =
+ desc->GetTransportInfoByName("audio_content_name");
+ EXPECT_EQ(cricket::CONNECTIONROLE_ACTIVE,
+ atinfo->description.connection_role);
+ const cricket::TransportInfo* vtinfo =
+ desc->GetTransportInfoByName("video_content_name");
+ EXPECT_EQ(cricket::CONNECTIONROLE_ACTIVE,
+ vtinfo->description.connection_role);
+}
+TEST_F(WebRtcSdpTest, DeserializeDtlsSetupAttributePassive) {
+ JsepSessionDescription jdesc_with_dtlssetup(kDummyType);
+ std::string sdp_with_dtlssetup = kSdpFullString;
+ InjectAfter(kSessionTime, "a=setup:passive\r\n", &sdp_with_dtlssetup);
+ EXPECT_TRUE(SdpDeserialize(sdp_with_dtlssetup, &jdesc_with_dtlssetup));
+ cricket::SessionDescription* desc = jdesc_with_dtlssetup.description();
+ const cricket::TransportInfo* atinfo =
+ desc->GetTransportInfoByName("audio_content_name");
+ EXPECT_EQ(cricket::CONNECTIONROLE_PASSIVE,
+ atinfo->description.connection_role);
+ const cricket::TransportInfo* vtinfo =
+ desc->GetTransportInfoByName("video_content_name");
+ EXPECT_EQ(cricket::CONNECTIONROLE_PASSIVE,
+ vtinfo->description.connection_role);
+}
+
+// Verifies that the order of the serialized m-lines follows the order of the
+// ContentInfo in SessionDescription, and vice versa for deserialization.
+TEST_F(WebRtcSdpTest, MediaContentOrderMaintainedRoundTrip) {
+ JsepSessionDescription jdesc(kDummyType);
+ const std::string media_content_sdps[3] = {kSdpAudioString, kSdpVideoString,
+ kSdpSctpDataChannelString};
+ const cricket::MediaType media_types[3] = {cricket::MEDIA_TYPE_AUDIO,
+ cricket::MEDIA_TYPE_VIDEO,
+ cricket::MEDIA_TYPE_DATA};
+
+ // Verifies all 6 permutations.
+ for (size_t i = 0; i < 6; ++i) {
+ size_t media_content_in_sdp[3];
+ // The index of the first media content.
+ media_content_in_sdp[0] = i / 2;
+ // The index of the second media content.
+ media_content_in_sdp[1] = (media_content_in_sdp[0] + i % 2 + 1) % 3;
+ // The index of the third media content.
+ media_content_in_sdp[2] = (media_content_in_sdp[0] + (i + 1) % 2 + 1) % 3;
+
+ std::string sdp_string = kSdpSessionString;
+ for (size_t i = 0; i < 3; ++i)
+ sdp_string += media_content_sdps[media_content_in_sdp[i]];
+
+ EXPECT_TRUE(SdpDeserialize(sdp_string, &jdesc));
+ cricket::SessionDescription* desc = jdesc.description();
+ EXPECT_EQ(3u, desc->contents().size());
+
+ for (size_t i = 0; i < 3; ++i) {
+ const cricket::MediaContentDescription* mdesc =
+ desc->contents()[i].media_description();
+ EXPECT_EQ(media_types[media_content_in_sdp[i]], mdesc->type());
+ }
+
+ std::string serialized_sdp = webrtc::SdpSerialize(jdesc);
+ EXPECT_EQ(sdp_string, serialized_sdp);
+ }
+}
+
+TEST_F(WebRtcSdpTest, DeserializeBundleOnlyAttribute) {
+ MakeBundleOnlyDescription();
+ JsepSessionDescription deserialized_description(kDummyType);
+ ASSERT_TRUE(
+ SdpDeserialize(kBundleOnlySdpFullString, &deserialized_description));
+ EXPECT_TRUE(CompareSessionDescription(jdesc_, deserialized_description));
+}
+
+// The semantics of "a=bundle-only" are only defined when it's used in
+// combination with a 0 port on the m= line. We should ignore it if used with a
+// nonzero port.
+TEST_F(WebRtcSdpTest, IgnoreBundleOnlyWithNonzeroPort) {
+ // Make the base bundle-only description but unset the bundle-only flag.
+ MakeBundleOnlyDescription();
+ jdesc_.description()->contents()[1].bundle_only = false;
+
+ std::string modified_sdp = kBundleOnlySdpFullString;
+ Replace("m=video 0", "m=video 9", &modified_sdp);
+ JsepSessionDescription deserialized_description(kDummyType);
+ ASSERT_TRUE(SdpDeserialize(modified_sdp, &deserialized_description));
+ EXPECT_TRUE(CompareSessionDescription(jdesc_, deserialized_description));
+}
+
+TEST_F(WebRtcSdpTest, SerializeBundleOnlyAttribute) {
+ MakeBundleOnlyDescription();
+ TestSerialize(jdesc_);
+}
+
+TEST_F(WebRtcSdpTest, DeserializePlanBSessionDescription) {
+ MakePlanBDescription();
+
+ JsepSessionDescription deserialized_description(kDummyType);
+ EXPECT_TRUE(SdpDeserialize(kPlanBSdpFullString, &deserialized_description));
+
+ EXPECT_TRUE(CompareSessionDescription(jdesc_, deserialized_description));
+}
+
+TEST_F(WebRtcSdpTest, SerializePlanBSessionDescription) {
+ MakePlanBDescription();
+ TestSerialize(jdesc_);
+}
+
+TEST_F(WebRtcSdpTest, DeserializeUnifiedPlanSessionDescription) {
+ MakeUnifiedPlanDescription();
+
+ JsepSessionDescription deserialized_description(kDummyType);
+ EXPECT_TRUE(
+ SdpDeserialize(kUnifiedPlanSdpFullString, &deserialized_description));
+
+ EXPECT_TRUE(CompareSessionDescription(jdesc_, deserialized_description));
+}
+
+TEST_F(WebRtcSdpTest, SerializeUnifiedPlanSessionDescription) {
+ MakeUnifiedPlanDescription();
+ TestSerialize(jdesc_);
+}
+
+// This tests deserializing a Unified Plan SDP that is compatible with both
+// Unified Plan and Plan B style SDP, meaning that it contains both "a=ssrc
+// msid" lines and "a=msid " lines. It tests the case for audio/video tracks
+// with no stream ids and multiple stream ids. For parsing this, the Unified
+// Plan a=msid lines should take priority, because the Plan B style a=ssrc msid
+// lines do not support multiple stream ids and no stream ids.
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionSpecialMsid) {
+ // Create both msid lines for Plan B and Unified Plan support.
+ MakeUnifiedPlanDescriptionMultipleStreamIds(
+ cricket::kMsidSignalingMediaSection |
+ cricket::kMsidSignalingSsrcAttribute);
+
+ JsepSessionDescription deserialized_description(kDummyType);
+ EXPECT_TRUE(SdpDeserialize(kUnifiedPlanSdpFullStringWithSpecialMsid,
+ &deserialized_description));
+
+ EXPECT_TRUE(CompareSessionDescription(jdesc_, deserialized_description));
+ EXPECT_EQ(cricket::kMsidSignalingMediaSection |
+ cricket::kMsidSignalingSsrcAttribute,
+ deserialized_description.description()->msid_signaling());
+}
+
+// Tests the serialization of a Unified Plan SDP that is compatible with both
+// Unified Plan and Plan B style SDPs, meaning that it contains both "a=ssrc
+// msid" lines and "a=msid " lines. It tests the case for no stream ids and
+// multiple stream ids.
+TEST_F(WebRtcSdpTest, SerializeSessionDescriptionSpecialMsid) {
+ // Create both msid lines for Plan B and Unified Plan support.
+ MakeUnifiedPlanDescriptionMultipleStreamIds(
+ cricket::kMsidSignalingMediaSection |
+ cricket::kMsidSignalingSsrcAttribute);
+ std::string serialized_sdp = webrtc::SdpSerialize(jdesc_);
+ // We explicitly test that the serialized SDP string is equal to the hard
+ // coded SDP string. This is necessary, because in the parser "a=msid" lines
+ // take priority over "a=ssrc msid" lines. This means if we just used
+ // TestSerialize(), it could serialize an SDP that omits "a=ssrc msid" lines,
+ // and still pass, because the deserialized version would be the same.
+ EXPECT_EQ(kUnifiedPlanSdpFullStringWithSpecialMsid, serialized_sdp);
+}
+
+// Tests that a Unified Plan style SDP (does not contain "a=ssrc msid" lines
+// that signal stream IDs) is deserialized appropriately. It tests the case for
+// no stream ids and multiple stream ids.
+TEST_F(WebRtcSdpTest, UnifiedPlanDeserializeSessionDescriptionSpecialMsid) {
+ // Only create a=msid lines for strictly Unified Plan stream ID support.
+ MakeUnifiedPlanDescriptionMultipleStreamIds(
+ cricket::kMsidSignalingMediaSection);
+
+ JsepSessionDescription deserialized_description(kDummyType);
+ std::string unified_plan_sdp_string =
+ kUnifiedPlanSdpFullStringWithSpecialMsid;
+ RemoveSsrcMsidLinesFromSdpString(&unified_plan_sdp_string);
+ EXPECT_TRUE(
+ SdpDeserialize(unified_plan_sdp_string, &deserialized_description));
+
+ EXPECT_TRUE(CompareSessionDescription(jdesc_, deserialized_description));
+}
+
+// Tests that a Unified Plan style SDP (does not contain "a=ssrc msid" lines
+// that signal stream IDs) is serialized appropriately. It tests the case for no
+// stream ids and multiple stream ids.
+TEST_F(WebRtcSdpTest, UnifiedPlanSerializeSessionDescriptionSpecialMsid) {
+ // Only create a=msid lines for strictly Unified Plan stream ID support.
+ MakeUnifiedPlanDescriptionMultipleStreamIds(
+ cricket::kMsidSignalingMediaSection);
+
+ TestSerialize(jdesc_);
+}
+
+// This tests that a Unified Plan SDP with no a=ssrc lines is
+// serialized/deserialized appropriately. In this case the
+// MediaContentDescription will contain a StreamParams object that doesn't have
+// any SSRCs. Vice versa, this will be created upon deserializing an SDP with no
+// SSRC lines.
+TEST_F(WebRtcSdpTest, DeserializeUnifiedPlanSessionDescriptionNoSsrcSignaling) {
+ MakeUnifiedPlanDescription();
+ RemoveSsrcSignalingFromStreamParams();
+ std::string unified_plan_sdp_string = kUnifiedPlanSdpFullString;
+ RemoveSsrcLinesFromSdpString(&unified_plan_sdp_string);
+
+ JsepSessionDescription deserialized_description(kDummyType);
+ EXPECT_TRUE(
+ SdpDeserialize(unified_plan_sdp_string, &deserialized_description));
+ EXPECT_TRUE(CompareSessionDescription(jdesc_, deserialized_description));
+}
+
+TEST_F(WebRtcSdpTest, SerializeUnifiedPlanSessionDescriptionNoSsrcSignaling) {
+ MakeUnifiedPlanDescription();
+ RemoveSsrcSignalingFromStreamParams();
+
+ TestSerialize(jdesc_);
+}
+
+TEST_F(WebRtcSdpTest, EmptyDescriptionHasNoMsidSignaling) {
+ JsepSessionDescription jsep_desc(kDummyType);
+ ASSERT_TRUE(SdpDeserialize(kSdpSessionString, &jsep_desc));
+ EXPECT_EQ(0, jsep_desc.description()->msid_signaling());
+}
+
+TEST_F(WebRtcSdpTest, DataChannelOnlyHasNoMsidSignaling) {
+ JsepSessionDescription jsep_desc(kDummyType);
+ std::string sdp = kSdpSessionString;
+ sdp += kSdpSctpDataChannelString;
+ ASSERT_TRUE(SdpDeserialize(sdp, &jsep_desc));
+ EXPECT_EQ(0, jsep_desc.description()->msid_signaling());
+}
+
+TEST_F(WebRtcSdpTest, PlanBHasSsrcAttributeMsidSignaling) {
+ JsepSessionDescription jsep_desc(kDummyType);
+ ASSERT_TRUE(SdpDeserialize(kPlanBSdpFullString, &jsep_desc));
+ EXPECT_EQ(cricket::kMsidSignalingSsrcAttribute,
+ jsep_desc.description()->msid_signaling());
+}
+
+TEST_F(WebRtcSdpTest, UnifiedPlanHasMediaSectionMsidSignaling) {
+ JsepSessionDescription jsep_desc(kDummyType);
+ ASSERT_TRUE(SdpDeserialize(kUnifiedPlanSdpFullString, &jsep_desc));
+ EXPECT_EQ(cricket::kMsidSignalingMediaSection,
+ jsep_desc.description()->msid_signaling());
+}
+
+const char kMediaSectionMsidLine[] = "a=msid:local_stream_1 audio_track_id_1";
+const char kSsrcAttributeMsidLine[] =
+ "a=ssrc:1 msid:local_stream_1 audio_track_id_1";
+
+TEST_F(WebRtcSdpTest, SerializeOnlyMediaSectionMsid) {
+ jdesc_.description()->set_msid_signaling(cricket::kMsidSignalingMediaSection);
+ std::string sdp = webrtc::SdpSerialize(jdesc_);
+
+ EXPECT_NE(std::string::npos, sdp.find(kMediaSectionMsidLine));
+ EXPECT_EQ(std::string::npos, sdp.find(kSsrcAttributeMsidLine));
+}
+
+TEST_F(WebRtcSdpTest, SerializeOnlySsrcAttributeMsid) {
+ jdesc_.description()->set_msid_signaling(
+ cricket::kMsidSignalingSsrcAttribute);
+ std::string sdp = webrtc::SdpSerialize(jdesc_);
+
+ EXPECT_EQ(std::string::npos, sdp.find(kMediaSectionMsidLine));
+ EXPECT_NE(std::string::npos, sdp.find(kSsrcAttributeMsidLine));
+}
+
+TEST_F(WebRtcSdpTest, SerializeBothMediaSectionAndSsrcAttributeMsid) {
+ jdesc_.description()->set_msid_signaling(
+ cricket::kMsidSignalingMediaSection |
+ cricket::kMsidSignalingSsrcAttribute);
+ std::string sdp = webrtc::SdpSerialize(jdesc_);
+
+ EXPECT_NE(std::string::npos, sdp.find(kMediaSectionMsidLine));
+ EXPECT_NE(std::string::npos, sdp.find(kSsrcAttributeMsidLine));
+}
+
+// Regression test for integer overflow bug:
+// https://bugs.chromium.org/p/chromium/issues/detail?id=648071
+TEST_F(WebRtcSdpTest, DeserializeLargeBandwidthLimit) {
+ // Bandwidth attribute is the max signed 32-bit int, which will get
+ // multiplied by 1000 and cause int overflow if not careful.
+ static const char kSdpWithLargeBandwidth[] =
+ "v=0\r\n"
+ "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "m=video 3457 RTP/SAVPF 120\r\n"
+ "b=AS:2147483647\r\n"
+ "foo=fail\r\n";
+
+ ExpectParseFailure(std::string(kSdpWithLargeBandwidth), "foo=fail");
+}
+
+// Similar to the above, except that negative values are illegal, not just
+// error-prone as large values are.
+// https://bugs.chromium.org/p/chromium/issues/detail?id=675361
+TEST_F(WebRtcSdpTest, DeserializingNegativeBandwidthLimitFails) {
+ static const char kSdpWithNegativeBandwidth[] =
+ "v=0\r\n"
+ "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "m=video 3457 RTP/SAVPF 120\r\n"
+ "b=AS:-1000\r\n";
+
+ ExpectParseFailure(std::string(kSdpWithNegativeBandwidth), "b=AS:-1000");
+}
+
+// An exception to the above rule: a value of -1 for b=AS should just be
+// ignored, resulting in "kAutoBandwidth" in the deserialized object.
+// Applications historically may be using "b=AS:-1" to mean "no bandwidth
+// limit", but this is now what omitting the attribute entirely will do, so
+// ignoring it will have the intended effect.
+TEST_F(WebRtcSdpTest, BandwidthLimitOfNegativeOneIgnored) {
+ static const char kSdpWithBandwidthOfNegativeOne[] =
+ "v=0\r\n"
+ "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "m=video 3457 RTP/SAVPF 120\r\n"
+ "b=AS:-1\r\n";
+
+ JsepSessionDescription jdesc_output(kDummyType);
+ EXPECT_TRUE(SdpDeserialize(kSdpWithBandwidthOfNegativeOne, &jdesc_output));
+ const VideoContentDescription* vcd =
+ GetFirstVideoContentDescription(jdesc_output.description());
+ ASSERT_TRUE(vcd);
+ EXPECT_EQ(cricket::kAutoBandwidth, vcd->bandwidth());
+}
+
+// Test that "ufrag"/"pwd" in the candidate line itself are ignored, and only
+// the "a=ice-ufrag"/"a=ice-pwd" attributes are used.
+// Regression test for:
+// https://bugs.chromium.org/p/chromium/issues/detail?id=681286
+TEST_F(WebRtcSdpTest, IceCredentialsInCandidateStringIgnored) {
+ // Important piece is "ufrag foo pwd bar".
+ static const char kSdpWithIceCredentialsInCandidateString[] =
+ "v=0\r\n"
+ "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "m=audio 9 RTP/SAVPF 111\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtcp:9 IN IP4 0.0.0.0\r\n"
+ "a=ice-ufrag:ufrag_voice\r\na=ice-pwd:pwd_voice\r\n"
+ "a=rtpmap:111 opus/48000/2\r\n"
+ "a=candidate:a0+B/1 1 udp 2130706432 192.168.1.5 1234 typ host "
+ "generation 2 ufrag foo pwd bar\r\n";
+
+ JsepSessionDescription jdesc_output(kDummyType);
+ EXPECT_TRUE(
+ SdpDeserialize(kSdpWithIceCredentialsInCandidateString, &jdesc_output));
+ const IceCandidateCollection* candidates = jdesc_output.candidates(0);
+ ASSERT_NE(nullptr, candidates);
+ ASSERT_EQ(1U, candidates->count());
+ cricket::Candidate c = candidates->at(0)->candidate();
+ EXPECT_EQ("ufrag_voice", c.username());
+ EXPECT_EQ("pwd_voice", c.password());
+}
+
+// Test that attribute lines "a=ice-ufrag-something"/"a=ice-pwd-something" are
+// ignored, and only the "a=ice-ufrag"/"a=ice-pwd" attributes are used.
+// Regression test for:
+// https://bugs.chromium.org/p/webrtc/issues/detail?id=9712
+TEST_F(WebRtcSdpTest, AttributeWithPartialMatchingNameIsIgnored) {
+ static const char kSdpWithFooIceCredentials[] =
+ "v=0\r\n"
+ "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "m=audio 9 RTP/SAVPF 111\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtcp:9 IN IP4 0.0.0.0\r\n"
+ "a=ice-ufrag-something:foo\r\na=ice-pwd-something:bar\r\n"
+ "a=ice-ufrag:ufrag_voice\r\na=ice-pwd:pwd_voice\r\n"
+ "a=rtpmap:111 opus/48000/2\r\n"
+ "a=candidate:a0+B/1 1 udp 2130706432 192.168.1.5 1234 typ host "
+ "generation 2\r\n";
+
+ JsepSessionDescription jdesc_output(kDummyType);
+ EXPECT_TRUE(SdpDeserialize(kSdpWithFooIceCredentials, &jdesc_output));
+ const IceCandidateCollection* candidates = jdesc_output.candidates(0);
+ ASSERT_NE(nullptr, candidates);
+ ASSERT_EQ(1U, candidates->count());
+ cricket::Candidate c = candidates->at(0)->candidate();
+ EXPECT_EQ("ufrag_voice", c.username());
+ EXPECT_EQ("pwd_voice", c.password());
+}
+
+// Test that SDP with an invalid port number in "a=candidate" lines is
+// rejected, without crashing.
+// Regression test for:
+// https://bugs.chromium.org/p/chromium/issues/detail?id=677029
+TEST_F(WebRtcSdpTest, DeserializeInvalidPortInCandidateAttribute) {
+ static const char kSdpWithInvalidCandidatePort[] =
+ "v=0\r\n"
+ "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "m=audio 9 RTP/SAVPF 111\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtcp:9 IN IP4 0.0.0.0\r\n"
+ "a=ice-ufrag:ufrag_voice\r\na=ice-pwd:pwd_voice\r\n"
+ "a=rtpmap:111 opus/48000/2\r\n"
+ "a=candidate:a0+B/1 1 udp 2130706432 192.168.1.5 12345678 typ host "
+ "generation 2 raddr 192.168.1.1 rport 87654321\r\n";
+
+ JsepSessionDescription jdesc_output(kDummyType);
+ EXPECT_FALSE(SdpDeserialize(kSdpWithInvalidCandidatePort, &jdesc_output));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeMsidAttributeWithStreamIdAndTrackId) {
+ std::string sdp =
+ "v=0\r\n"
+ "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "m=audio 9 RTP/SAVPF 111\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtpmap:111 opus/48000/2\r\n"
+ "a=msid:stream_id track_id\r\n";
+
+ JsepSessionDescription jdesc_output(kDummyType);
+ EXPECT_TRUE(SdpDeserialize(sdp, &jdesc_output));
+ auto stream = jdesc_output.description()
+ ->contents()[0]
+ .media_description()
+ ->streams()[0];
+ ASSERT_EQ(stream.stream_ids().size(), 1u);
+ EXPECT_EQ(stream.stream_ids()[0], "stream_id");
+ EXPECT_EQ(stream.id, "track_id");
+}
+
+TEST_F(WebRtcSdpTest, DeserializeMsidAttributeWithEmptyStreamIdAndTrackId) {
+ std::string sdp =
+ "v=0\r\n"
+ "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "m=audio 9 RTP/SAVPF 111\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtpmap:111 opus/48000/2\r\n"
+ "a=msid:- track_id\r\n";
+
+ JsepSessionDescription jdesc_output(kDummyType);
+ EXPECT_TRUE(SdpDeserialize(sdp, &jdesc_output));
+ auto stream = jdesc_output.description()
+ ->contents()[0]
+ .media_description()
+ ->streams()[0];
+ ASSERT_EQ(stream.stream_ids().size(), 0u);
+ EXPECT_EQ(stream.id, "track_id");
+}
+
+// Test that "a=msid" with a missing track ID is rejected and doesn't crash.
+// Regression test for:
+// https://bugs.chromium.org/p/chromium/issues/detail?id=686405
+TEST_F(WebRtcSdpTest, DeserializeMsidAttributeWithMissingTrackId) {
+ std::string sdp =
+ "v=0\r\n"
+ "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "m=audio 9 RTP/SAVPF 111\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtpmap:111 opus/48000/2\r\n"
+ "a=msid:stream_id \r\n";
+
+ JsepSessionDescription jdesc_output(kDummyType);
+ EXPECT_FALSE(SdpDeserialize(sdp, &jdesc_output));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeMsidAttributeWithoutColon) {
+ std::string sdp =
+ "v=0\r\n"
+ "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "m=audio 9 RTP/SAVPF 111\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtpmap:111 opus/48000/2\r\n"
+ "a=msid\r\n";
+
+ JsepSessionDescription jdesc_output(kDummyType);
+ EXPECT_FALSE(SdpDeserialize(sdp, &jdesc_output));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeMsidAttributeWithoutAttributes) {
+ std::string sdp =
+ "v=0\r\n"
+ "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "m=audio 9 RTP/SAVPF 111\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtpmap:111 opus/48000/2\r\n"
+ "a=msid:\r\n";
+
+ JsepSessionDescription jdesc_output(kDummyType);
+ EXPECT_FALSE(SdpDeserialize(sdp, &jdesc_output));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeMsidAttributeWithTooManySpaces) {
+ std::string sdp =
+ "v=0\r\n"
+ "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "m=audio 9 RTP/SAVPF 111\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtpmap:111 opus/48000/2\r\n"
+ "a=msid:stream_id track_id bogus\r\n";
+
+ JsepSessionDescription jdesc_output(kDummyType);
+ EXPECT_FALSE(SdpDeserialize(sdp, &jdesc_output));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeMsidAttributeWithDifferentTrackIds) {
+ std::string sdp =
+ "v=0\r\n"
+ "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "m=audio 9 RTP/SAVPF 111\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtpmap:111 opus/48000/2\r\n"
+ "a=msid:stream_id track_id\r\n"
+ "a=msid:stream_id2 track_id2\r\n";
+
+ JsepSessionDescription jdesc_output(kDummyType);
+ EXPECT_FALSE(SdpDeserialize(sdp, &jdesc_output));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeMsidAttributeWithoutAppData) {
+ std::string sdp =
+ "v=0\r\n"
+ "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "m=audio 9 RTP/SAVPF 111\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtpmap:111 opus/48000/2\r\n"
+ "a=msid:stream_id\r\n";
+
+ JsepSessionDescription jdesc_output(kDummyType);
+ EXPECT_TRUE(SdpDeserialize(sdp, &jdesc_output));
+ auto stream = jdesc_output.description()
+ ->contents()[0]
+ .media_description()
+ ->streams()[0];
+ ASSERT_EQ(stream.stream_ids().size(), 1u);
+ EXPECT_EQ(stream.stream_ids()[0], "stream_id");
+ // Track id is randomly generated.
+ EXPECT_NE(stream.id, "");
+}
+
+TEST_F(WebRtcSdpTest, DeserializeMsidAttributeWithoutAppDataTwoStreams) {
+ std::string sdp =
+ "v=0\r\n"
+ "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "m=audio 9 RTP/SAVPF 111\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtpmap:111 opus/48000/2\r\n"
+ "a=msid:stream_id\r\n"
+ "a=msid:stream_id2\r\n";
+
+ JsepSessionDescription jdesc_output(kDummyType);
+ EXPECT_TRUE(SdpDeserialize(sdp, &jdesc_output));
+ auto stream = jdesc_output.description()
+ ->contents()[0]
+ .media_description()
+ ->streams()[0];
+ ASSERT_EQ(stream.stream_ids().size(), 2u);
+ EXPECT_EQ(stream.stream_ids()[0], "stream_id");
+ EXPECT_EQ(stream.stream_ids()[1], "stream_id2");
+ // Track id is randomly generated.
+ EXPECT_NE(stream.id, "");
+}
+
+TEST_F(WebRtcSdpTest, DeserializeMsidAttributeWithoutAppDataDuplicate) {
+ std::string sdp =
+ "v=0\r\n"
+ "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "m=audio 9 RTP/SAVPF 111\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtpmap:111 opus/48000/2\r\n"
+ "a=msid:stream_id\r\n"
+ "a=msid:stream_id\r\n";
+
+ JsepSessionDescription jdesc_output(kDummyType);
+ // This is somewhat silly but accept it. Duplicates get filtered.
+ EXPECT_TRUE(SdpDeserialize(sdp, &jdesc_output));
+ auto stream = jdesc_output.description()
+ ->contents()[0]
+ .media_description()
+ ->streams()[0];
+ ASSERT_EQ(stream.stream_ids().size(), 1u);
+ EXPECT_EQ(stream.stream_ids()[0], "stream_id");
+ // Track id is randomly generated.
+ EXPECT_NE(stream.id, "");
+}
+
+TEST_F(WebRtcSdpTest, DeserializeMsidAttributeWithoutAppDataMixed) {
+ std::string sdp =
+ "v=0\r\n"
+ "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "m=audio 9 RTP/SAVPF 111\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtpmap:111 opus/48000/2\r\n"
+ "a=msid:stream_id\r\n"
+ "a=msid:stream_id2 track_id\r\n";
+
+ JsepSessionDescription jdesc_output(kDummyType);
+ // Mixing the syntax like this is not a good idea but we accept it
+ // and the result is the second track_id.
+ EXPECT_TRUE(SdpDeserialize(sdp, &jdesc_output));
+ auto stream = jdesc_output.description()
+ ->contents()[0]
+ .media_description()
+ ->streams()[0];
+ ASSERT_EQ(stream.stream_ids().size(), 2u);
+ EXPECT_EQ(stream.stream_ids()[0], "stream_id");
+ EXPECT_EQ(stream.stream_ids()[1], "stream_id2");
+
+ // Track id is taken from second line.
+ EXPECT_EQ(stream.id, "track_id");
+}
+
+TEST_F(WebRtcSdpTest, DeserializeMsidAttributeWithoutAppDataMixed2) {
+ std::string sdp =
+ "v=0\r\n"
+ "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "m=audio 9 RTP/SAVPF 111\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtpmap:111 opus/48000/2\r\n"
+ "a=msid:stream_id track_id\r\n"
+ "a=msid:stream_id2\r\n";
+
+ JsepSessionDescription jdesc_output(kDummyType);
+  // Mixing the syntax like this is not a good idea but we accept it
+  // and the result is the first track_id.
+ EXPECT_TRUE(SdpDeserialize(sdp, &jdesc_output));
+ auto stream = jdesc_output.description()
+ ->contents()[0]
+ .media_description()
+ ->streams()[0];
+ ASSERT_EQ(stream.stream_ids().size(), 2u);
+ EXPECT_EQ(stream.stream_ids()[0], "stream_id");
+ EXPECT_EQ(stream.stream_ids()[1], "stream_id2");
+
+ // Track id is taken from first line.
+ EXPECT_EQ(stream.id, "track_id");
+}
+
+TEST_F(WebRtcSdpTest, DeserializeMsidAttributeWithoutAppDataMixedNoStream) {
+ std::string sdp =
+ "v=0\r\n"
+ "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "m=audio 9 RTP/SAVPF 111\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtpmap:111 opus/48000/2\r\n"
+ "a=msid:stream_id\r\n"
+ "a=msid:- track_id\r\n";
+
+ JsepSessionDescription jdesc_output(kDummyType);
+ // This is somewhat undefined behavior but accept it and expect a single
+ // stream.
+ EXPECT_TRUE(SdpDeserialize(sdp, &jdesc_output));
+ auto stream = jdesc_output.description()
+ ->contents()[0]
+ .media_description()
+ ->streams()[0];
+ ASSERT_EQ(stream.stream_ids().size(), 1u);
+ EXPECT_EQ(stream.stream_ids()[0], "stream_id");
+ EXPECT_EQ(stream.id, "track_id");
+}
+
+TEST_F(WebRtcSdpTest, DeserializeMsidAttributeWithMissingStreamId) {
+ std::string sdp =
+ "v=0\r\n"
+ "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "m=audio 9 RTP/SAVPF 111\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtpmap:111 opus/48000/2\r\n"
+ "a=msid: track_id\r\n";
+
+ JsepSessionDescription jdesc_output(kDummyType);
+ EXPECT_FALSE(SdpDeserialize(sdp, &jdesc_output));
+}
+
+// Tests that if both session-level address and media-level address exist, use
+// the media-level address.
+TEST_F(WebRtcSdpTest, ParseConnectionData) {
+ JsepSessionDescription jsep_desc(kDummyType);
+
+  // Session-level address.
+ std::string sdp = kSdpFullString;
+ InjectAfter("s=-\r\n", "c=IN IP4 192.168.0.3\r\n", &sdp);
+ EXPECT_TRUE(SdpDeserialize(sdp, &jsep_desc));
+
+ const auto& content1 = jsep_desc.description()->contents()[0];
+ EXPECT_EQ("74.125.127.126:2345",
+ content1.media_description()->connection_address().ToString());
+ const auto& content2 = jsep_desc.description()->contents()[1];
+ EXPECT_EQ("74.125.224.39:3457",
+ content2.media_description()->connection_address().ToString());
+}
+
+// Tests that the session-level connection address will be used if the media
+// level-addresses are not specified.
+TEST_F(WebRtcSdpTest, ParseConnectionDataSessionLevelOnly) {
+ JsepSessionDescription jsep_desc(kDummyType);
+
+  // Session-level address.
+ std::string sdp = kSdpString;
+ InjectAfter("s=-\r\n", "c=IN IP4 192.168.0.3\r\n", &sdp);
+ // Remove the media level addresses.
+ Replace("c=IN IP4 0.0.0.0\r\n", "", &sdp);
+ Replace("c=IN IP4 0.0.0.0\r\n", "", &sdp);
+ EXPECT_TRUE(SdpDeserialize(sdp, &jsep_desc));
+
+ const auto& content1 = jsep_desc.description()->contents()[0];
+ EXPECT_EQ("192.168.0.3:9",
+ content1.media_description()->connection_address().ToString());
+ const auto& content2 = jsep_desc.description()->contents()[1];
+ EXPECT_EQ("192.168.0.3:9",
+ content2.media_description()->connection_address().ToString());
+}
+
+TEST_F(WebRtcSdpTest, ParseConnectionDataIPv6) {
+ JsepSessionDescription jsep_desc(kDummyType);
+
+ std::string sdp = kSdpString;
+ EXPECT_TRUE(SdpDeserialize(sdp, &jsep_desc));
+ Replace("m=audio 9 RTP/SAVPF 111 103 104\r\nc=IN IP4 0.0.0.0\r\n",
+ "m=audio 9 RTP/SAVPF 111 103 104\r\nc=IN IP6 "
+ "2001:0db8:85a3:0000:0000:8a2e:0370:7335\r\n",
+ &sdp);
+ Replace("m=video 9 RTP/SAVPF 120\r\nc=IN IP4 0.0.0.0\r\n",
+ "m=video 9 RTP/SAVPF 120\r\nc=IN IP6 "
+ "2001:0db8:85a3:0000:0000:8a2e:0370:7336\r\n",
+ &sdp);
+ EXPECT_TRUE(SdpDeserialize(sdp, &jsep_desc));
+ const auto& content1 = jsep_desc.description()->contents()[0];
+ EXPECT_EQ("[2001:db8:85a3::8a2e:370:7335]:9",
+ content1.media_description()->connection_address().ToString());
+ const auto& content2 = jsep_desc.description()->contents()[1];
+ EXPECT_EQ("[2001:db8:85a3::8a2e:370:7336]:9",
+ content2.media_description()->connection_address().ToString());
+}
+
+// Test that a c= line that contains a hostname connection address can be
+// parsed.
+TEST_F(WebRtcSdpTest, ParseConnectionDataWithHostnameConnectionAddress) {
+ JsepSessionDescription jsep_desc(kDummyType);
+ std::string sdp = kSdpString;
+ EXPECT_TRUE(SdpDeserialize(sdp, &jsep_desc));
+
+ sdp = kSdpString;
+ Replace("c=IN IP4 0.0.0.0\r\n", "c=IN IP4 example.local\r\n", &sdp);
+ Replace("c=IN IP4 0.0.0.0\r\n", "c=IN IP4 example.local\r\n", &sdp);
+ ASSERT_TRUE(SdpDeserialize(sdp, &jsep_desc));
+
+ ASSERT_NE(nullptr, jsep_desc.description());
+ const auto& content1 = jsep_desc.description()->contents()[0];
+ EXPECT_EQ("example.local:9",
+ content1.media_description()->connection_address().ToString());
+ const auto& content2 = jsep_desc.description()->contents()[1];
+ EXPECT_EQ("example.local:9",
+ content2.media_description()->connection_address().ToString());
+}
+
+// Test that the invalid or unsupported connection data cannot be parsed.
+TEST_F(WebRtcSdpTest, ParseConnectionDataFailure) {
+ JsepSessionDescription jsep_desc(kDummyType);
+ std::string sdp = kSdpString;
+ EXPECT_TRUE(SdpDeserialize(sdp, &jsep_desc));
+
+ // Unsupported multicast IPv4 address.
+ sdp = kSdpFullString;
+ Replace("c=IN IP4 74.125.224.39\r\n", "c=IN IP4 74.125.224.39/127\r\n", &sdp);
+ EXPECT_FALSE(SdpDeserialize(sdp, &jsep_desc));
+
+ // Unsupported multicast IPv6 address.
+ sdp = kSdpFullString;
+ Replace("c=IN IP4 74.125.224.39\r\n", "c=IN IP6 ::1/3\r\n", &sdp);
+ EXPECT_FALSE(SdpDeserialize(sdp, &jsep_desc));
+
+ // Mismatched address type.
+ sdp = kSdpFullString;
+ Replace("c=IN IP4 74.125.224.39\r\n", "c=IN IP6 74.125.224.39\r\n", &sdp);
+ EXPECT_FALSE(SdpDeserialize(sdp, &jsep_desc));
+
+ sdp = kSdpFullString;
+ Replace("c=IN IP4 74.125.224.39\r\n",
+ "c=IN IP4 2001:0db8:85a3:0000:0000:8a2e:0370:7334\r\n", &sdp);
+ EXPECT_FALSE(SdpDeserialize(sdp, &jsep_desc));
+}
+
+TEST_F(WebRtcSdpTest, SerializeAndDeserializeWithConnectionAddress) {
+ JsepSessionDescription expected_jsep(kDummyType);
+ MakeDescriptionWithoutCandidates(&expected_jsep);
+ // Serialization.
+ std::string message = webrtc::SdpSerialize(expected_jsep);
+ // Deserialization.
+ JsepSessionDescription jdesc(kDummyType);
+ EXPECT_TRUE(SdpDeserialize(message, &jdesc));
+ auto audio_desc = jdesc.description()
+ ->GetContentByName(kAudioContentName)
+ ->media_description();
+ auto video_desc = jdesc.description()
+ ->GetContentByName(kVideoContentName)
+ ->media_description();
+ EXPECT_EQ(audio_desc_->connection_address().ToString(),
+ audio_desc->connection_address().ToString());
+ EXPECT_EQ(video_desc_->connection_address().ToString(),
+ video_desc->connection_address().ToString());
+}
+
+// RFC4566 says "If a session has no meaningful name, the value "s= " SHOULD be
+// used (i.e., a single space as the session name)." So we should accept that.
+TEST_F(WebRtcSdpTest, DeserializeEmptySessionName) {
+ JsepSessionDescription jsep_desc(kDummyType);
+ std::string sdp = kSdpString;
+ Replace("s=-\r\n", "s= \r\n", &sdp);
+ EXPECT_TRUE(SdpDeserialize(sdp, &jsep_desc));
+}
+
+// Simulcast malformed input test for invalid format.
+TEST_F(WebRtcSdpTest, DeserializeSimulcastNegative_EmptyAttribute) {
+ ExpectParseFailureWithNewLines(
+ "a=ssrc:3 msid:local_stream_1 video_track_id_1\r\n", "a=simulcast:\r\n",
+ "a=simulcast:");
+}
+
+// Tests that duplicate simulcast entries in the SDP triggers a parse failure.
+TEST_F(WebRtcSdpTest, DeserializeSimulcastNegative_DuplicateAttribute) {
+ ExpectParseFailureWithNewLines(
+ "a=ssrc:3 msid:local_stream_1 video_track_id_1\r\n",
+ "a=simulcast:send 1\r\na=simulcast:recv 2\r\n", "a=simulcast:");
+}
+
+// Validates that deserialization uses the a=simulcast: attribute
+TEST_F(WebRtcSdpTest, TestDeserializeSimulcastAttribute) {
+ std::string sdp = kUnifiedPlanSdpFullStringNoSsrc;
+ sdp += "a=rid:1 send\r\n";
+ sdp += "a=rid:2 send\r\n";
+ sdp += "a=rid:3 send\r\n";
+ sdp += "a=rid:4 recv\r\n";
+ sdp += "a=rid:5 recv\r\n";
+ sdp += "a=rid:6 recv\r\n";
+ sdp += "a=simulcast:send 1,2;3 recv 4;5;6\r\n";
+ JsepSessionDescription output(kDummyType);
+ SdpParseError error;
+ EXPECT_TRUE(webrtc::SdpDeserialize(sdp, &output, &error));
+ const cricket::ContentInfos& contents = output.description()->contents();
+ const cricket::MediaContentDescription* media =
+ contents.back().media_description();
+ EXPECT_TRUE(media->HasSimulcast());
+ EXPECT_EQ(2ul, media->simulcast_description().send_layers().size());
+ EXPECT_EQ(3ul, media->simulcast_description().receive_layers().size());
+ EXPECT_FALSE(media->streams().empty());
+ const std::vector<RidDescription>& rids = media->streams()[0].rids();
+ CompareRidDescriptionIds(rids, {"1", "2", "3"});
+}
+
+// Validates that deserialization removes rids that do not appear in SDP
+TEST_F(WebRtcSdpTest, TestDeserializeSimulcastAttributeRemovesUnknownRids) {
+ std::string sdp = kUnifiedPlanSdpFullStringNoSsrc;
+ sdp += "a=rid:1 send\r\n";
+ sdp += "a=rid:3 send\r\n";
+ sdp += "a=rid:4 recv\r\n";
+ sdp += "a=simulcast:send 1,2;3 recv 4;5,6\r\n";
+ JsepSessionDescription output(kDummyType);
+ SdpParseError error;
+ EXPECT_TRUE(webrtc::SdpDeserialize(sdp, &output, &error));
+ const cricket::ContentInfos& contents = output.description()->contents();
+ const cricket::MediaContentDescription* media =
+ contents.back().media_description();
+ EXPECT_TRUE(media->HasSimulcast());
+ const SimulcastDescription& simulcast = media->simulcast_description();
+ EXPECT_EQ(2ul, simulcast.send_layers().size());
+ EXPECT_EQ(1ul, simulcast.receive_layers().size());
+
+ std::vector<SimulcastLayer> all_send_layers =
+ simulcast.send_layers().GetAllLayers();
+ EXPECT_EQ(2ul, all_send_layers.size());
+ EXPECT_EQ(0,
+ absl::c_count_if(all_send_layers, [](const SimulcastLayer& layer) {
+ return layer.rid == "2";
+ }));
+
+ std::vector<SimulcastLayer> all_receive_layers =
+ simulcast.receive_layers().GetAllLayers();
+ ASSERT_EQ(1ul, all_receive_layers.size());
+ EXPECT_EQ("4", all_receive_layers[0].rid);
+
+ EXPECT_FALSE(media->streams().empty());
+ const std::vector<RidDescription>& rids = media->streams()[0].rids();
+ CompareRidDescriptionIds(rids, {"1", "3"});
+}
+
+// Validates that Simulcast removes rids that appear in both send and receive.
+TEST_F(WebRtcSdpTest,
+ TestDeserializeSimulcastAttributeRemovesDuplicateSendReceive) {
+ std::string sdp = kUnifiedPlanSdpFullStringNoSsrc;
+ sdp += "a=rid:1 send\r\n";
+ sdp += "a=rid:2 send\r\n";
+ sdp += "a=rid:3 send\r\n";
+ sdp += "a=rid:4 recv\r\n";
+ sdp += "a=simulcast:send 1;2;3 recv 2;4\r\n";
+ JsepSessionDescription output(kDummyType);
+ SdpParseError error;
+ EXPECT_TRUE(webrtc::SdpDeserialize(sdp, &output, &error));
+ const cricket::ContentInfos& contents = output.description()->contents();
+ const cricket::MediaContentDescription* media =
+ contents.back().media_description();
+ EXPECT_TRUE(media->HasSimulcast());
+ const SimulcastDescription& simulcast = media->simulcast_description();
+ EXPECT_EQ(2ul, simulcast.send_layers().size());
+ EXPECT_EQ(1ul, simulcast.receive_layers().size());
+ EXPECT_EQ(2ul, simulcast.send_layers().GetAllLayers().size());
+ EXPECT_EQ(1ul, simulcast.receive_layers().GetAllLayers().size());
+
+ EXPECT_FALSE(media->streams().empty());
+ const std::vector<RidDescription>& rids = media->streams()[0].rids();
+ CompareRidDescriptionIds(rids, {"1", "3"});
+}
+
+// Ignores empty rid line.
+TEST_F(WebRtcSdpTest, TestDeserializeIgnoresEmptyRidLines) {
+ std::string sdp = kUnifiedPlanSdpFullStringNoSsrc;
+ sdp += "a=rid:1 send\r\n";
+ sdp += "a=rid:2 send\r\n";
+ sdp += "a=rid\r\n"; // Should ignore this line.
+ sdp += "a=rid:\r\n"; // Should ignore this line.
+ sdp += "a=simulcast:send 1;2\r\n";
+ JsepSessionDescription output(kDummyType);
+ SdpParseError error;
+ EXPECT_TRUE(webrtc::SdpDeserialize(sdp, &output, &error));
+ const cricket::ContentInfos& contents = output.description()->contents();
+ const cricket::MediaContentDescription* media =
+ contents.back().media_description();
+ EXPECT_TRUE(media->HasSimulcast());
+ const SimulcastDescription& simulcast = media->simulcast_description();
+ EXPECT_TRUE(simulcast.receive_layers().empty());
+ EXPECT_EQ(2ul, simulcast.send_layers().size());
+ EXPECT_EQ(2ul, simulcast.send_layers().GetAllLayers().size());
+
+ EXPECT_FALSE(media->streams().empty());
+ const std::vector<RidDescription>& rids = media->streams()[0].rids();
+ CompareRidDescriptionIds(rids, {"1", "2"});
+}
+
+// Ignores malformed rid lines.
+TEST_F(WebRtcSdpTest, TestDeserializeIgnoresMalformedRidLines) {
+ std::string sdp = kUnifiedPlanSdpFullStringNoSsrc;
+ sdp += "a=rid:1 send pt=\r\n"; // Should ignore this line.
+ sdp += "a=rid:2 receive\r\n"; // Should ignore this line.
+ sdp += "a=rid:3 max-width=720;pt=120\r\n"; // Should ignore this line.
+ sdp += "a=rid:4\r\n"; // Should ignore this line.
+ sdp += "a=rid:5 send\r\n";
+ sdp += "a=simulcast:send 1,2,3;4,5\r\n";
+ JsepSessionDescription output(kDummyType);
+ SdpParseError error;
+ EXPECT_TRUE(webrtc::SdpDeserialize(sdp, &output, &error));
+ const cricket::ContentInfos& contents = output.description()->contents();
+ const cricket::MediaContentDescription* media =
+ contents.back().media_description();
+ EXPECT_TRUE(media->HasSimulcast());
+ const SimulcastDescription& simulcast = media->simulcast_description();
+ EXPECT_TRUE(simulcast.receive_layers().empty());
+ EXPECT_EQ(1ul, simulcast.send_layers().size());
+ EXPECT_EQ(1ul, simulcast.send_layers().GetAllLayers().size());
+
+ EXPECT_FALSE(media->streams().empty());
+ const std::vector<RidDescription>& rids = media->streams()[0].rids();
+ CompareRidDescriptionIds(rids, {"5"});
+}
+
+// Removes RIDs that specify a different format than the m= section.
+TEST_F(WebRtcSdpTest, TestDeserializeRemovesRidsWithInvalidCodec) {
+ std::string sdp = kUnifiedPlanSdpFullStringNoSsrc;
+ sdp += "a=rid:1 send pt=121,120\r\n"; // Should remove 121 and keep RID.
+ sdp += "a=rid:2 send pt=121\r\n"; // Should remove RID altogether.
+ sdp += "a=simulcast:send 1;2\r\n";
+ JsepSessionDescription output(kDummyType);
+ SdpParseError error;
+ EXPECT_TRUE(webrtc::SdpDeserialize(sdp, &output, &error));
+ const cricket::ContentInfos& contents = output.description()->contents();
+ const cricket::MediaContentDescription* media =
+ contents.back().media_description();
+ EXPECT_TRUE(media->HasSimulcast());
+ const SimulcastDescription& simulcast = media->simulcast_description();
+ EXPECT_TRUE(simulcast.receive_layers().empty());
+ EXPECT_EQ(1ul, simulcast.send_layers().size());
+ EXPECT_EQ(1ul, simulcast.send_layers().GetAllLayers().size());
+ EXPECT_EQ("1", simulcast.send_layers()[0][0].rid);
+ EXPECT_EQ(1ul, media->streams().size());
+ const std::vector<RidDescription>& rids = media->streams()[0].rids();
+ EXPECT_EQ(1ul, rids.size());
+ EXPECT_EQ("1", rids[0].rid);
+ EXPECT_EQ(1ul, rids[0].payload_types.size());
+ EXPECT_EQ(120, rids[0].payload_types[0]);
+}
+
+// Ignores duplicate rid lines
+TEST_F(WebRtcSdpTest, TestDeserializeIgnoresDuplicateRidLines) {
+ std::string sdp = kUnifiedPlanSdpFullStringNoSsrc;
+ sdp += "a=rid:1 send\r\n";
+ sdp += "a=rid:2 send\r\n";
+ sdp += "a=rid:2 send\r\n";
+ sdp += "a=rid:3 send\r\n";
+ sdp += "a=rid:4 recv\r\n";
+ sdp += "a=simulcast:send 1,2;3 recv 4\r\n";
+ JsepSessionDescription output(kDummyType);
+ SdpParseError error;
+ EXPECT_TRUE(webrtc::SdpDeserialize(sdp, &output, &error));
+ const cricket::ContentInfos& contents = output.description()->contents();
+ const cricket::MediaContentDescription* media =
+ contents.back().media_description();
+ EXPECT_TRUE(media->HasSimulcast());
+ const SimulcastDescription& simulcast = media->simulcast_description();
+ EXPECT_EQ(2ul, simulcast.send_layers().size());
+ EXPECT_EQ(1ul, simulcast.receive_layers().size());
+ EXPECT_EQ(2ul, simulcast.send_layers().GetAllLayers().size());
+ EXPECT_EQ(1ul, simulcast.receive_layers().GetAllLayers().size());
+
+ EXPECT_FALSE(media->streams().empty());
+ const std::vector<RidDescription>& rids = media->streams()[0].rids();
+ CompareRidDescriptionIds(rids, {"1", "3"});
+}
+
+TEST_F(WebRtcSdpTest, TestDeserializeRidSendDirection) {
+ std::string sdp = kUnifiedPlanSdpFullStringNoSsrc;
+ sdp += "a=rid:1 recv\r\n";
+ sdp += "a=rid:2 recv\r\n";
+ sdp += "a=simulcast:send 1;2\r\n";
+ JsepSessionDescription output(kDummyType);
+ SdpParseError error;
+ EXPECT_TRUE(webrtc::SdpDeserialize(sdp, &output, &error));
+ const cricket::ContentInfos& contents = output.description()->contents();
+ const cricket::MediaContentDescription* media =
+ contents.back().media_description();
+ EXPECT_FALSE(media->HasSimulcast());
+}
+
+TEST_F(WebRtcSdpTest, TestDeserializeRidRecvDirection) {
+ std::string sdp = kUnifiedPlanSdpFullStringNoSsrc;
+ sdp += "a=rid:1 send\r\n";
+ sdp += "a=rid:2 send\r\n";
+ sdp += "a=simulcast:recv 1;2\r\n";
+ JsepSessionDescription output(kDummyType);
+ SdpParseError error;
+ EXPECT_TRUE(webrtc::SdpDeserialize(sdp, &output, &error));
+ const cricket::ContentInfos& contents = output.description()->contents();
+ const cricket::MediaContentDescription* media =
+ contents.back().media_description();
+ EXPECT_FALSE(media->HasSimulcast());
+}
+
+TEST_F(WebRtcSdpTest, TestDeserializeIgnoresWrongRidDirectionLines) {
+ std::string sdp = kUnifiedPlanSdpFullStringNoSsrc;
+ sdp += "a=rid:1 send\r\n";
+ sdp += "a=rid:2 send\r\n";
+ sdp += "a=rid:3 send\r\n";
+ sdp += "a=rid:4 recv\r\n";
+ sdp += "a=rid:5 recv\r\n";
+ sdp += "a=rid:6 recv\r\n";
+ sdp += "a=simulcast:send 1;5;3 recv 4;2;6\r\n";
+ JsepSessionDescription output(kDummyType);
+ SdpParseError error;
+ EXPECT_TRUE(webrtc::SdpDeserialize(sdp, &output, &error));
+ const cricket::ContentInfos& contents = output.description()->contents();
+ const cricket::MediaContentDescription* media =
+ contents.back().media_description();
+ EXPECT_TRUE(media->HasSimulcast());
+ const SimulcastDescription& simulcast = media->simulcast_description();
+ EXPECT_EQ(2ul, simulcast.send_layers().size());
+ EXPECT_EQ(2ul, simulcast.receive_layers().size());
+ EXPECT_EQ(2ul, simulcast.send_layers().GetAllLayers().size());
+ EXPECT_EQ(2ul, simulcast.receive_layers().GetAllLayers().size());
+
+ EXPECT_FALSE(media->streams().empty());
+ const std::vector<RidDescription>& rids = media->streams()[0].rids();
+ CompareRidDescriptionIds(rids, {"1", "3"});
+}
+
+// Simulcast serialization integration test.
+// This test will serialize and deserialize the description and compare.
+// More detailed tests for parsing simulcast can be found in
+// unit tests for SdpSerializer.
+TEST_F(WebRtcSdpTest, SerializeSimulcast_ComplexSerialization) {
+ MakeUnifiedPlanDescription(/* use_ssrcs = */ false);
+ auto description = jdesc_.description();
+ auto media = description->GetContentDescriptionByName(kVideoContentName3);
+ ASSERT_EQ(media->streams().size(), 1ul);
+ StreamParams& send_stream = media->mutable_streams()[0];
+ std::vector<RidDescription> send_rids;
+ send_rids.push_back(RidDescription("1", RidDirection::kSend));
+ send_rids.push_back(RidDescription("2", RidDirection::kSend));
+ send_rids.push_back(RidDescription("3", RidDirection::kSend));
+ send_rids.push_back(RidDescription("4", RidDirection::kSend));
+ send_stream.set_rids(send_rids);
+ std::vector<RidDescription> receive_rids;
+ receive_rids.push_back(RidDescription("5", RidDirection::kReceive));
+ receive_rids.push_back(RidDescription("6", RidDirection::kReceive));
+ receive_rids.push_back(RidDescription("7", RidDirection::kReceive));
+ media->set_receive_rids(receive_rids);
+
+ SimulcastDescription& simulcast = media->simulcast_description();
+ simulcast.send_layers().AddLayerWithAlternatives(
+ {SimulcastLayer("2", false), SimulcastLayer("1", true)});
+ simulcast.send_layers().AddLayerWithAlternatives(
+ {SimulcastLayer("4", false), SimulcastLayer("3", false)});
+ simulcast.receive_layers().AddLayer({SimulcastLayer("5", false)});
+ simulcast.receive_layers().AddLayer({SimulcastLayer("6", false)});
+ simulcast.receive_layers().AddLayer({SimulcastLayer("7", false)});
+
+ TestSerialize(jdesc_);
+}
+
+// Test that the content name is empty if the media section does not have an
+// a=mid line.
+TEST_F(WebRtcSdpTest, ParseNoMid) {
+ std::string sdp = kSdpString;
+ Replace("a=mid:audio_content_name\r\n", "", &sdp);
+ Replace("a=mid:video_content_name\r\n", "", &sdp);
+
+ JsepSessionDescription output(kDummyType);
+ SdpParseError error;
+ ASSERT_TRUE(webrtc::SdpDeserialize(sdp, &output, &error));
+
+ EXPECT_THAT(output.description()->contents(),
+ ElementsAre(Field("name", &cricket::ContentInfo::name, ""),
+ Field("name", &cricket::ContentInfo::name, "")));
+}
+
+TEST_F(WebRtcSdpTest, SerializeWithDefaultSctpProtocol) {
+ AddSctpDataChannel(false); // Don't use sctpmap
+ JsepSessionDescription jsep_desc(kDummyType);
+ MakeDescriptionWithoutCandidates(&jsep_desc);
+ std::string message = webrtc::SdpSerialize(jsep_desc);
+ EXPECT_NE(std::string::npos,
+ message.find(cricket::kMediaProtocolUdpDtlsSctp));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeWithAllSctpProtocols) {
+ AddSctpDataChannel(false);
+ std::string protocols[] = {cricket::kMediaProtocolDtlsSctp,
+ cricket::kMediaProtocolUdpDtlsSctp,
+ cricket::kMediaProtocolTcpDtlsSctp};
+ for (const auto& protocol : protocols) {
+ sctp_desc_->set_protocol(protocol);
+ JsepSessionDescription jsep_desc(kDummyType);
+ MakeDescriptionWithoutCandidates(&jsep_desc);
+ std::string message = webrtc::SdpSerialize(jsep_desc);
+ EXPECT_NE(std::string::npos, message.find(protocol));
+ JsepSessionDescription jsep_output(kDummyType);
+ SdpParseError error;
+ EXPECT_TRUE(webrtc::SdpDeserialize(message, &jsep_output, &error));
+ }
+}
+
+// According to https://tools.ietf.org/html/rfc5576#section-6.1, the CNAME
+// attribute is mandatory, but we relax that restriction.
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithoutCname) {
+ std::string sdp_without_cname = kSdpFullString;
+ Replace("a=ssrc:1 cname:stream_1_cname\r\n", "", &sdp_without_cname);
+ JsepSessionDescription new_jdesc(kDummyType);
+ EXPECT_TRUE(SdpDeserialize(sdp_without_cname, &new_jdesc));
+
+ audio_desc_->mutable_streams()[0].cname = "";
+ ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(),
+ jdesc_.session_version()));
+ EXPECT_TRUE(CompareSessionDescription(jdesc_, new_jdesc));
+}
+
+TEST_F(WebRtcSdpTest,
+ DeserializeSdpWithUnrecognizedApplicationProtocolRejectsSection) {
+ const char* unsupported_application_protocols[] = {
+ "bogus/RTP/", "RTP/SAVPF", "DTLS/SCTP/RTP/", "DTLS/SCTPRTP/",
+ "obviously-bogus", "UDP/TL/RTSP/SAVPF", "UDP/TL/RTSP/S"};
+
+ for (auto proto : unsupported_application_protocols) {
+ JsepSessionDescription jdesc_output(kDummyType);
+ std::string sdp = kSdpSessionString;
+ sdp.append("m=application 9 ");
+ sdp.append(proto);
+ sdp.append(" 101\r\n");
+
+ EXPECT_TRUE(SdpDeserialize(sdp, &jdesc_output));
+
+ // Make sure we actually parsed a single media section
+ ASSERT_EQ(1u, jdesc_output.description()->contents().size());
+
+ // Content is not getting parsed as sctp but instead unsupported.
+ EXPECT_EQ(nullptr, jdesc_output.description()
+ ->contents()[0]
+ .media_description()
+ ->as_sctp());
+ EXPECT_NE(nullptr, jdesc_output.description()
+ ->contents()[0]
+ .media_description()
+ ->as_unsupported());
+
+ // Reject the content
+ EXPECT_TRUE(jdesc_output.description()->contents()[0].rejected);
+ }
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSdpWithUnsupportedMediaType) {
+ std::string sdp = kSdpSessionString;
+ sdp +=
+ "m=bogus 9 RTP/SAVPF 0 8\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=mid:bogusmid\r\n";
+ sdp +=
+ "m=audio/something 9 RTP/SAVPF 0 8\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=mid:somethingmid\r\n";
+
+ JsepSessionDescription jdesc_output(kDummyType);
+ EXPECT_TRUE(SdpDeserialize(sdp, &jdesc_output));
+
+ ASSERT_EQ(2u, jdesc_output.description()->contents().size());
+ ASSERT_NE(nullptr, jdesc_output.description()
+ ->contents()[0]
+ .media_description()
+ ->as_unsupported());
+ ASSERT_NE(nullptr, jdesc_output.description()
+ ->contents()[1]
+ .media_description()
+ ->as_unsupported());
+
+ EXPECT_TRUE(jdesc_output.description()->contents()[0].rejected);
+ EXPECT_TRUE(jdesc_output.description()->contents()[1].rejected);
+
+ EXPECT_EQ(jdesc_output.description()->contents()[0].name, "bogusmid");
+ EXPECT_EQ(jdesc_output.description()->contents()[1].name, "somethingmid");
+}
+
+TEST_F(WebRtcSdpTest, MediaTypeProtocolMismatch) {
+ std::string sdp =
+ "v=0\r\n"
+ "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n";
+
+ ExpectParseFailure(std::string(sdp + "m=audio 9 UDP/DTLS/SCTP 120\r\n"),
+ "m=audio");
+ ExpectParseFailure(std::string(sdp + "m=video 9 UDP/DTLS/SCTP 120\r\n"),
+ "m=video");
+ ExpectParseFailure(std::string(sdp + "m=video 9 SOMETHING 120\r\n"),
+ "m=video");
+}
+
+// Regression test for:
+// https://bugs.chromium.org/p/chromium/issues/detail?id=1171965
+TEST_F(WebRtcSdpTest, SctpPortInUnsupportedContent) {
+ std::string sdp =
+ "v=0\r\n"
+ "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "m=o 1 DTLS/SCTP 5000\r\n"
+ "a=sctp-port\r\n";
+
+ JsepSessionDescription jdesc_output(kDummyType);
+ EXPECT_TRUE(SdpDeserialize(sdp, &jdesc_output));
+}
+
+TEST_F(WebRtcSdpTest, IllegalMidCharacterValue) {
+ std::string sdp = kSdpString;
+ // [ is an illegal token value.
+ Replace("a=mid:", "a=mid:[]", &sdp);
+ ExpectParseFailure(std::string(sdp), "a=mid:[]");
+}
+
+TEST_F(WebRtcSdpTest, MaxChannels) {
+ std::string sdp =
+ "v=0\r\n"
+ "o=- 11 22 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "m=audio 49232 RTP/AVP 108\r\n"
+ "a=rtpmap:108 ISAC/16000/512\r\n";
+
+ ExpectParseFailure(sdp, "a=rtpmap:108 ISAC/16000/512");
+}
+
+TEST_F(WebRtcSdpTest, DuplicateAudioRtpmapWithConflict) {
+ std::string sdp =
+ "v=0\r\n"
+ "o=- 11 22 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "m=audio 49232 RTP/AVP 108\r\n"
+ "a=rtpmap:108 ISAC/16000\r\n"
+ "a=rtpmap:108 G711/16000\r\n";
+
+ ExpectParseFailure(sdp, "a=rtpmap:108 G711/16000");
+}
+
+TEST_F(WebRtcSdpTest, DuplicateVideoRtpmapWithConflict) {
+ std::string sdp =
+ "v=0\r\n"
+ "o=- 11 22 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "m=video 49232 RTP/AVP 108\r\n"
+ "a=rtpmap:108 VP8/90000\r\n"
+ "a=rtpmap:108 VP9/90000\r\n";
+
+ ExpectParseFailure(sdp, "a=rtpmap:108 VP9/90000");
+}
+
+TEST_F(WebRtcSdpTest, FmtpBeforeRtpMap) {
+ std::string sdp =
+ "v=0\r\n"
+ "o=- 11 22 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "m=video 49232 RTP/AVP 108\r\n"
+ "a=fmtp:108 profile-level=1\r\n"
+ "a=rtpmap:108 VP9/90000\r\n";
+
+ JsepSessionDescription jdesc_output(kDummyType);
+ EXPECT_TRUE(SdpDeserialize(sdp, &jdesc_output));
+}
+
+TEST_F(WebRtcSdpTest, StaticallyAssignedPayloadTypeWithDifferentCasing) {
+ std::string sdp =
+ "v=0\r\n"
+ "o=- 11 22 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "m=audio 49232 RTP/AVP 18\r\n"
+ // Casing differs from statically assigned type, this should
+ // still be accepted.
+ "a=rtpmap:18 g729/8000\r\n";
+
+ JsepSessionDescription jdesc_output(kDummyType);
+ EXPECT_TRUE(SdpDeserialize(sdp, &jdesc_output));
+}
+
+// This tests parsing of SDP with unknown ssrc-specific attributes.
+TEST_F(WebRtcSdpTest, ParseIgnoreUnknownSsrcSpecificAttribute) {
+ std::string sdp = kSdpString;
+ sdp += "a=ssrc:1 mslabel:something\r\n";
+
+ JsepSessionDescription output(kDummyType);
+ SdpParseError error;
+ ASSERT_TRUE(webrtc::SdpDeserialize(sdp, &output, &error));
+}
+
+TEST_F(WebRtcSdpTest, ParseSessionLevelExtmapAttributes) {
+ std::string sdp =
+ "v=0\r\n"
+ "o=- 0 3 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=group:BUNDLE 0\r\n"
+ "a=fingerprint:sha-1 "
+ "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n"
+ "a=setup:actpass\r\n"
+ "a=ice-ufrag:ETEn\r\n"
+ "a=ice-pwd:OtSK0WpNtpUjkY4+86js7Z/l\r\n"
+ "a=extmap:3 "
+ "http://www.ietf.org/id/"
+ "draft-holmer-rmcat-transport-wide-cc-extensions-01\r\n"
+ "m=audio 9 UDP/TLS/RTP/SAVPF 111\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtcp-mux\r\n"
+ "a=sendonly\r\n"
+ "a=mid:0\r\n"
+ "a=rtpmap:111 opus/48000/2\r\n";
+ JsepSessionDescription jdesc(kDummyType);
+ EXPECT_TRUE(SdpDeserialize(sdp, &jdesc));
+ ASSERT_EQ(1u, jdesc.description()->contents().size());
+ const auto content = jdesc.description()->contents()[0];
+ const auto* audio_description = content.media_description()->as_audio();
+ ASSERT_NE(audio_description, nullptr);
+ const auto& extensions = audio_description->rtp_header_extensions();
+ ASSERT_EQ(1u, extensions.size());
+ EXPECT_EQ(extensions[0].uri,
+ "http://www.ietf.org/id/"
+ "draft-holmer-rmcat-transport-wide-cc-extensions-01");
+ EXPECT_EQ(extensions[0].id, 3);
+}
+
+TEST_F(WebRtcSdpTest, RejectSessionLevelMediaLevelExtmapMixedUsage) {
+ std::string sdp =
+ "v=0\r\n"
+ "o=- 0 3 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=group:BUNDLE 0\r\n"
+ "a=fingerprint:sha-1 "
+ "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n"
+ "a=setup:actpass\r\n"
+ "a=ice-ufrag:ETEn\r\n"
+ "a=ice-pwd:OtSK0WpNtpUjkY4+86js7Z/l\r\n"
+ "a=extmap:3 "
+ "http://www.ietf.org/id/"
+ "draft-holmer-rmcat-transport-wide-cc-extensions-01\r\n"
+ "m=audio 9 UDP/TLS/RTP/SAVPF 111\r\n"
+ "a=extmap:2 "
+ "http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtcp-mux\r\n"
+ "a=sendonly\r\n"
+ "a=mid:0\r\n"
+ "a=rtpmap:111 opus/48000/2\r\n";
+ JsepSessionDescription jdesc(kDummyType);
+ EXPECT_FALSE(SdpDeserialize(sdp, &jdesc));
+}
+
+TEST_F(WebRtcSdpTest, RejectDuplicateSsrcInSsrcGroup) {
+ std::string sdp =
+ "v=0\r\n"
+ "o=- 0 3 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=group:BUNDLE 0\r\n"
+ "a=fingerprint:sha-1 "
+ "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n"
+ "a=setup:actpass\r\n"
+ "a=ice-ufrag:ETEn\r\n"
+ "a=ice-pwd:OtSK0WpNtpUjkY4+86js7Z/l\r\n"
+ "m=video 9 UDP/TLS/RTP/SAVPF 96 97\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtcp-mux\r\n"
+ "a=sendonly\r\n"
+ "a=mid:0\r\n"
+ "a=rtpmap:96 VP8/90000\r\n"
+ "a=rtpmap:97 rtx/90000\r\n"
+ "a=fmtp:97 apt=96\r\n"
+ "a=ssrc-group:FID 1234 1234\r\n"
+ "a=ssrc:1234 cname:test\r\n";
+ JsepSessionDescription jdesc(kDummyType);
+ EXPECT_FALSE(SdpDeserialize(sdp, &jdesc));
+}
+
+TEST_F(WebRtcSdpTest, ExpectsTLineBeforeAttributeLine) {
+ // https://www.rfc-editor.org/rfc/rfc4566#page-9
+ // says a= attributes must come last.
+ std::string sdp =
+ "v=0\r\n"
+ "o=- 0 3 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "a=thisisnottherightplace\r\n"
+ "t=0 0\r\n";
+ JsepSessionDescription jdesc(kDummyType);
+ EXPECT_FALSE(SdpDeserialize(sdp, &jdesc));
+}
+
+TEST_F(WebRtcSdpTest, IgnoresUnknownAttributeLines) {
+ std::string sdp =
+ "v=0\r\n"
+ "o=- 0 3 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=somethingthatisnotunderstood\r\n";
+ JsepSessionDescription jdesc(kDummyType);
+ EXPECT_TRUE(SdpDeserialize(sdp, &jdesc));
+}
diff --git a/third_party/libwebrtc/pc/webrtc_session_description_factory.cc b/third_party/libwebrtc/pc/webrtc_session_description_factory.cc
new file mode 100644
index 0000000000..42a8da3e70
--- /dev/null
+++ b/third_party/libwebrtc/pc/webrtc_session_description_factory.cc
@@ -0,0 +1,465 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/webrtc_session_description_factory.h"
+
+#include <stddef.h>
+
+#include <queue>
+#include <string>
+#include <type_traits>
+#include <utility>
+#include <vector>
+
+#include "absl/algorithm/container.h"
+#include "absl/types/optional.h"
+#include "api/jsep.h"
+#include "api/jsep_session_description.h"
+#include "api/rtc_error.h"
+#include "api/sequence_checker.h"
+#include "pc/connection_context.h"
+#include "pc/sdp_state_provider.h"
+#include "pc/session_description.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/ssl_identity.h"
+#include "rtc_base/ssl_stream_adapter.h"
+#include "rtc_base/string_encode.h"
+#include "rtc_base/unique_id_generator.h"
+
+using cricket::MediaSessionOptions;
+using rtc::UniqueRandomIdGenerator;
+
+namespace webrtc {
+namespace {
+// Suffixes appended to "CreateOffer"/"CreateAnswer" when building the error
+// message for a request that cannot be serviced.
+static const char kFailedDueToIdentityFailed[] =
+    " failed because DTLS identity request failed";
+static const char kFailedDueToSessionShutdown[] =
+    " failed because the session was shut down";
+
+// Starting value for the SDP "o=" session version; incremented for every
+// generated offer/answer (see the comment on session_version_ in the ctor).
+static const uint64_t kInitSessionVersion = 2;
+
+// Check that each sender has a unique ID.
+// Flattens the SenderOptions of all media descriptions into one list, sorts
+// it by track_id so duplicates become adjacent, then scans for an adjacent
+// pair with equal ids. Returns true iff no duplicate track_id exists.
+static bool ValidMediaSessionOptions(
+    const cricket::MediaSessionOptions& session_options) {
+  std::vector<cricket::SenderOptions> sorted_senders;
+  for (const cricket::MediaDescriptionOptions& media_description_options :
+       session_options.media_description_options) {
+    sorted_senders.insert(sorted_senders.end(),
+                          media_description_options.sender_options.begin(),
+                          media_description_options.sender_options.end());
+  }
+  absl::c_sort(sorted_senders, [](const cricket::SenderOptions& sender1,
+                                  const cricket::SenderOptions& sender2) {
+    return sender1.track_id < sender2.track_id;
+  });
+  // adjacent_find returns end() when no two neighboring (i.e. equal after
+  // sorting) senders share a track_id.
+  return absl::c_adjacent_find(sorted_senders,
+                               [](const cricket::SenderOptions& sender1,
+                                  const cricket::SenderOptions& sender2) {
+                                 return sender1.track_id == sender2.track_id;
+                               }) == sorted_senders.end();
+}
+} // namespace
+
+// static
+// Copies every ICE candidate of media section `content_name` from
+// `source_desc` into `dest_desc`, skipping candidates `dest_desc` already
+// holds. No-op if either description lacks the named content or its
+// candidate collection.
+void WebRtcSessionDescriptionFactory::CopyCandidatesFromSessionDescription(
+    const SessionDescriptionInterface* source_desc,
+    const std::string& content_name,
+    SessionDescriptionInterface* dest_desc) {
+  if (!source_desc) {
+    return;
+  }
+  const cricket::ContentInfos& contents =
+      source_desc->description()->contents();
+  const cricket::ContentInfo* cinfo =
+      source_desc->description()->GetContentByName(content_name);
+  if (!cinfo) {
+    return;
+  }
+  // `cinfo` points into `contents`, so the difference is a valid,
+  // non-negative index. Cast directly to size_t rather than narrowing the
+  // ptrdiff_t through int first.
+  size_t mediasection_index = static_cast<size_t>(cinfo - &contents[0]);
+  const IceCandidateCollection* source_candidates =
+      source_desc->candidates(mediasection_index);
+  const IceCandidateCollection* dest_candidates =
+      dest_desc->candidates(mediasection_index);
+  if (!source_candidates || !dest_candidates) {
+    return;
+  }
+  for (size_t n = 0; n < source_candidates->count(); ++n) {
+    const IceCandidateInterface* new_candidate = source_candidates->at(n);
+    // Only add candidates the destination does not already contain.
+    if (!dest_candidates->HasCandidate(new_candidate)) {
+      dest_desc->AddCandidate(source_candidates->at(n));
+    }
+  }
+}
+
+// Constructs the factory. When `dtls_enabled` is false, SDES is required and
+// no certificate is needed. When DTLS is enabled, either the supplied
+// `certificate` is installed immediately or `cert_generator` is asked to
+// produce one asynchronously; offer/answer requests arriving meanwhile are
+// queued (CERTIFICATE_WAITING) and serviced from SetCertificate() /
+// OnCertificateRequestFailed().
+WebRtcSessionDescriptionFactory::WebRtcSessionDescriptionFactory(
+    ConnectionContext* context,
+    const SdpStateProvider* sdp_info,
+    const std::string& session_id,
+    bool dtls_enabled,
+    std::unique_ptr<rtc::RTCCertificateGeneratorInterface> cert_generator,
+    rtc::scoped_refptr<rtc::RTCCertificate> certificate,
+    std::function<void(const rtc::scoped_refptr<rtc::RTCCertificate>&)>
+        on_certificate_ready,
+    const FieldTrialsView& field_trials)
+    : signaling_thread_(context->signaling_thread()),
+      transport_desc_factory_(field_trials),
+      session_desc_factory_(context->media_engine(),
+                            context->use_rtx(),
+                            context->ssrc_generator(),
+                            &transport_desc_factory_),
+      // RFC 4566 suggested a Network Time Protocol (NTP) format timestamp
+      // as the session id and session version. To simplify, it should be fine
+      // to just use a random number as session id and start version from
+      // `kInitSessionVersion`.
+      session_version_(kInitSessionVersion),
+      cert_generator_(dtls_enabled ? std::move(cert_generator) : nullptr),
+      sdp_info_(sdp_info),
+      session_id_(session_id),
+      certificate_request_state_(CERTIFICATE_NOT_NEEDED),
+      on_certificate_ready_(on_certificate_ready) {
+  RTC_DCHECK(signaling_thread_);
+
+  if (!dtls_enabled) {
+    SetSdesPolicy(cricket::SEC_REQUIRED);
+    RTC_LOG(LS_VERBOSE) << "DTLS-SRTP disabled.";
+    return;
+  }
+
+  // SRTP-SDES is disabled if DTLS is on.
+  SetSdesPolicy(cricket::SEC_DISABLED);
+  if (certificate) {
+    // Use `certificate`.
+    certificate_request_state_ = CERTIFICATE_WAITING;
+
+    RTC_LOG(LS_VERBOSE) << "DTLS-SRTP enabled; has certificate parameter.";
+    RTC_LOG(LS_INFO) << "Using certificate supplied to the constructor.";
+    SetCertificate(certificate);
+  } else {
+    // Generate certificate.
+    certificate_request_state_ = CERTIFICATE_WAITING;
+
+    // The callback may fire after `this` is destroyed; the weak pointer makes
+    // it a no-op in that case.
+    auto callback = [weak_ptr = weak_factory_.GetWeakPtr()](
+                        rtc::scoped_refptr<rtc::RTCCertificate> certificate) {
+      if (!weak_ptr) {
+        return;
+      }
+      if (certificate) {
+        weak_ptr->SetCertificate(std::move(certificate));
+      } else {
+        weak_ptr->OnCertificateRequestFailed();
+      }
+    };
+
+    rtc::KeyParams key_params = rtc::KeyParams();
+    RTC_LOG(LS_VERBOSE)
+        << "DTLS-SRTP enabled; sending DTLS identity request (key type: "
+        << key_params.type() << ").";
+
+    // Request certificate. This happens asynchronously on a different thread.
+    cert_generator_->GenerateCertificateAsync(key_params, absl::nullopt,
+                                              std::move(callback));
+  }
+}
+
+WebRtcSessionDescriptionFactory::~WebRtcSessionDescriptionFactory() {
+  RTC_DCHECK_RUN_ON(signaling_thread_);
+
+  // Fail any requests that were asked for before identity generation completed.
+  FailPendingRequests(kFailedDueToSessionShutdown);
+
+  // Process all pending notifications. If we don't do this, requests will
+  // linger and never learn whether they succeeded or failed.
+  // The posted tasks that would normally run them are guarded by
+  // `weak_factory_` and are therefore cancelled by this destructor; without
+  // draining here the observers would never be notified. (The guard exists so
+  // the tasks cannot fire after the peer connection is destroyed, which would
+  // be surprising.)
+  while (!callbacks_.empty()) {
+    std::move(callbacks_.front())();
+    callbacks_.pop();
+  }
+}
+
+// Creates an offer asynchronously; the result (or failure) is delivered to
+// `observer` via a task posted to the signaling thread. If certificate
+// generation is still pending, the request is queued until SetCertificate()
+// or OnCertificateRequestFailed() runs. Note: `options` is currently unused
+// here; the already-translated `session_options` drive the offer.
+void WebRtcSessionDescriptionFactory::CreateOffer(
+    CreateSessionDescriptionObserver* observer,
+    const PeerConnectionInterface::RTCOfferAnswerOptions& options,
+    const cricket::MediaSessionOptions& session_options) {
+  RTC_DCHECK_RUN_ON(signaling_thread_);
+  std::string error = "CreateOffer";
+  if (certificate_request_state_ == CERTIFICATE_FAILED) {
+    error += kFailedDueToIdentityFailed;
+    PostCreateSessionDescriptionFailed(
+        observer, RTCError(RTCErrorType::INTERNAL_ERROR, std::move(error)));
+    return;
+  }
+
+  if (!ValidMediaSessionOptions(session_options)) {
+    error += " called with invalid session options";
+    PostCreateSessionDescriptionFailed(
+        observer, RTCError(RTCErrorType::INTERNAL_ERROR, std::move(error)));
+    return;
+  }
+
+  CreateSessionDescriptionRequest request(
+      CreateSessionDescriptionRequest::kOffer, observer, session_options);
+  if (certificate_request_state_ == CERTIFICATE_WAITING) {
+    // Certificate generation still in flight; serviced by SetCertificate().
+    create_session_description_requests_.push(request);
+  } else {
+    RTC_DCHECK(certificate_request_state_ == CERTIFICATE_SUCCEEDED ||
+               certificate_request_state_ == CERTIFICATE_NOT_NEEDED);
+    InternalCreateOffer(request);
+  }
+}
+
+// Creates an answer asynchronously; the result (or failure) is delivered to
+// `observer` via a task posted to the signaling thread. Fails up front when
+// identity generation failed, when there is no remote description, or when
+// the remote description is not an offer. Queued while a certificate is
+// still being generated, mirroring CreateOffer().
+void WebRtcSessionDescriptionFactory::CreateAnswer(
+    CreateSessionDescriptionObserver* observer,
+    const cricket::MediaSessionOptions& session_options) {
+  // Consistency with CreateOffer(): everything below reads or writes
+  // signaling-thread state (`certificate_request_state_`, the request queue).
+  RTC_DCHECK_RUN_ON(signaling_thread_);
+  std::string error = "CreateAnswer";
+  if (certificate_request_state_ == CERTIFICATE_FAILED) {
+    error += kFailedDueToIdentityFailed;
+    PostCreateSessionDescriptionFailed(
+        observer, RTCError(RTCErrorType::INTERNAL_ERROR, std::move(error)));
+    return;
+  }
+  if (!sdp_info_->remote_description()) {
+    error += " can't be called before SetRemoteDescription.";
+    PostCreateSessionDescriptionFailed(
+        observer, RTCError(RTCErrorType::INTERNAL_ERROR, std::move(error)));
+    return;
+  }
+  if (sdp_info_->remote_description()->GetType() != SdpType::kOffer) {
+    error += " failed because remote_description is not an offer.";
+    PostCreateSessionDescriptionFailed(
+        observer, RTCError(RTCErrorType::INTERNAL_ERROR, std::move(error)));
+    return;
+  }
+
+  if (!ValidMediaSessionOptions(session_options)) {
+    error += " called with invalid session options.";
+    PostCreateSessionDescriptionFailed(
+        observer, RTCError(RTCErrorType::INTERNAL_ERROR, std::move(error)));
+    return;
+  }
+
+  CreateSessionDescriptionRequest request(
+      CreateSessionDescriptionRequest::kAnswer, observer, session_options);
+  if (certificate_request_state_ == CERTIFICATE_WAITING) {
+    // Certificate generation still in flight; serviced by SetCertificate().
+    create_session_description_requests_.push(request);
+  } else {
+    RTC_DCHECK(certificate_request_state_ == CERTIFICATE_SUCCEEDED ||
+               certificate_request_state_ == CERTIFICATE_NOT_NEEDED);
+    InternalCreateAnswer(request);
+  }
+}
+
+// Forwards the SDES security policy to the media session factory.
+void WebRtcSessionDescriptionFactory::SetSdesPolicy(
+    cricket::SecurePolicy secure_policy) {
+  session_desc_factory_.set_secure(secure_policy);
+}
+
+// Returns the SDES security policy currently set on the session factory.
+cricket::SecurePolicy WebRtcSessionDescriptionFactory::SdesPolicy() const {
+  return session_desc_factory_.secure();
+}
+
+// Builds the offer for `request` and posts success/failure to its observer.
+// Only called once any required certificate is available.
+void WebRtcSessionDescriptionFactory::InternalCreateOffer(
+    CreateSessionDescriptionRequest request) {
+  if (sdp_info_->local_description()) {
+    // If the needs-ice-restart flag is set as described by JSEP, we should
+    // generate an offer with a new ufrag/password to trigger an ICE restart.
+    for (cricket::MediaDescriptionOptions& options :
+         request.options.media_description_options) {
+      if (sdp_info_->NeedsIceRestart(options.mid)) {
+        options.transport_options.ice_restart = true;
+      }
+    }
+  }
+
+  // Pass the current local description (if any) so the factory can keep
+  // existing mids/ordering stable across renegotiation.
+  auto result = session_desc_factory_.CreateOfferOrError(
+      request.options, sdp_info_->local_description()
+                           ? sdp_info_->local_description()->description()
+                           : nullptr);
+  if (!result.ok()) {
+    PostCreateSessionDescriptionFailed(request.observer.get(), result.error());
+    return;
+  }
+  std::unique_ptr<cricket::SessionDescription> desc = std::move(result.value());
+  RTC_CHECK(desc);
+
+  // RFC 3264
+  // When issuing an offer that modifies the session,
+  // the "o=" line of the new SDP MUST be identical to that in the
+  // previous SDP, except that the version in the origin field MUST
+  // increment by one from the previous SDP.
+
+  // Just increase the version number by one each time when a new offer
+  // is created regardless if it's identical to the previous one or not.
+  // The `session_version_` is a uint64_t, the wrap around should not happen.
+  RTC_DCHECK(session_version_ + 1 > session_version_);
+  auto offer = std::make_unique<JsepSessionDescription>(
+      SdpType::kOffer, std::move(desc), session_id_,
+      rtc::ToString(session_version_++));
+  if (sdp_info_->local_description()) {
+    // Carry over previously gathered candidates, except for m= sections that
+    // are doing an ICE restart (their candidates are obsolete).
+    for (const cricket::MediaDescriptionOptions& options :
+         request.options.media_description_options) {
+      if (!options.transport_options.ice_restart) {
+        CopyCandidatesFromSessionDescription(sdp_info_->local_description(),
+                                             options.mid, offer.get());
+      }
+    }
+  }
+  PostCreateSessionDescriptionSucceeded(request.observer.get(),
+                                        std::move(offer));
+}
+
+// Builds the answer for `request` and posts success/failure to its observer.
+// Only called once any required certificate is available.
+void WebRtcSessionDescriptionFactory::InternalCreateAnswer(
+    CreateSessionDescriptionRequest request) {
+  if (sdp_info_->remote_description()) {
+    for (cricket::MediaDescriptionOptions& options :
+         request.options.media_description_options) {
+      // According to http://tools.ietf.org/html/rfc5245#section-9.2.1.1
+      // an answer should also contain new ICE ufrag and password if an offer
+      // has been received with new ufrag and password.
+      options.transport_options.ice_restart =
+          sdp_info_->IceRestartPending(options.mid);
+      // We should pass the current DTLS role to the transport description
+      // factory, if there is already an existing ongoing session.
+      absl::optional<rtc::SSLRole> dtls_role =
+          sdp_info_->GetDtlsRole(options.mid);
+      if (dtls_role) {
+        options.transport_options.prefer_passive_role =
+            (rtc::SSL_SERVER == *dtls_role);
+      }
+    }
+  }
+
+  auto result = session_desc_factory_.CreateAnswerOrError(
+      sdp_info_->remote_description()
+          ? sdp_info_->remote_description()->description()
+          : nullptr,
+      request.options,
+      sdp_info_->local_description()
+          ? sdp_info_->local_description()->description()
+          : nullptr);
+  if (!result.ok()) {
+    PostCreateSessionDescriptionFailed(request.observer.get(), result.error());
+    return;
+  }
+  std::unique_ptr<cricket::SessionDescription> desc = std::move(result.value());
+  RTC_CHECK(desc);
+
+  // RFC 3264
+  // If the answer is different from the offer in any way (different IP
+  // addresses, ports, etc.), the origin line MUST be different in the answer.
+  // In that case, the version number in the "o=" line of the answer is
+  // unrelated to the version number in the o line of the offer.
+  // Get a new version number by increasing the `session_version_answer_`.
+  // The `session_version_` is a uint64_t, the wrap around should not happen.
+  RTC_DCHECK(session_version_ + 1 > session_version_);
+  auto answer = std::make_unique<JsepSessionDescription>(
+      SdpType::kAnswer, std::move(desc), session_id_,
+      rtc::ToString(session_version_++));
+  if (sdp_info_->local_description()) {
+    // Include all local ICE candidates in the SessionDescription unless
+    // the remote peer has requested an ICE restart.
+    for (const cricket::MediaDescriptionOptions& options :
+         request.options.media_description_options) {
+      if (!options.transport_options.ice_restart) {
+        CopyCandidatesFromSessionDescription(sdp_info_->local_description(),
+                                             options.mid, answer.get());
+      }
+    }
+  }
+  PostCreateSessionDescriptionSucceeded(request.observer.get(),
+                                        std::move(answer));
+}
+
+// Drains the pending-request queue, posting an INTERNAL_ERROR built from the
+// request type ("CreateOffer"/"CreateAnswer") plus `reason` to each observer.
+void WebRtcSessionDescriptionFactory::FailPendingRequests(
+    const std::string& reason) {
+  RTC_DCHECK_RUN_ON(signaling_thread_);
+  while (!create_session_description_requests_.empty()) {
+    const CreateSessionDescriptionRequest& request =
+        create_session_description_requests_.front();
+    PostCreateSessionDescriptionFailed(
+        request.observer.get(),
+        RTCError(RTCErrorType::INTERNAL_ERROR,
+                 ((request.type == CreateSessionDescriptionRequest::kOffer)
+                      ? "CreateOffer"
+                      : "CreateAnswer") +
+                     reason));
+    create_session_description_requests_.pop();
+  }
+}
+
+// Queues OnFailure(error) for `observer` via Post(); the scoped_refptr
+// capture keeps the observer alive until the callback runs (or is drained by
+// the destructor).
+void WebRtcSessionDescriptionFactory::PostCreateSessionDescriptionFailed(
+    CreateSessionDescriptionObserver* observer,
+    RTCError error) {
+  Post([observer =
+            rtc::scoped_refptr<CreateSessionDescriptionObserver>(observer),
+        error]() mutable { observer->OnFailure(error); });
+  RTC_LOG(LS_ERROR) << "CreateSessionDescription failed: " << error.message();
+}
+
+// Queues OnSuccess(description) for `observer` via Post(). Ownership of the
+// description is transferred to the observer (release() follows the raw-
+// pointer OnSuccess contract).
+void WebRtcSessionDescriptionFactory::PostCreateSessionDescriptionSucceeded(
+    CreateSessionDescriptionObserver* observer,
+    std::unique_ptr<SessionDescriptionInterface> description) {
+  Post([observer =
+            rtc::scoped_refptr<CreateSessionDescriptionObserver>(observer),
+        description = std::move(description)]() mutable {
+    observer->OnSuccess(description.release());
+  });
+}
+
+// Enqueues `callback` and posts a weak-ptr-guarded task to run the queue's
+// front entry on the signaling thread. Keeping the callbacks in `callbacks_`
+// (rather than in the posted task) lets the destructor run any that are
+// still pending, so observers are always notified.
+void WebRtcSessionDescriptionFactory::Post(
+    absl::AnyInvocable<void() &&> callback) {
+  RTC_DCHECK_RUN_ON(signaling_thread_);
+  callbacks_.push(std::move(callback));
+  signaling_thread_->PostTask([weak_ptr = weak_factory_.GetWeakPtr()] {
+    if (weak_ptr) {
+      auto& callbacks = weak_ptr->callbacks_;
+      // Callbacks are pushed from the same thread, thus this task should
+      // correspond to the first entry in the queue.
+      RTC_DCHECK(!callbacks.empty());
+      std::move(callbacks.front())();
+      callbacks.pop();
+    }
+  });
+}
+
+// Invoked (via the ctor's weak-ptr callback) when asynchronous certificate
+// generation fails: marks the state CERTIFICATE_FAILED and fails every
+// queued offer/answer request.
+void WebRtcSessionDescriptionFactory::OnCertificateRequestFailed() {
+  RTC_DCHECK_RUN_ON(signaling_thread_);
+
+  RTC_LOG(LS_ERROR) << "Asynchronous certificate generation request failed.";
+  certificate_request_state_ = CERTIFICATE_FAILED;
+
+  FailPendingRequests(kFailedDueToIdentityFailed);
+}
+
+// Installs `certificate` (supplied or freshly generated): notifies the
+// owner via `on_certificate_ready_`, hands the certificate to the transport
+// description factory, enables its security, and then services every
+// offer/answer request that was queued while waiting.
+void WebRtcSessionDescriptionFactory::SetCertificate(
+    rtc::scoped_refptr<rtc::RTCCertificate> certificate) {
+  RTC_DCHECK(certificate);
+  RTC_LOG(LS_VERBOSE) << "Setting new certificate.";
+
+  certificate_request_state_ = CERTIFICATE_SUCCEEDED;
+
+  // Notify before draining the queue so the owner sees the certificate
+  // before any descriptions built with it.
+  on_certificate_ready_(certificate);
+
+  transport_desc_factory_.set_certificate(std::move(certificate));
+  transport_desc_factory_.set_secure(cricket::SEC_ENABLED);
+
+  while (!create_session_description_requests_.empty()) {
+    if (create_session_description_requests_.front().type ==
+        CreateSessionDescriptionRequest::kOffer) {
+      InternalCreateOffer(create_session_description_requests_.front());
+    } else {
+      InternalCreateAnswer(create_session_description_requests_.front());
+    }
+    create_session_description_requests_.pop();
+  }
+}
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/webrtc_session_description_factory.h b/third_party/libwebrtc/pc/webrtc_session_description_factory.h
new file mode 100644
index 0000000000..22ead41d9b
--- /dev/null
+++ b/third_party/libwebrtc/pc/webrtc_session_description_factory.h
@@ -0,0 +1,151 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_WEBRTC_SESSION_DESCRIPTION_FACTORY_H_
+#define PC_WEBRTC_SESSION_DESCRIPTION_FACTORY_H_
+
+#include <stdint.h>
+
+#include <functional>
+#include <memory>
+#include <queue>
+#include <string>
+
+#include "absl/functional/any_invocable.h"
+#include "api/jsep.h"
+#include "api/peer_connection_interface.h"
+#include "api/scoped_refptr.h"
+#include "api/task_queue/task_queue_base.h"
+#include "p2p/base/transport_description.h"
+#include "p2p/base/transport_description_factory.h"
+#include "pc/media_session.h"
+#include "pc/sdp_state_provider.h"
+#include "rtc_base/rtc_certificate.h"
+#include "rtc_base/rtc_certificate_generator.h"
+#include "rtc_base/unique_id_generator.h"
+#include "rtc_base/weak_ptr.h"
+
+namespace webrtc {
+// This class is used to create offer/answer session description. Certificates
+// for WebRtcSession/DTLS are either supplied at construction or generated
+// asynchronously. It queues the create offer/answer request until the
+// certificate generation has completed, i.e. when OnCertificateRequestFailed or
+// OnCertificateReady is called.
+class WebRtcSessionDescriptionFactory {
+ public:
+  // Can specify either a `cert_generator` or `certificate` to enable DTLS. If
+  // a certificate generator is given, starts generating the certificate
+  // asynchronously. If a certificate is given, will use that for identifying
+  // over DTLS. If neither is specified, DTLS is disabled.
+  WebRtcSessionDescriptionFactory(
+      ConnectionContext* context,
+      const SdpStateProvider* sdp_info,
+      const std::string& session_id,
+      bool dtls_enabled,
+      std::unique_ptr<rtc::RTCCertificateGeneratorInterface> cert_generator,
+      rtc::scoped_refptr<rtc::RTCCertificate> certificate,
+      std::function<void(const rtc::scoped_refptr<rtc::RTCCertificate>&)>
+          on_certificate_ready,
+      const FieldTrialsView& field_trials);
+  // Fails any still-queued requests and runs all pending observer
+  // notifications before tearing down.
+  ~WebRtcSessionDescriptionFactory();
+
+  WebRtcSessionDescriptionFactory(const WebRtcSessionDescriptionFactory&) =
+      delete;
+  WebRtcSessionDescriptionFactory& operator=(
+      const WebRtcSessionDescriptionFactory&) = delete;
+
+  // Copies candidates for `content_name` from `source_desc` into `dest_desc`,
+  // skipping duplicates. No-op if the content is absent from either side.
+  static void CopyCandidatesFromSessionDescription(
+      const SessionDescriptionInterface* source_desc,
+      const std::string& content_name,
+      SessionDescriptionInterface* dest_desc);
+
+  // Asynchronously create an offer/answer; the observer is notified on the
+  // signaling thread. Requests are queued while certificate generation is
+  // still pending.
+  void CreateOffer(
+      CreateSessionDescriptionObserver* observer,
+      const PeerConnectionInterface::RTCOfferAnswerOptions& options,
+      const cricket::MediaSessionOptions& session_options);
+  void CreateAnswer(CreateSessionDescriptionObserver* observer,
+                    const cricket::MediaSessionOptions& session_options);
+
+  // SDES policy pass-throughs to the media session description factory.
+  void SetSdesPolicy(cricket::SecurePolicy secure_policy);
+  cricket::SecurePolicy SdesPolicy() const;
+
+  void set_enable_encrypted_rtp_header_extensions(bool enable) {
+    session_desc_factory_.set_enable_encrypted_rtp_header_extensions(enable);
+  }
+
+  void set_is_unified_plan(bool is_unified_plan) {
+    session_desc_factory_.set_is_unified_plan(is_unified_plan);
+  }
+
+  // For testing.
+  bool waiting_for_certificate_for_testing() const {
+    return certificate_request_state_ == CERTIFICATE_WAITING;
+  }
+
+ private:
+  // State machine for the (possibly asynchronous) certificate acquisition.
+  enum CertificateRequestState {
+    CERTIFICATE_NOT_NEEDED,
+    CERTIFICATE_WAITING,
+    CERTIFICATE_SUCCEEDED,
+    CERTIFICATE_FAILED,
+  };
+
+  // A queued CreateOffer/CreateAnswer call: what was requested, who to
+  // notify, and the session options to build the description with.
+  struct CreateSessionDescriptionRequest {
+    enum Type {
+      kOffer,
+      kAnswer,
+    };
+
+    CreateSessionDescriptionRequest(Type type,
+                                    CreateSessionDescriptionObserver* observer,
+                                    const cricket::MediaSessionOptions& options)
+        : type(type), observer(observer), options(options) {}
+
+    Type type;
+    rtc::scoped_refptr<CreateSessionDescriptionObserver> observer;
+    cricket::MediaSessionOptions options;
+  };
+
+  // Build the description once a certificate (if needed) is available.
+  void InternalCreateOffer(CreateSessionDescriptionRequest request);
+  void InternalCreateAnswer(CreateSessionDescriptionRequest request);
+  // Posts failure notifications for all pending session description requests.
+  void FailPendingRequests(const std::string& reason);
+  void PostCreateSessionDescriptionFailed(
+      CreateSessionDescriptionObserver* observer,
+      RTCError error);
+  void PostCreateSessionDescriptionSucceeded(
+      CreateSessionDescriptionObserver* observer,
+      std::unique_ptr<SessionDescriptionInterface> description);
+  // Posts `callback` to `signaling_thread_`, and ensures it will be called no
+  // later than in the destructor.
+  void Post(absl::AnyInvocable<void() &&> callback);
+
+  // Completion handlers for asynchronous certificate generation.
+  void OnCertificateRequestFailed();
+  void SetCertificate(rtc::scoped_refptr<rtc::RTCCertificate> certificate);
+
+  // Requests received while certificate_request_state_ == CERTIFICATE_WAITING.
+  std::queue<CreateSessionDescriptionRequest>
+      create_session_description_requests_;
+  TaskQueueBase* const signaling_thread_;
+  cricket::TransportDescriptionFactory transport_desc_factory_;
+  cricket::MediaSessionDescriptionFactory session_desc_factory_;
+  // Monotonically increasing "o=" line version shared by offers and answers.
+  uint64_t session_version_;
+  const std::unique_ptr<rtc::RTCCertificateGeneratorInterface> cert_generator_;
+  const SdpStateProvider* sdp_info_;
+  const std::string session_id_;
+  CertificateRequestState certificate_request_state_;
+  // Observer notifications queued by Post(); drained by posted tasks or,
+  // at the latest, by the destructor.
+  std::queue<absl::AnyInvocable<void() &&>> callbacks_;
+
+  std::function<void(const rtc::scoped_refptr<rtc::RTCCertificate>&)>
+      on_certificate_ready_;
+  // Declared last so it is destroyed first, invalidating weak pointers held
+  // by in-flight tasks/callbacks before other members go away.
+  rtc::WeakPtrFactory<WebRtcSessionDescriptionFactory> weak_factory_{this};
+};
+} // namespace webrtc
+
+#endif // PC_WEBRTC_SESSION_DESCRIPTION_FACTORY_H_