Diffstat (limited to 'third_party/libwebrtc/pc/test')
-rw-r--r--  third_party/libwebrtc/pc/test/DEPS | 5
-rw-r--r--  third_party/libwebrtc/pc/test/android_test_initializer.cc | 51
-rw-r--r--  third_party/libwebrtc/pc/test/android_test_initializer.h | 20
-rw-r--r--  third_party/libwebrtc/pc/test/fake_audio_capture_module.cc | 521
-rw-r--r--  third_party/libwebrtc/pc/test/fake_audio_capture_module.h | 235
-rw-r--r--  third_party/libwebrtc/pc/test/fake_audio_capture_module_unittest.cc | 198
-rw-r--r--  third_party/libwebrtc/pc/test/fake_data_channel_controller.h | 160
-rw-r--r--  third_party/libwebrtc/pc/test/fake_peer_connection_base.h | 379
-rw-r--r--  third_party/libwebrtc/pc/test/fake_peer_connection_for_stats.h | 512
-rw-r--r--  third_party/libwebrtc/pc/test/fake_periodic_video_source.h | 101
-rw-r--r--  third_party/libwebrtc/pc/test/fake_periodic_video_track_source.h | 45
-rw-r--r--  third_party/libwebrtc/pc/test/fake_rtc_certificate_generator.h | 222
-rw-r--r--  third_party/libwebrtc/pc/test/fake_video_track_renderer.h | 33
-rw-r--r--  third_party/libwebrtc/pc/test/fake_video_track_source.h | 54
-rw-r--r--  third_party/libwebrtc/pc/test/frame_generator_capturer_video_track_source.h | 86
-rw-r--r--  third_party/libwebrtc/pc/test/integration_test_helpers.cc | 92
-rw-r--r--  third_party/libwebrtc/pc/test/integration_test_helpers.h | 1929
-rw-r--r--  third_party/libwebrtc/pc/test/mock_channel_interface.h | 73
-rw-r--r--  third_party/libwebrtc/pc/test/mock_data_channel.h | 72
-rw-r--r--  third_party/libwebrtc/pc/test/mock_peer_connection_internal.h | 332
-rw-r--r--  third_party/libwebrtc/pc/test/mock_peer_connection_observers.h | 599
-rw-r--r--  third_party/libwebrtc/pc/test/mock_rtp_receiver_internal.h | 82
-rw-r--r--  third_party/libwebrtc/pc/test/mock_rtp_sender_internal.h | 109
-rw-r--r--  third_party/libwebrtc/pc/test/mock_voice_media_channel.h | 163
-rw-r--r--  third_party/libwebrtc/pc/test/peer_connection_test_wrapper.cc | 356
-rw-r--r--  third_party/libwebrtc/pc/test/peer_connection_test_wrapper.h | 135
-rw-r--r--  third_party/libwebrtc/pc/test/rtc_stats_obtainer.h | 55
-rw-r--r--  third_party/libwebrtc/pc/test/rtp_transport_test_util.h | 78
-rw-r--r--  third_party/libwebrtc/pc/test/srtp_test_util.h | 45
-rw-r--r--  third_party/libwebrtc/pc/test/svc_e2e_tests.cc | 507
-rw-r--r--  third_party/libwebrtc/pc/test/test_sdp_strings.h | 184
31 files changed, 7433 insertions(+), 0 deletions(-)
diff --git a/third_party/libwebrtc/pc/test/DEPS b/third_party/libwebrtc/pc/test/DEPS
new file mode 100644
index 0000000000..33e6d94b25
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/DEPS
@@ -0,0 +1,5 @@
+include_rules = [
+ # Allow include of sdk/android to allow accessing the JVM and Env in tests.
+ "+sdk/android",
+ "+modules/utility/include/jvm_android.h",
+]
diff --git a/third_party/libwebrtc/pc/test/android_test_initializer.cc b/third_party/libwebrtc/pc/test/android_test_initializer.cc
new file mode 100644
index 0000000000..963544cb4b
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/android_test_initializer.cc
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/test/android_test_initializer.h"
+
+#include <jni.h>
+#include <pthread.h>
+#include <stddef.h>
+
+#include "modules/utility/include/jvm_android.h"
+#include "rtc_base/checks.h"
+#include "sdk/android/src/jni/jvm.h"
+// TODO(phoglund): This include is to a target we can't really depend on.
+// We need to either break it out into a smaller target or find some way to
+// not use it.
+#include "rtc_base/ssl_adapter.h"
+
+namespace webrtc {
+
+namespace {
+
+static pthread_once_t g_initialize_once = PTHREAD_ONCE_INIT;
+
+// There can only be one JNI_OnLoad in each binary. Since this is a GTEST
+// C++ runner binary, we want to initialize the same global objects we
+// normally would if this were a Java binary.
+void EnsureInitializedOnce() {
+ RTC_CHECK(::webrtc::jni::GetJVM() != nullptr);
+ JNIEnv* jni = ::webrtc::jni::AttachCurrentThreadIfNeeded();
+ JavaVM* jvm = NULL;
+ RTC_CHECK_EQ(0, jni->GetJavaVM(&jvm));
+
+ RTC_CHECK(rtc::InitializeSSL()) << "Failed to InitializeSSL()";
+
+ webrtc::JVM::Initialize(jvm);
+}
+
+} // anonymous namespace
+
+void InitializeAndroidObjects() {
+ RTC_CHECK_EQ(0, pthread_once(&g_initialize_once, &EnsureInitializedOnce));
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/test/android_test_initializer.h b/third_party/libwebrtc/pc/test/android_test_initializer.h
new file mode 100644
index 0000000000..4181dd286c
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/android_test_initializer.h
@@ -0,0 +1,20 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_TEST_ANDROID_TEST_INITIALIZER_H_
+#define PC_TEST_ANDROID_TEST_INITIALIZER_H_
+
+namespace webrtc {
+
+void InitializeAndroidObjects();
+
+} // namespace webrtc
+
+#endif // PC_TEST_ANDROID_TEST_INITIALIZER_H_
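A hypothetical usage sketch (editor's illustration, not part of this diff): a GTEST runner binary would register the host JVM with WebRTC once, for example from its JNI_OnLoad, and then call webrtc::InitializeAndroidObjects() from test set-up; the call is idempotent thanks to pthread_once. InitGlobalJniVariables() is assumed to be the sdk/android helper that stores the JVM pointer read back by GetJVM()/AttachCurrentThreadIfNeeded() above.

// Editor's sketch; assumed names are noted in the paragraph above.
#include <jni.h>

#include "pc/test/android_test_initializer.h"
#include "sdk/android/src/jni/jvm.h"
#include "test/gtest.h"

// The test runner's JNI entry point: store the JVM so that GetJVM() and
// AttachCurrentThreadIfNeeded() used by EnsureInitializedOnce() can find it.
extern "C" jint JNIEXPORT JNICALL JNI_OnLoad(JavaVM* jvm, void* /*reserved*/) {
  webrtc::jni::InitGlobalJniVariables(jvm);  // Assumed sdk/android helper.
  return JNI_VERSION_1_6;
}

class SomeAndroidPcTest : public ::testing::Test {
 protected:
  void SetUp() override {
    // Safe to call from every test; pthread_once makes it run only once.
    webrtc::InitializeAndroidObjects();
  }
};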
diff --git a/third_party/libwebrtc/pc/test/fake_audio_capture_module.cc b/third_party/libwebrtc/pc/test/fake_audio_capture_module.cc
new file mode 100644
index 0000000000..132328291c
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/fake_audio_capture_module.cc
@@ -0,0 +1,521 @@
+/*
+ * Copyright 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/test/fake_audio_capture_module.h"
+
+#include <string.h>
+
+#include "api/make_ref_counted.h"
+#include "api/units/time_delta.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/time_utils.h"
+
+using ::webrtc::TimeDelta;
+
+// Audio sample value that is high enough that it doesn't occur naturally when
+// frames are being faked. E.g. NetEq will not generate this large sample value
+// unless it has received an audio frame containing a sample of this value.
+// Even simpler buffers would likely just contain audio sample values of 0.
+static const int kHighSampleValue = 10000;
+
+// Constants here are derived by running VoE using a real ADM.
+// The constants correspond to 10ms of mono audio at 44kHz.
+static const int kTimePerFrameMs = 10;
+static const uint8_t kNumberOfChannels = 1;
+static const int kSamplesPerSecond = 44000;
+static const int kTotalDelayMs = 0;
+static const int kClockDriftMs = 0;
+static const uint32_t kMaxVolume = 14392;
+
+FakeAudioCaptureModule::FakeAudioCaptureModule()
+ : audio_callback_(nullptr),
+ recording_(false),
+ playing_(false),
+ play_is_initialized_(false),
+ rec_is_initialized_(false),
+ current_mic_level_(kMaxVolume),
+ started_(false),
+ next_frame_time_(0),
+ frames_received_(0) {
+ process_thread_checker_.Detach();
+}
+
+FakeAudioCaptureModule::~FakeAudioCaptureModule() {
+ if (process_thread_) {
+ process_thread_->Stop();
+ }
+}
+
+rtc::scoped_refptr<FakeAudioCaptureModule> FakeAudioCaptureModule::Create() {
+ auto capture_module = rtc::make_ref_counted<FakeAudioCaptureModule>();
+ if (!capture_module->Initialize()) {
+ return nullptr;
+ }
+ return capture_module;
+}
+
+int FakeAudioCaptureModule::frames_received() const {
+ webrtc::MutexLock lock(&mutex_);
+ return frames_received_;
+}
+
+int32_t FakeAudioCaptureModule::ActiveAudioLayer(
+ AudioLayer* /*audio_layer*/) const {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::RegisterAudioCallback(
+ webrtc::AudioTransport* audio_callback) {
+ webrtc::MutexLock lock(&mutex_);
+ audio_callback_ = audio_callback;
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::Init() {
+ // Initialize is called by the factory method. Safe to ignore this Init call.
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::Terminate() {
+ // Clean up in the destructor. No action here, just success.
+ return 0;
+}
+
+bool FakeAudioCaptureModule::Initialized() const {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+int16_t FakeAudioCaptureModule::PlayoutDevices() {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+int16_t FakeAudioCaptureModule::RecordingDevices() {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::PlayoutDeviceName(
+ uint16_t /*index*/,
+ char /*name*/[webrtc::kAdmMaxDeviceNameSize],
+ char /*guid*/[webrtc::kAdmMaxGuidSize]) {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::RecordingDeviceName(
+ uint16_t /*index*/,
+ char /*name*/[webrtc::kAdmMaxDeviceNameSize],
+ char /*guid*/[webrtc::kAdmMaxGuidSize]) {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetPlayoutDevice(uint16_t /*index*/) {
+ // No playout device, just playing from file. Return success.
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetPlayoutDevice(WindowsDeviceType /*device*/) {
+ if (play_is_initialized_) {
+ return -1;
+ }
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetRecordingDevice(uint16_t /*index*/) {
+ // No recording device, just dropping audio. Return success.
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetRecordingDevice(
+ WindowsDeviceType /*device*/) {
+ if (rec_is_initialized_) {
+ return -1;
+ }
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::PlayoutIsAvailable(bool* /*available*/) {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::InitPlayout() {
+ play_is_initialized_ = true;
+ return 0;
+}
+
+bool FakeAudioCaptureModule::PlayoutIsInitialized() const {
+ return play_is_initialized_;
+}
+
+int32_t FakeAudioCaptureModule::RecordingIsAvailable(bool* /*available*/) {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::InitRecording() {
+ rec_is_initialized_ = true;
+ return 0;
+}
+
+bool FakeAudioCaptureModule::RecordingIsInitialized() const {
+ return rec_is_initialized_;
+}
+
+int32_t FakeAudioCaptureModule::StartPlayout() {
+ if (!play_is_initialized_) {
+ return -1;
+ }
+ {
+ webrtc::MutexLock lock(&mutex_);
+ playing_ = true;
+ }
+ bool start = true;
+ UpdateProcessing(start);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::StopPlayout() {
+ bool start = false;
+ {
+ webrtc::MutexLock lock(&mutex_);
+ playing_ = false;
+ start = ShouldStartProcessing();
+ }
+ UpdateProcessing(start);
+ return 0;
+}
+
+bool FakeAudioCaptureModule::Playing() const {
+ webrtc::MutexLock lock(&mutex_);
+ return playing_;
+}
+
+int32_t FakeAudioCaptureModule::StartRecording() {
+ if (!rec_is_initialized_) {
+ return -1;
+ }
+ {
+ webrtc::MutexLock lock(&mutex_);
+ recording_ = true;
+ }
+ bool start = true;
+ UpdateProcessing(start);
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::StopRecording() {
+ bool start = false;
+ {
+ webrtc::MutexLock lock(&mutex_);
+ recording_ = false;
+ start = ShouldStartProcessing();
+ }
+ UpdateProcessing(start);
+ return 0;
+}
+
+bool FakeAudioCaptureModule::Recording() const {
+ webrtc::MutexLock lock(&mutex_);
+ return recording_;
+}
+
+int32_t FakeAudioCaptureModule::InitSpeaker() {
+ // No speaker, just playing from file. Return success.
+ return 0;
+}
+
+bool FakeAudioCaptureModule::SpeakerIsInitialized() const {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::InitMicrophone() {
+ // No microphone, just playing from file. Return success.
+ return 0;
+}
+
+bool FakeAudioCaptureModule::MicrophoneIsInitialized() const {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::SpeakerVolumeIsAvailable(bool* /*available*/) {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetSpeakerVolume(uint32_t /*volume*/) {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::SpeakerVolume(uint32_t* /*volume*/) const {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::MaxSpeakerVolume(
+ uint32_t* /*max_volume*/) const {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::MinSpeakerVolume(
+ uint32_t* /*min_volume*/) const {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::MicrophoneVolumeIsAvailable(
+ bool* /*available*/) {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetMicrophoneVolume(uint32_t volume) {
+ webrtc::MutexLock lock(&mutex_);
+ current_mic_level_ = volume;
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::MicrophoneVolume(uint32_t* volume) const {
+ webrtc::MutexLock lock(&mutex_);
+ *volume = current_mic_level_;
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::MaxMicrophoneVolume(
+ uint32_t* max_volume) const {
+ *max_volume = kMaxVolume;
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::MinMicrophoneVolume(
+ uint32_t* /*min_volume*/) const {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::SpeakerMuteIsAvailable(bool* /*available*/) {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetSpeakerMute(bool /*enable*/) {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::SpeakerMute(bool* /*enabled*/) const {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::MicrophoneMuteIsAvailable(bool* /*available*/) {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetMicrophoneMute(bool /*enable*/) {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::MicrophoneMute(bool* /*enabled*/) const {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::StereoPlayoutIsAvailable(
+ bool* available) const {
+ // No recording device, just dropping audio. Stereo can be dropped just
+ // as easily as mono.
+ *available = true;
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetStereoPlayout(bool /*enable*/) {
+ // No recording device, just dropping audio. Stereo can be dropped just
+ // as easily as mono.
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::StereoPlayout(bool* /*enabled*/) const {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::StereoRecordingIsAvailable(
+ bool* available) const {
+ // Keep things simple. No stereo recording.
+ *available = false;
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetStereoRecording(bool enable) {
+ if (!enable) {
+ return 0;
+ }
+ return -1;
+}
+
+int32_t FakeAudioCaptureModule::StereoRecording(bool* /*enabled*/) const {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+}
+
+int32_t FakeAudioCaptureModule::PlayoutDelay(uint16_t* delay_ms) const {
+ // No delay since audio frames are dropped.
+ *delay_ms = 0;
+ return 0;
+}
+
+bool FakeAudioCaptureModule::Initialize() {
+ // Set the send buffer samples to a value high enough that it would not
+ // occur on the remote side unless a packet containing a sample of that
+ // magnitude has been sent to it. Note that the audio processing pipeline
+ // will likely distort the original signal.
+ SetSendBuffer(kHighSampleValue);
+ return true;
+}
+
+void FakeAudioCaptureModule::SetSendBuffer(int value) {
+ Sample* buffer_ptr = reinterpret_cast<Sample*>(send_buffer_);
+ const size_t buffer_size_in_samples =
+ sizeof(send_buffer_) / kNumberBytesPerSample;
+ for (size_t i = 0; i < buffer_size_in_samples; ++i) {
+ buffer_ptr[i] = value;
+ }
+}
+
+void FakeAudioCaptureModule::ResetRecBuffer() {
+ memset(rec_buffer_, 0, sizeof(rec_buffer_));
+}
+
+bool FakeAudioCaptureModule::CheckRecBuffer(int value) {
+ const Sample* buffer_ptr = reinterpret_cast<const Sample*>(rec_buffer_);
+ const size_t buffer_size_in_samples =
+ sizeof(rec_buffer_) / kNumberBytesPerSample;
+ for (size_t i = 0; i < buffer_size_in_samples; ++i) {
+ if (buffer_ptr[i] >= value)
+ return true;
+ }
+ return false;
+}
+
+bool FakeAudioCaptureModule::ShouldStartProcessing() {
+ return recording_ || playing_;
+}
+
+void FakeAudioCaptureModule::UpdateProcessing(bool start) {
+ if (start) {
+ if (!process_thread_) {
+ process_thread_ = rtc::Thread::Create();
+ process_thread_->Start();
+ }
+ process_thread_->PostTask([this] { StartProcessP(); });
+ } else {
+ if (process_thread_) {
+ process_thread_->Stop();
+ process_thread_.reset(nullptr);
+ process_thread_checker_.Detach();
+ }
+ webrtc::MutexLock lock(&mutex_);
+ started_ = false;
+ }
+}
+
+void FakeAudioCaptureModule::StartProcessP() {
+ RTC_DCHECK_RUN_ON(&process_thread_checker_);
+ {
+ webrtc::MutexLock lock(&mutex_);
+ if (started_) {
+ // Already started.
+ return;
+ }
+ }
+ ProcessFrameP();
+}
+
+void FakeAudioCaptureModule::ProcessFrameP() {
+ RTC_DCHECK_RUN_ON(&process_thread_checker_);
+ {
+ webrtc::MutexLock lock(&mutex_);
+ if (!started_) {
+ next_frame_time_ = rtc::TimeMillis();
+ started_ = true;
+ }
+
+ // Receive and send frames every kTimePerFrameMs.
+ if (playing_) {
+ ReceiveFrameP();
+ }
+ if (recording_) {
+ SendFrameP();
+ }
+ }
+
+ next_frame_time_ += kTimePerFrameMs;
+ const int64_t current_time = rtc::TimeMillis();
+ const int64_t wait_time =
+ (next_frame_time_ > current_time) ? next_frame_time_ - current_time : 0;
+ process_thread_->PostDelayedTask([this] { ProcessFrameP(); },
+ TimeDelta::Millis(wait_time));
+}
+
+void FakeAudioCaptureModule::ReceiveFrameP() {
+ RTC_DCHECK_RUN_ON(&process_thread_checker_);
+ if (!audio_callback_) {
+ return;
+ }
+ ResetRecBuffer();
+ size_t nSamplesOut = 0;
+ int64_t elapsed_time_ms = 0;
+ int64_t ntp_time_ms = 0;
+ if (audio_callback_->NeedMorePlayData(kNumberSamples, kNumberBytesPerSample,
+ kNumberOfChannels, kSamplesPerSecond,
+ rec_buffer_, nSamplesOut,
+ &elapsed_time_ms, &ntp_time_ms) != 0) {
+ RTC_DCHECK_NOTREACHED();
+ }
+ RTC_CHECK(nSamplesOut == kNumberSamples);
+
+ // SetSendBuffer() ensures that after decoding, the audio buffer should
+ // contain samples of similar magnitude (there is likely to be some
+ // distortion due to the audio pipeline). If one sample is detected to
+ // have the same or greater magnitude somewhere in the frame, an actual frame
+ // has been received from the remote side (i.e. faked frames are not being
+ // pulled).
+ if (CheckRecBuffer(kHighSampleValue)) {
+ ++frames_received_;
+ }
+}
+
+void FakeAudioCaptureModule::SendFrameP() {
+ RTC_DCHECK_RUN_ON(&process_thread_checker_);
+ if (!audio_callback_) {
+ return;
+ }
+ bool key_pressed = false;
+ uint32_t current_mic_level = current_mic_level_;
+ if (audio_callback_->RecordedDataIsAvailable(
+ send_buffer_, kNumberSamples, kNumberBytesPerSample,
+ kNumberOfChannels, kSamplesPerSecond, kTotalDelayMs, kClockDriftMs,
+ current_mic_level, key_pressed, current_mic_level) != 0) {
+ RTC_DCHECK_NOTREACHED();
+ }
+ current_mic_level_ = current_mic_level;
+}
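A consistency note (editor's illustration, not part of this diff): with kSamplesPerSecond = 44000 and kTimePerFrameMs = 10 above, one frame is 440 samples, which matches FakeAudioCaptureModule::kNumberSamples declared in the header below. A compile-time sketch of that relationship:

#include "pc/test/fake_audio_capture_module.h"

// 44000 samples/s * 10 ms / 1000 ms-per-s == 440 samples per frame.
static_assert(FakeAudioCaptureModule::kNumberSamples == 44000 * 10 / 1000,
              "one frame is 10 ms of mono audio at 44 kHz");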
diff --git a/third_party/libwebrtc/pc/test/fake_audio_capture_module.h b/third_party/libwebrtc/pc/test/fake_audio_capture_module.h
new file mode 100644
index 0000000000..84ddacb26f
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/fake_audio_capture_module.h
@@ -0,0 +1,235 @@
+/*
+ * Copyright 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This class implements an AudioCaptureModule that can be used to detect if
+// audio is being received properly if it is fed by another AudioCaptureModule
+// in some arbitrary audio pipeline where they are connected. It does not play
+// out or record any audio so it does not need access to any hardware and can
+// therefore be used in the gtest testing framework.
+
+// Note: A 'P' suffix on a function name indicates that it should only be
+// called by the processing thread.
+
+#ifndef PC_TEST_FAKE_AUDIO_CAPTURE_MODULE_H_
+#define PC_TEST_FAKE_AUDIO_CAPTURE_MODULE_H_
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <memory>
+
+#include "api/scoped_refptr.h"
+#include "api/sequence_checker.h"
+#include "modules/audio_device/include/audio_device.h"
+#include "modules/audio_device/include/audio_device_defines.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/thread_annotations.h"
+
+namespace rtc {
+class Thread;
+} // namespace rtc
+
+class FakeAudioCaptureModule : public webrtc::AudioDeviceModule {
+ public:
+ typedef uint16_t Sample;
+
+ // The values for the following constants have been derived by running VoE
+ // using a real ADM. The constants correspond to 10ms of mono audio at 44kHz.
+ static const size_t kNumberSamples = 440;
+ static const size_t kNumberBytesPerSample = sizeof(Sample);
+
+ // Creates a FakeAudioCaptureModule or returns NULL on failure.
+ static rtc::scoped_refptr<FakeAudioCaptureModule> Create();
+
+ // Returns the number of frames that have been successfully pulled by the
+ // instance. Note that correctly detecting success can only be done if the
+ // pulled frame was generated/pushed from a FakeAudioCaptureModule.
+ int frames_received() const RTC_LOCKS_EXCLUDED(mutex_);
+
+ int32_t ActiveAudioLayer(AudioLayer* audio_layer) const override;
+
+ // Note: Calling this method from a callback may result in deadlock.
+ int32_t RegisterAudioCallback(webrtc::AudioTransport* audio_callback) override
+ RTC_LOCKS_EXCLUDED(mutex_);
+
+ int32_t Init() override;
+ int32_t Terminate() override;
+ bool Initialized() const override;
+
+ int16_t PlayoutDevices() override;
+ int16_t RecordingDevices() override;
+ int32_t PlayoutDeviceName(uint16_t index,
+ char name[webrtc::kAdmMaxDeviceNameSize],
+ char guid[webrtc::kAdmMaxGuidSize]) override;
+ int32_t RecordingDeviceName(uint16_t index,
+ char name[webrtc::kAdmMaxDeviceNameSize],
+ char guid[webrtc::kAdmMaxGuidSize]) override;
+
+ int32_t SetPlayoutDevice(uint16_t index) override;
+ int32_t SetPlayoutDevice(WindowsDeviceType device) override;
+ int32_t SetRecordingDevice(uint16_t index) override;
+ int32_t SetRecordingDevice(WindowsDeviceType device) override;
+
+ int32_t PlayoutIsAvailable(bool* available) override;
+ int32_t InitPlayout() override;
+ bool PlayoutIsInitialized() const override;
+ int32_t RecordingIsAvailable(bool* available) override;
+ int32_t InitRecording() override;
+ bool RecordingIsInitialized() const override;
+
+ int32_t StartPlayout() RTC_LOCKS_EXCLUDED(mutex_) override;
+ int32_t StopPlayout() RTC_LOCKS_EXCLUDED(mutex_) override;
+ bool Playing() const RTC_LOCKS_EXCLUDED(mutex_) override;
+ int32_t StartRecording() RTC_LOCKS_EXCLUDED(mutex_) override;
+ int32_t StopRecording() RTC_LOCKS_EXCLUDED(mutex_) override;
+ bool Recording() const RTC_LOCKS_EXCLUDED(mutex_) override;
+
+ int32_t InitSpeaker() override;
+ bool SpeakerIsInitialized() const override;
+ int32_t InitMicrophone() override;
+ bool MicrophoneIsInitialized() const override;
+
+ int32_t SpeakerVolumeIsAvailable(bool* available) override;
+ int32_t SetSpeakerVolume(uint32_t volume) override;
+ int32_t SpeakerVolume(uint32_t* volume) const override;
+ int32_t MaxSpeakerVolume(uint32_t* max_volume) const override;
+ int32_t MinSpeakerVolume(uint32_t* min_volume) const override;
+
+ int32_t MicrophoneVolumeIsAvailable(bool* available) override;
+ int32_t SetMicrophoneVolume(uint32_t volume)
+ RTC_LOCKS_EXCLUDED(mutex_) override;
+ int32_t MicrophoneVolume(uint32_t* volume) const
+ RTC_LOCKS_EXCLUDED(mutex_) override;
+ int32_t MaxMicrophoneVolume(uint32_t* max_volume) const override;
+
+ int32_t MinMicrophoneVolume(uint32_t* min_volume) const override;
+
+ int32_t SpeakerMuteIsAvailable(bool* available) override;
+ int32_t SetSpeakerMute(bool enable) override;
+ int32_t SpeakerMute(bool* enabled) const override;
+
+ int32_t MicrophoneMuteIsAvailable(bool* available) override;
+ int32_t SetMicrophoneMute(bool enable) override;
+ int32_t MicrophoneMute(bool* enabled) const override;
+
+ int32_t StereoPlayoutIsAvailable(bool* available) const override;
+ int32_t SetStereoPlayout(bool enable) override;
+ int32_t StereoPlayout(bool* enabled) const override;
+ int32_t StereoRecordingIsAvailable(bool* available) const override;
+ int32_t SetStereoRecording(bool enable) override;
+ int32_t StereoRecording(bool* enabled) const override;
+
+ int32_t PlayoutDelay(uint16_t* delay_ms) const override;
+
+ bool BuiltInAECIsAvailable() const override { return false; }
+ int32_t EnableBuiltInAEC(bool enable) override { return -1; }
+ bool BuiltInAGCIsAvailable() const override { return false; }
+ int32_t EnableBuiltInAGC(bool enable) override { return -1; }
+ bool BuiltInNSIsAvailable() const override { return false; }
+ int32_t EnableBuiltInNS(bool enable) override { return -1; }
+
+ int32_t GetPlayoutUnderrunCount() const override { return -1; }
+
+ absl::optional<webrtc::AudioDeviceModule::Stats> GetStats() const override {
+ return webrtc::AudioDeviceModule::Stats();
+ }
+#if defined(WEBRTC_IOS)
+ int GetPlayoutAudioParameters(
+ webrtc::AudioParameters* params) const override {
+ return -1;
+ }
+ int GetRecordAudioParameters(webrtc::AudioParameters* params) const override {
+ return -1;
+ }
+#endif // WEBRTC_IOS
+
+ // End of functions inherited from webrtc::AudioDeviceModule.
+
+ protected:
+ // The constructor is protected because the class needs to be created as a
+ // reference counted object (for memory management reasons). It could be
+ // exposed, in which case the burden of proper instantiation would be put on
+ // the creator of a FakeAudioCaptureModule instance. To create an instance of
+ // this class, use the Create(..) API.
+ FakeAudioCaptureModule();
+ // The destructor is protected because it is reference counted and should not
+ // be deleted directly.
+ virtual ~FakeAudioCaptureModule();
+
+ private:
+ // Initializes the state of the FakeAudioCaptureModule. This API is called on
+ // creation by the Create() API.
+ bool Initialize();
+ // SetSendBuffer() sets all samples in send_buffer_ to `value`.
+ void SetSendBuffer(int value);
+ // Resets rec_buffer_. I.e., sets all rec_buffer_ samples to 0.
+ void ResetRecBuffer();
+ // Returns true if rec_buffer_ contains one or more samples greater than or
+ // equal to `value`.
+ bool CheckRecBuffer(int value);
+
+ // Returns true if recording or playback has been enabled/started, false
+ // otherwise.
+ bool ShouldStartProcessing() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
+
+ // Starts or stops the pushing and pulling of audio frames.
+ void UpdateProcessing(bool start) RTC_LOCKS_EXCLUDED(mutex_);
+
+ // Starts the periodic calling of ProcessFrame() in a thread safe way.
+ void StartProcessP();
+ // Periodically called function that ensures frames are pulled and pushed
+ // if enabled/started.
+ void ProcessFrameP() RTC_LOCKS_EXCLUDED(mutex_);
+ // Pulls frames from the registered webrtc::AudioTransport.
+ void ReceiveFrameP() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
+ // Pushes frames to the registered webrtc::AudioTransport.
+ void SendFrameP() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
+
+ // Callback for playout and recording.
+ webrtc::AudioTransport* audio_callback_ RTC_GUARDED_BY(mutex_);
+
+ bool recording_ RTC_GUARDED_BY(
+ mutex_); // True when audio is being pushed from the instance.
+ bool playing_ RTC_GUARDED_BY(
+ mutex_); // True when audio is being pulled by the instance.
+
+ bool play_is_initialized_; // True when the instance is ready to pull audio.
+ bool rec_is_initialized_; // True when the instance is ready to push audio.
+
+ // Input to and output from RecordedDataIsAvailable(..) makes it possible to
+ // modify the current mic level. The implementation does not care about the
+ // mic level so it just feeds back what it receives.
+ uint32_t current_mic_level_ RTC_GUARDED_BY(mutex_);
+
+ // next_frame_time_ is updated in a non-drifting manner to indicate the wall
+ // clock time at which the next frame should be generated and received.
+ // started_ ensures next_frame_time_ is initialized properly on the first call.
+ bool started_ RTC_GUARDED_BY(mutex_);
+ int64_t next_frame_time_ RTC_GUARDED_BY(process_thread_checker_);
+
+ std::unique_ptr<rtc::Thread> process_thread_;
+
+ // Buffer for storing samples received from the webrtc::AudioTransport.
+ char rec_buffer_[kNumberSamples * kNumberBytesPerSample];
+ // Buffer for samples to send to the webrtc::AudioTransport.
+ char send_buffer_[kNumberSamples * kNumberBytesPerSample];
+
+ // Counter of frames received that have samples of high enough amplitude to
+ // indicate that the frames are not faked somewhere in the audio pipeline
+ // (e.g. by a jitter buffer).
+ int frames_received_;
+
+ // Protects variables that are accessed from process_thread_ and
+ // the main thread.
+ mutable webrtc::Mutex mutex_;
+ webrtc::SequenceChecker process_thread_checker_;
+};
+
+#endif // PC_TEST_FAKE_AUDIO_CAPTURE_MODULE_H_
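A hypothetical wiring sketch (editor's illustration, not part of this diff): tests typically hand this fake ADM to CreatePeerConnectionFactory() so that no audio hardware is touched. The factory signature shown is assumed to match api/create_peerconnection_factory.h and has grown optional parameters over time; CreateTestPcFactory is a made-up helper name.

#include "api/audio_codecs/builtin_audio_decoder_factory.h"
#include "api/audio_codecs/builtin_audio_encoder_factory.h"
#include "api/create_peerconnection_factory.h"
#include "pc/test/fake_audio_capture_module.h"

rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> CreateTestPcFactory(
    rtc::Thread* network_thread,
    rtc::Thread* worker_thread,
    rtc::Thread* signaling_thread) {
  // The fake ADM pulls and pushes frames on its own thread; no real device.
  rtc::scoped_refptr<FakeAudioCaptureModule> adm =
      FakeAudioCaptureModule::Create();
  return webrtc::CreatePeerConnectionFactory(
      network_thread, worker_thread, signaling_thread, adm,
      webrtc::CreateBuiltinAudioEncoderFactory(),
      webrtc::CreateBuiltinAudioDecoderFactory(),
      /*video_encoder_factory=*/nullptr, /*video_decoder_factory=*/nullptr,
      /*audio_mixer=*/nullptr, /*audio_processing=*/nullptr);
}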
diff --git a/third_party/libwebrtc/pc/test/fake_audio_capture_module_unittest.cc b/third_party/libwebrtc/pc/test/fake_audio_capture_module_unittest.cc
new file mode 100644
index 0000000000..64141b13a9
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/fake_audio_capture_module_unittest.cc
@@ -0,0 +1,198 @@
+/*
+ * Copyright 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/test/fake_audio_capture_module.h"
+
+#include <string.h>
+
+#include <algorithm>
+
+#include "api/scoped_refptr.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "test/gtest.h"
+
+class FakeAdmTest : public ::testing::Test, public webrtc::AudioTransport {
+ protected:
+ static const int kMsInSecond = 1000;
+
+ FakeAdmTest()
+ : push_iterations_(0), pull_iterations_(0), rec_buffer_bytes_(0) {
+ memset(rec_buffer_, 0, sizeof(rec_buffer_));
+ }
+
+ void SetUp() override {
+ fake_audio_capture_module_ = FakeAudioCaptureModule::Create();
+ EXPECT_TRUE(fake_audio_capture_module_.get() != NULL);
+ }
+
+ // Callbacks inherited from webrtc::AudioTransport.
+ // ADM is pushing data.
+ int32_t RecordedDataIsAvailable(const void* audioSamples,
+ const size_t nSamples,
+ const size_t nBytesPerSample,
+ const size_t nChannels,
+ const uint32_t samplesPerSec,
+ const uint32_t totalDelayMS,
+ const int32_t clockDrift,
+ const uint32_t currentMicLevel,
+ const bool keyPressed,
+ uint32_t& newMicLevel) override {
+ webrtc::MutexLock lock(&mutex_);
+ rec_buffer_bytes_ = nSamples * nBytesPerSample;
+ if ((rec_buffer_bytes_ == 0) ||
+ (rec_buffer_bytes_ >
+ FakeAudioCaptureModule::kNumberSamples *
+ FakeAudioCaptureModule::kNumberBytesPerSample)) {
+ ADD_FAILURE();
+ return -1;
+ }
+ memcpy(rec_buffer_, audioSamples, rec_buffer_bytes_);
+ ++push_iterations_;
+ newMicLevel = currentMicLevel;
+ return 0;
+ }
+
+ void PullRenderData(int bits_per_sample,
+ int sample_rate,
+ size_t number_of_channels,
+ size_t number_of_frames,
+ void* audio_data,
+ int64_t* elapsed_time_ms,
+ int64_t* ntp_time_ms) override {}
+
+ // ADM is pulling data.
+ int32_t NeedMorePlayData(const size_t nSamples,
+ const size_t nBytesPerSample,
+ const size_t nChannels,
+ const uint32_t samplesPerSec,
+ void* audioSamples,
+ size_t& nSamplesOut,
+ int64_t* elapsed_time_ms,
+ int64_t* ntp_time_ms) override {
+ webrtc::MutexLock lock(&mutex_);
+ ++pull_iterations_;
+ const size_t audio_buffer_size = nSamples * nBytesPerSample;
+ const size_t bytes_out =
+ RecordedDataReceived()
+ ? CopyFromRecBuffer(audioSamples, audio_buffer_size)
+ : GenerateZeroBuffer(audioSamples, audio_buffer_size);
+ nSamplesOut = bytes_out / nBytesPerSample;
+ *elapsed_time_ms = 0;
+ *ntp_time_ms = 0;
+ return 0;
+ }
+
+ int push_iterations() const {
+ webrtc::MutexLock lock(&mutex_);
+ return push_iterations_;
+ }
+ int pull_iterations() const {
+ webrtc::MutexLock lock(&mutex_);
+ return pull_iterations_;
+ }
+
+ rtc::scoped_refptr<FakeAudioCaptureModule> fake_audio_capture_module_;
+
+ private:
+ bool RecordedDataReceived() const { return rec_buffer_bytes_ != 0; }
+ size_t GenerateZeroBuffer(void* audio_buffer, size_t audio_buffer_size) {
+ memset(audio_buffer, 0, audio_buffer_size);
+ return audio_buffer_size;
+ }
+ size_t CopyFromRecBuffer(void* audio_buffer, size_t audio_buffer_size) {
+ EXPECT_EQ(audio_buffer_size, rec_buffer_bytes_);
+ const size_t min_buffer_size =
+ std::min(audio_buffer_size, rec_buffer_bytes_);
+ memcpy(audio_buffer, rec_buffer_, min_buffer_size);
+ return min_buffer_size;
+ }
+
+ rtc::AutoThread main_thread_;
+
+ mutable webrtc::Mutex mutex_;
+
+ int push_iterations_;
+ int pull_iterations_;
+
+ char rec_buffer_[FakeAudioCaptureModule::kNumberSamples *
+ FakeAudioCaptureModule::kNumberBytesPerSample];
+ size_t rec_buffer_bytes_;
+};
+
+TEST_F(FakeAdmTest, PlayoutTest) {
+ EXPECT_EQ(0, fake_audio_capture_module_->RegisterAudioCallback(this));
+
+ bool stereo_available = false;
+ EXPECT_EQ(0, fake_audio_capture_module_->StereoPlayoutIsAvailable(
+ &stereo_available));
+ EXPECT_TRUE(stereo_available);
+
+ EXPECT_NE(0, fake_audio_capture_module_->StartPlayout());
+ EXPECT_FALSE(fake_audio_capture_module_->PlayoutIsInitialized());
+ EXPECT_FALSE(fake_audio_capture_module_->Playing());
+ EXPECT_EQ(0, fake_audio_capture_module_->StopPlayout());
+
+ EXPECT_EQ(0, fake_audio_capture_module_->InitPlayout());
+ EXPECT_TRUE(fake_audio_capture_module_->PlayoutIsInitialized());
+ EXPECT_FALSE(fake_audio_capture_module_->Playing());
+
+ EXPECT_EQ(0, fake_audio_capture_module_->StartPlayout());
+ EXPECT_TRUE(fake_audio_capture_module_->Playing());
+
+ uint16_t delay_ms = 10;
+ EXPECT_EQ(0, fake_audio_capture_module_->PlayoutDelay(&delay_ms));
+ EXPECT_EQ(0, delay_ms);
+
+ EXPECT_TRUE_WAIT(pull_iterations() > 0, kMsInSecond);
+ EXPECT_GE(0, push_iterations());
+
+ EXPECT_EQ(0, fake_audio_capture_module_->StopPlayout());
+ EXPECT_FALSE(fake_audio_capture_module_->Playing());
+}
+
+TEST_F(FakeAdmTest, RecordTest) {
+ EXPECT_EQ(0, fake_audio_capture_module_->RegisterAudioCallback(this));
+
+ bool stereo_available = false;
+ EXPECT_EQ(0, fake_audio_capture_module_->StereoRecordingIsAvailable(
+ &stereo_available));
+ EXPECT_FALSE(stereo_available);
+
+ EXPECT_NE(0, fake_audio_capture_module_->StartRecording());
+ EXPECT_FALSE(fake_audio_capture_module_->Recording());
+ EXPECT_EQ(0, fake_audio_capture_module_->StopRecording());
+
+ EXPECT_EQ(0, fake_audio_capture_module_->InitRecording());
+ EXPECT_EQ(0, fake_audio_capture_module_->StartRecording());
+ EXPECT_TRUE(fake_audio_capture_module_->Recording());
+
+ EXPECT_TRUE_WAIT(push_iterations() > 0, kMsInSecond);
+ EXPECT_GE(0, pull_iterations());
+
+ EXPECT_EQ(0, fake_audio_capture_module_->StopRecording());
+ EXPECT_FALSE(fake_audio_capture_module_->Recording());
+}
+
+TEST_F(FakeAdmTest, DuplexTest) {
+ EXPECT_EQ(0, fake_audio_capture_module_->RegisterAudioCallback(this));
+
+ EXPECT_EQ(0, fake_audio_capture_module_->InitPlayout());
+ EXPECT_EQ(0, fake_audio_capture_module_->StartPlayout());
+
+ EXPECT_EQ(0, fake_audio_capture_module_->InitRecording());
+ EXPECT_EQ(0, fake_audio_capture_module_->StartRecording());
+
+ EXPECT_TRUE_WAIT(push_iterations() > 0, kMsInSecond);
+ EXPECT_TRUE_WAIT(pull_iterations() > 0, kMsInSecond);
+
+ EXPECT_EQ(0, fake_audio_capture_module_->StopPlayout());
+ EXPECT_EQ(0, fake_audio_capture_module_->StopRecording());
+}
diff --git a/third_party/libwebrtc/pc/test/fake_data_channel_controller.h b/third_party/libwebrtc/pc/test/fake_data_channel_controller.h
new file mode 100644
index 0000000000..bdab7d2ec9
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/fake_data_channel_controller.h
@@ -0,0 +1,160 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_TEST_FAKE_DATA_CHANNEL_CONTROLLER_H_
+#define PC_TEST_FAKE_DATA_CHANNEL_CONTROLLER_H_
+
+#include <set>
+
+#include "pc/sctp_data_channel.h"
+#include "rtc_base/checks.h"
+
+class FakeDataChannelController
+ : public webrtc::SctpDataChannelControllerInterface {
+ public:
+ FakeDataChannelController()
+ : send_blocked_(false),
+ transport_available_(false),
+ ready_to_send_(false),
+ transport_error_(false) {}
+ virtual ~FakeDataChannelController() {}
+
+ bool SendData(int sid,
+ const webrtc::SendDataParams& params,
+ const rtc::CopyOnWriteBuffer& payload,
+ cricket::SendDataResult* result) override {
+ RTC_CHECK(ready_to_send_);
+ RTC_CHECK(transport_available_);
+ if (send_blocked_) {
+ *result = cricket::SDR_BLOCK;
+ return false;
+ }
+
+ if (transport_error_) {
+ *result = cricket::SDR_ERROR;
+ return false;
+ }
+
+ last_sid_ = sid;
+ last_send_data_params_ = params;
+ return true;
+ }
+
+ bool ConnectDataChannel(webrtc::SctpDataChannel* data_channel) override {
+ RTC_CHECK(connected_channels_.find(data_channel) ==
+ connected_channels_.end());
+ if (!transport_available_) {
+ return false;
+ }
+ RTC_LOG(LS_INFO) << "DataChannel connected " << data_channel;
+ connected_channels_.insert(data_channel);
+ return true;
+ }
+
+ void DisconnectDataChannel(webrtc::SctpDataChannel* data_channel) override {
+ RTC_CHECK(connected_channels_.find(data_channel) !=
+ connected_channels_.end());
+ RTC_LOG(LS_INFO) << "DataChannel disconnected " << data_channel;
+ connected_channels_.erase(data_channel);
+ }
+
+ void AddSctpDataStream(int sid) override {
+ RTC_CHECK(sid >= 0);
+ if (!transport_available_) {
+ return;
+ }
+ send_ssrcs_.insert(sid);
+ recv_ssrcs_.insert(sid);
+ }
+
+ void RemoveSctpDataStream(int sid) override {
+ RTC_CHECK(sid >= 0);
+ send_ssrcs_.erase(sid);
+ recv_ssrcs_.erase(sid);
+ // Unlike the real SCTP transport, act as if the closing procedure finished
+ // instantly, using the same snapshot approach as in set_send_blocked() below.
+ for (webrtc::SctpDataChannel* ch : std::set<webrtc::SctpDataChannel*>(
+ connected_channels_.begin(), connected_channels_.end())) {
+ if (connected_channels_.count(ch)) {
+ ch->OnClosingProcedureComplete(sid);
+ }
+ }
+ }
+
+ bool ReadyToSendData() const override { return ready_to_send_; }
+
+ // Set true to emulate the SCTP stream being blocked by congestion control.
+ void set_send_blocked(bool blocked) {
+ send_blocked_ = blocked;
+ if (!blocked) {
+ // Take a snapshot of the connected channels and check to see whether
+ // each value is still in connected_channels_ before calling
+ // OnTransportReady(). This avoids problems where the set gets modified
+ // in response to OnTransportReady().
+ for (webrtc::SctpDataChannel* ch : std::set<webrtc::SctpDataChannel*>(
+ connected_channels_.begin(), connected_channels_.end())) {
+ if (connected_channels_.count(ch)) {
+ ch->OnTransportReady(true);
+ }
+ }
+ }
+ }
+
+ // Set true to emulate transport channel creation, e.g. after
+ // setLocalDescription/setRemoteDescription is called with data content.
+ void set_transport_available(bool available) {
+ transport_available_ = available;
+ }
+
+ // Set true to emulate the transport ReadyToSendData signal when the transport
+ // becomes writable for the first time.
+ void set_ready_to_send(bool ready) {
+ RTC_CHECK(transport_available_);
+ ready_to_send_ = ready;
+ if (ready) {
+ std::set<webrtc::SctpDataChannel*>::iterator it;
+ for (it = connected_channels_.begin(); it != connected_channels_.end();
+ ++it) {
+ (*it)->OnTransportReady(true);
+ }
+ }
+ }
+
+ void set_transport_error() { transport_error_ = true; }
+
+ int last_sid() const { return last_sid_; }
+ const webrtc::SendDataParams& last_send_data_params() const {
+ return last_send_data_params_;
+ }
+
+ bool IsConnected(webrtc::SctpDataChannel* data_channel) const {
+ return connected_channels_.find(data_channel) != connected_channels_.end();
+ }
+
+ bool IsSendStreamAdded(uint32_t stream) const {
+ return send_ssrcs_.find(stream) != send_ssrcs_.end();
+ }
+
+ bool IsRecvStreamAdded(uint32_t stream) const {
+ return recv_ssrcs_.find(stream) != recv_ssrcs_.end();
+ }
+
+ private:
+ int last_sid_;
+ webrtc::SendDataParams last_send_data_params_;
+ bool send_blocked_;
+ bool transport_available_;
+ bool ready_to_send_;
+ bool transport_error_;
+ std::set<webrtc::SctpDataChannel*> connected_channels_;
+ std::set<uint32_t> send_ssrcs_;
+ std::set<uint32_t> recv_ssrcs_;
+};
+#endif // PC_TEST_FAKE_DATA_CHANNEL_CONTROLLER_H_
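A brief usage sketch (editor's illustration, not part of this diff): a test that owns an SctpDataChannel created against this controller can drive the fake's knobs in the order the RTC_CHECKs expect. `channel` is assumed to already exist; its creation is elided here.

#include "pc/test/fake_data_channel_controller.h"

void ExerciseFakeController(FakeDataChannelController& controller,
                            webrtc::SctpDataChannel* channel) {
  controller.set_transport_available(true);  // Emulate transport creation.
  controller.ConnectDataChannel(channel);    // Now in connected_channels_.
  controller.set_ready_to_send(true);        // Fires OnTransportReady(true).
  controller.set_send_blocked(true);         // SendData() now reports SDR_BLOCK.
  controller.set_send_blocked(false);        // Unblock; OnTransportReady again.
}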
diff --git a/third_party/libwebrtc/pc/test/fake_peer_connection_base.h b/third_party/libwebrtc/pc/test/fake_peer_connection_base.h
new file mode 100644
index 0000000000..18b8824c28
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/fake_peer_connection_base.h
@@ -0,0 +1,379 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_TEST_FAKE_PEER_CONNECTION_BASE_H_
+#define PC_TEST_FAKE_PEER_CONNECTION_BASE_H_
+
+#include <map>
+#include <memory>
+#include <set>
+#include <string>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/field_trials_view.h"
+#include "api/sctp_transport_interface.h"
+#include "pc/peer_connection_internal.h"
+#include "test/scoped_key_value_config.h"
+
+namespace webrtc {
+
+// Customized PeerConnection fakes can be created by subclassing
+// FakePeerConnectionBase then overriding the interesting methods. This class
+// takes care of providing default implementations for all the pure virtual
+// functions specified in the interfaces.
+class FakePeerConnectionBase : public PeerConnectionInternal {
+ public:
+ // PeerConnectionInterface implementation.
+
+ rtc::scoped_refptr<StreamCollectionInterface> local_streams() override {
+ return nullptr;
+ }
+
+ rtc::scoped_refptr<StreamCollectionInterface> remote_streams() override {
+ return nullptr;
+ }
+
+ bool AddStream(MediaStreamInterface* stream) override { return false; }
+
+ void RemoveStream(MediaStreamInterface* stream) override {}
+
+ RTCErrorOr<rtc::scoped_refptr<RtpSenderInterface>> AddTrack(
+ rtc::scoped_refptr<MediaStreamTrackInterface> track,
+ const std::vector<std::string>& stream_ids) override {
+ return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented");
+ }
+
+ RTCErrorOr<rtc::scoped_refptr<RtpSenderInterface>> AddTrack(
+ rtc::scoped_refptr<MediaStreamTrackInterface> track,
+ const std::vector<std::string>& stream_ids,
+ const std::vector<RtpEncodingParameters>& init_send_encodings) override {
+ return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented");
+ }
+
+ RTCError RemoveTrackOrError(
+ rtc::scoped_refptr<RtpSenderInterface> sender) override {
+ return RTCError(RTCErrorType::UNSUPPORTED_OPERATION);
+ }
+
+ RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>> AddTransceiver(
+ rtc::scoped_refptr<MediaStreamTrackInterface> track) override {
+ return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented");
+ }
+
+ RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>> AddTransceiver(
+ rtc::scoped_refptr<MediaStreamTrackInterface> track,
+ const RtpTransceiverInit& init) override {
+ return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented");
+ }
+
+ RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>> AddTransceiver(
+ cricket::MediaType media_type) override {
+ return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented");
+ }
+
+ RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>> AddTransceiver(
+ cricket::MediaType media_type,
+ const RtpTransceiverInit& init) override {
+ return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented");
+ }
+
+ rtc::scoped_refptr<RtpSenderInterface> CreateSender(
+ const std::string& kind,
+ const std::string& stream_id) override {
+ return nullptr;
+ }
+
+ std::vector<rtc::scoped_refptr<RtpSenderInterface>> GetSenders()
+ const override {
+ return {};
+ }
+
+ std::vector<rtc::scoped_refptr<RtpReceiverInterface>> GetReceivers()
+ const override {
+ return {};
+ }
+
+ std::vector<rtc::scoped_refptr<RtpTransceiverInterface>> GetTransceivers()
+ const override {
+ return {};
+ }
+
+ bool GetStats(StatsObserver* observer,
+ MediaStreamTrackInterface* track,
+ StatsOutputLevel level) override {
+ return false;
+ }
+
+ void GetStats(RTCStatsCollectorCallback* callback) override {}
+ void GetStats(
+ rtc::scoped_refptr<RtpSenderInterface> selector,
+ rtc::scoped_refptr<RTCStatsCollectorCallback> callback) override {}
+ void GetStats(
+ rtc::scoped_refptr<RtpReceiverInterface> selector,
+ rtc::scoped_refptr<RTCStatsCollectorCallback> callback) override {}
+
+ void ClearStatsCache() override {}
+
+ rtc::scoped_refptr<SctpTransportInterface> GetSctpTransport() const {
+ return nullptr;
+ }
+
+ RTCErrorOr<rtc::scoped_refptr<DataChannelInterface>> CreateDataChannelOrError(
+ const std::string& label,
+ const DataChannelInit* config) override {
+ return RTCError(RTCErrorType::UNSUPPORTED_OPERATION,
+ "Fake function called");
+ }
+
+ const SessionDescriptionInterface* local_description() const override {
+ return nullptr;
+ }
+ const SessionDescriptionInterface* remote_description() const override {
+ return nullptr;
+ }
+
+ const SessionDescriptionInterface* current_local_description()
+ const override {
+ return nullptr;
+ }
+ const SessionDescriptionInterface* current_remote_description()
+ const override {
+ return nullptr;
+ }
+
+ const SessionDescriptionInterface* pending_local_description()
+ const override {
+ return nullptr;
+ }
+ const SessionDescriptionInterface* pending_remote_description()
+ const override {
+ return nullptr;
+ }
+
+ void RestartIce() override {}
+
+ void CreateOffer(CreateSessionDescriptionObserver* observer,
+ const RTCOfferAnswerOptions& options) override {}
+
+ void CreateAnswer(CreateSessionDescriptionObserver* observer,
+ const RTCOfferAnswerOptions& options) override {}
+
+ void SetLocalDescription(SetSessionDescriptionObserver* observer,
+ SessionDescriptionInterface* desc) override {}
+
+ void SetRemoteDescription(SetSessionDescriptionObserver* observer,
+ SessionDescriptionInterface* desc) override {}
+
+ void SetRemoteDescription(
+ std::unique_ptr<SessionDescriptionInterface> desc,
+ rtc::scoped_refptr<SetRemoteDescriptionObserverInterface> observer)
+ override {}
+
+ RTCConfiguration GetConfiguration() override { return RTCConfiguration(); }
+
+ RTCError SetConfiguration(
+ const PeerConnectionInterface::RTCConfiguration& config) override {
+ return RTCError();
+ }
+
+ bool AddIceCandidate(const IceCandidateInterface* candidate) override {
+ return false;
+ }
+
+ bool RemoveIceCandidates(
+ const std::vector<cricket::Candidate>& candidates) override {
+ return false;
+ }
+
+ RTCError SetBitrate(const BitrateSettings& bitrate) override {
+ return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented");
+ }
+
+ void SetAudioPlayout(bool playout) override {}
+
+ void SetAudioRecording(bool recording) override {}
+
+ rtc::scoped_refptr<DtlsTransportInterface> LookupDtlsTransportByMid(
+ const std::string& mid) {
+ return nullptr;
+ }
+
+ SignalingState signaling_state() override { return SignalingState::kStable; }
+
+ IceConnectionState ice_connection_state() override {
+ return IceConnectionState::kIceConnectionNew;
+ }
+
+ IceConnectionState standardized_ice_connection_state() override {
+ return IceConnectionState::kIceConnectionNew;
+ }
+
+ PeerConnectionState peer_connection_state() override {
+ return PeerConnectionState::kNew;
+ }
+
+ IceGatheringState ice_gathering_state() override {
+ return IceGatheringState::kIceGatheringNew;
+ }
+
+ absl::optional<bool> can_trickle_ice_candidates() { return absl::nullopt; }
+
+ bool StartRtcEventLog(std::unique_ptr<RtcEventLogOutput> output,
+ int64_t output_period_ms) override {
+ return false;
+ }
+
+ bool StartRtcEventLog(std::unique_ptr<RtcEventLogOutput> output) override {
+ return false;
+ }
+
+ void StopRtcEventLog() override {}
+
+ void Close() override {}
+
+ // PeerConnectionInternal implementation.
+
+ rtc::Thread* network_thread() const override { return nullptr; }
+ rtc::Thread* worker_thread() const override { return nullptr; }
+ rtc::Thread* signaling_thread() const override { return nullptr; }
+
+ std::string session_id() const override { return ""; }
+
+ bool initial_offerer() const override { return false; }
+
+ std::vector<
+ rtc::scoped_refptr<RtpTransceiverProxyWithInternal<RtpTransceiver>>>
+ GetTransceiversInternal() const override {
+ return {};
+ }
+
+ sigslot::signal1<SctpDataChannel*>& SignalSctpDataChannelCreated() override {
+ return SignalSctpDataChannelCreated_;
+ }
+
+ absl::optional<std::string> sctp_transport_name() const override {
+ return absl::nullopt;
+ }
+
+ absl::optional<std::string> sctp_mid() const override {
+ return absl::nullopt;
+ }
+
+ std::map<std::string, cricket::TransportStats> GetTransportStatsByNames(
+ const std::set<std::string>& transport_names) override {
+ return {};
+ }
+
+ Call::Stats GetCallStats() override { return Call::Stats(); }
+
+ absl::optional<AudioDeviceModule::Stats> GetAudioDeviceStats() override {
+ return absl::nullopt;
+ }
+
+ bool GetLocalCertificate(
+ const std::string& transport_name,
+ rtc::scoped_refptr<rtc::RTCCertificate>* certificate) override {
+ return false;
+ }
+
+ std::unique_ptr<rtc::SSLCertChain> GetRemoteSSLCertChain(
+ const std::string& transport_name) override {
+ return nullptr;
+ }
+
+ bool IceRestartPending(const std::string& content_name) const override {
+ return false;
+ }
+
+ bool NeedsIceRestart(const std::string& content_name) const override {
+ return false;
+ }
+
+ bool GetSslRole(const std::string& content_name,
+ rtc::SSLRole* role) override {
+ return false;
+ }
+ const PeerConnectionInterface::RTCConfiguration* configuration()
+ const override {
+ return nullptr;
+ }
+
+ void ReportSdpBundleUsage(
+ const SessionDescriptionInterface& remote_description) override {}
+
+ PeerConnectionMessageHandler* message_handler() override { return nullptr; }
+ RtpTransmissionManager* rtp_manager() override { return nullptr; }
+ const RtpTransmissionManager* rtp_manager() const override { return nullptr; }
+ bool dtls_enabled() const override { return false; }
+ const PeerConnectionFactoryInterface::Options* options() const override {
+ return nullptr;
+ }
+
+ CryptoOptions GetCryptoOptions() override { return CryptoOptions(); }
+ JsepTransportController* transport_controller_s() override { return nullptr; }
+ JsepTransportController* transport_controller_n() override { return nullptr; }
+ DataChannelController* data_channel_controller() override { return nullptr; }
+ cricket::PortAllocator* port_allocator() override { return nullptr; }
+ LegacyStatsCollector* legacy_stats() override { return nullptr; }
+ PeerConnectionObserver* Observer() const override { return nullptr; }
+ bool GetSctpSslRole(rtc::SSLRole* role) override { return false; }
+ PeerConnectionInterface::IceConnectionState ice_connection_state_internal()
+ override {
+ return PeerConnectionInterface::IceConnectionState::kIceConnectionNew;
+ }
+ void SetIceConnectionState(
+ PeerConnectionInterface::IceConnectionState new_state) override {}
+ void NoteUsageEvent(UsageEvent event) override {}
+ bool IsClosed() const override { return false; }
+ bool IsUnifiedPlan() const override { return true; }
+ bool ValidateBundleSettings(
+ const cricket::SessionDescription* desc,
+ const std::map<std::string, const cricket::ContentGroup*>&
+ bundle_groups_by_mid) override {
+ return false;
+ }
+
+ absl::optional<std::string> GetDataMid() const override {
+ return absl::nullopt;
+ }
+ RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>> AddTransceiver(
+ cricket::MediaType media_type,
+ rtc::scoped_refptr<MediaStreamTrackInterface> track,
+ const RtpTransceiverInit& init,
+ bool fire_callback = true) override {
+ return RTCError(RTCErrorType::INTERNAL_ERROR, "");
+ }
+ void StartSctpTransport(int local_port,
+ int remote_port,
+ int max_message_size) override {}
+
+ void AddRemoteCandidate(const std::string& mid,
+ const cricket::Candidate& candidate) override {}
+
+ Call* call_ptr() override { return nullptr; }
+ bool SrtpRequired() const override { return false; }
+ bool SetupDataChannelTransport_n(const std::string& mid) override {
+ return false;
+ }
+ void TeardownDataChannelTransport_n() override {}
+ void SetSctpDataMid(const std::string& mid) override {}
+ void ResetSctpDataMid() override {}
+
+ const FieldTrialsView& trials() const override { return field_trials_; }
+
+ protected:
+ webrtc::test::ScopedKeyValueConfig field_trials_;
+ sigslot::signal1<SctpDataChannel*> SignalSctpDataChannelCreated_;
+};
+
+} // namespace webrtc
+
+#endif // PC_TEST_FAKE_PEER_CONNECTION_BASE_H_
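The subclassing pattern the class comment describes, as a short sketch (editor's illustration, not part of this diff; the overridden values are arbitrary):

#include "pc/test/fake_peer_connection_base.h"

namespace webrtc {

// A test-local fake that pretends the connection is already closed; every
// other method keeps the default stub behavior from FakePeerConnectionBase.
class ClosedFakePeerConnection : public FakePeerConnectionBase {
 public:
  SignalingState signaling_state() override { return SignalingState::kClosed; }
  bool IsClosed() const override { return true; }
};

}  // namespace webrtc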
diff --git a/third_party/libwebrtc/pc/test/fake_peer_connection_for_stats.h b/third_party/libwebrtc/pc/test/fake_peer_connection_for_stats.h
new file mode 100644
index 0000000000..b771d45a0b
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/fake_peer_connection_for_stats.h
@@ -0,0 +1,512 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_TEST_FAKE_PEER_CONNECTION_FOR_STATS_H_
+#define PC_TEST_FAKE_PEER_CONNECTION_FOR_STATS_H_
+
+#include <map>
+#include <memory>
+#include <set>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "media/base/fake_media_engine.h"
+#include "pc/channel.h"
+#include "pc/stream_collection.h"
+#include "pc/test/fake_data_channel_controller.h"
+#include "pc/test/fake_peer_connection_base.h"
+
+namespace webrtc {
+
+// Fake VoiceMediaChannel where the result of GetStats can be configured.
+class FakeVoiceMediaChannelForStats : public cricket::FakeVoiceMediaChannel {
+ public:
+ explicit FakeVoiceMediaChannelForStats(TaskQueueBase* network_thread)
+ : cricket::FakeVoiceMediaChannel(nullptr,
+ cricket::AudioOptions(),
+ network_thread) {}
+
+ void SetStats(const cricket::VoiceMediaInfo& voice_info) {
+ send_stats_ = cricket::VoiceMediaSendInfo();
+ send_stats_->senders = voice_info.senders;
+ send_stats_->send_codecs = voice_info.send_codecs;
+ receive_stats_ = cricket::VoiceMediaReceiveInfo();
+ receive_stats_->receivers = voice_info.receivers;
+ receive_stats_->receive_codecs = voice_info.receive_codecs;
+ receive_stats_->device_underrun_count = voice_info.device_underrun_count;
+ }
+
+ // VoiceMediaChannel overrides.
+ bool GetSendStats(cricket::VoiceMediaSendInfo* info) override {
+ if (send_stats_) {
+ *info = *send_stats_;
+ return true;
+ }
+ return false;
+ }
+ bool GetReceiveStats(cricket::VoiceMediaReceiveInfo* info,
+ bool get_and_clear_legacy_stats) override {
+ if (receive_stats_) {
+ *info = *receive_stats_;
+ return true;
+ }
+ return false;
+ }
+
+ private:
+ absl::optional<cricket::VoiceMediaSendInfo> send_stats_;
+ absl::optional<cricket::VoiceMediaReceiveInfo> receive_stats_;
+};
+
+// Fake VideoMediaChannel where the result of GetStats can be configured.
+class FakeVideoMediaChannelForStats : public cricket::FakeVideoMediaChannel {
+ public:
+ explicit FakeVideoMediaChannelForStats(TaskQueueBase* network_thread)
+ : cricket::FakeVideoMediaChannel(nullptr,
+ cricket::VideoOptions(),
+ network_thread) {}
+
+ void SetStats(const cricket::VideoMediaInfo& video_info) {
+ send_stats_ = cricket::VideoMediaSendInfo();
+ send_stats_->senders = video_info.senders;
+ send_stats_->aggregated_senders = video_info.aggregated_senders;
+ send_stats_->send_codecs = video_info.send_codecs;
+ receive_stats_ = cricket::VideoMediaReceiveInfo();
+ receive_stats_->receivers = video_info.receivers;
+ receive_stats_->receive_codecs = video_info.receive_codecs;
+ }
+
+ // VideoMediaChannel overrides.
+ bool GetSendStats(cricket::VideoMediaSendInfo* info) override {
+ if (send_stats_) {
+ *info = *send_stats_;
+ return true;
+ }
+ return false;
+ }
+ bool GetReceiveStats(cricket::VideoMediaReceiveInfo* info) override {
+ if (receive_stats_) {
+ *info = *receive_stats_;
+ return true;
+ }
+ return false;
+ }
+
+ private:
+ absl::optional<cricket::VideoMediaSendInfo> send_stats_;
+ absl::optional<cricket::VideoMediaReceiveInfo> receive_stats_;
+};
+
+constexpr bool kDefaultRtcpMuxRequired = true;
+constexpr bool kDefaultSrtpRequired = true;
+
+class VoiceChannelForTesting : public cricket::VoiceChannel {
+ public:
+ VoiceChannelForTesting(rtc::Thread* worker_thread,
+ rtc::Thread* network_thread,
+ rtc::Thread* signaling_thread,
+ std::unique_ptr<cricket::VoiceMediaChannel> channel,
+ const std::string& content_name,
+ bool srtp_required,
+ webrtc::CryptoOptions crypto_options,
+ rtc::UniqueRandomIdGenerator* ssrc_generator,
+ std::string transport_name)
+ : VoiceChannel(worker_thread,
+ network_thread,
+ signaling_thread,
+ std::move(channel),
+ content_name,
+ srtp_required,
+ std::move(crypto_options),
+ ssrc_generator),
+ test_transport_name_(std::move(transport_name)) {}
+
+ private:
+ absl::string_view transport_name() const override {
+ return test_transport_name_;
+ }
+
+ const std::string test_transport_name_;
+};
+
+class VideoChannelForTesting : public cricket::VideoChannel {
+ public:
+ VideoChannelForTesting(rtc::Thread* worker_thread,
+ rtc::Thread* network_thread,
+ rtc::Thread* signaling_thread,
+ std::unique_ptr<cricket::VideoMediaChannel> channel,
+ const std::string& content_name,
+ bool srtp_required,
+ webrtc::CryptoOptions crypto_options,
+ rtc::UniqueRandomIdGenerator* ssrc_generator,
+ std::string transport_name)
+ : VideoChannel(worker_thread,
+ network_thread,
+ signaling_thread,
+ std::move(channel),
+ content_name,
+ srtp_required,
+ std::move(crypto_options),
+ ssrc_generator),
+ test_transport_name_(std::move(transport_name)) {}
+
+ private:
+ absl::string_view transport_name() const override {
+ return test_transport_name_;
+ }
+
+ const std::string test_transport_name_;
+};
+
+// This class is intended to be fed into the StatsCollector and
+// RTCStatsCollector so that the stats functionality can be unit tested.
+// Individual tests can configure this fake as needed to simulate scenarios
+// under which to test the stats collectors.
+class FakePeerConnectionForStats : public FakePeerConnectionBase {
+ public:
+ // TODO(steveanton): Add support for specifying separate threads to test
+ // multi-threading correctness.
+ FakePeerConnectionForStats()
+ : network_thread_(rtc::Thread::Current()),
+ worker_thread_(rtc::Thread::Current()),
+ signaling_thread_(rtc::Thread::Current()),
+ // TODO(hta): remove separate thread variables and use context.
+ dependencies_(MakeDependencies()),
+ context_(ConnectionContext::Create(&dependencies_)),
+ local_streams_(StreamCollection::Create()),
+ remote_streams_(StreamCollection::Create()) {}
+
+ ~FakePeerConnectionForStats() {
+ for (auto transceiver : transceivers_) {
+ transceiver->internal()->ClearChannel();
+ }
+ }
+
+ static PeerConnectionFactoryDependencies MakeDependencies() {
+ PeerConnectionFactoryDependencies dependencies;
+ dependencies.network_thread = rtc::Thread::Current();
+ dependencies.worker_thread = rtc::Thread::Current();
+ dependencies.signaling_thread = rtc::Thread::Current();
+ dependencies.media_engine = std::make_unique<cricket::FakeMediaEngine>();
+ return dependencies;
+ }
+
+ rtc::scoped_refptr<StreamCollection> mutable_local_streams() {
+ return local_streams_;
+ }
+
+ rtc::scoped_refptr<StreamCollection> mutable_remote_streams() {
+ return remote_streams_;
+ }
+
+ rtc::scoped_refptr<RtpSenderInterface> AddSender(
+ rtc::scoped_refptr<RtpSenderInternal> sender) {
+ // TODO(steveanton): Switch tests to use RtpTransceivers directly.
+ auto sender_proxy = RtpSenderProxyWithInternal<RtpSenderInternal>::Create(
+ signaling_thread_, sender);
+ GetOrCreateFirstTransceiverOfType(sender->media_type())
+ ->internal()
+ ->AddSender(sender_proxy);
+ return sender_proxy;
+ }
+
+ void RemoveSender(rtc::scoped_refptr<RtpSenderInterface> sender) {
+ GetOrCreateFirstTransceiverOfType(sender->media_type())
+ ->internal()
+ ->RemoveSender(sender.get());
+ }
+
+ rtc::scoped_refptr<RtpReceiverInterface> AddReceiver(
+ rtc::scoped_refptr<RtpReceiverInternal> receiver) {
+ // TODO(steveanton): Switch tests to use RtpTransceivers directly.
+ auto receiver_proxy =
+ RtpReceiverProxyWithInternal<RtpReceiverInternal>::Create(
+ signaling_thread_, worker_thread_, receiver);
+ GetOrCreateFirstTransceiverOfType(receiver->media_type())
+ ->internal()
+ ->AddReceiver(receiver_proxy);
+ return receiver_proxy;
+ }
+
+ void RemoveReceiver(rtc::scoped_refptr<RtpReceiverInterface> receiver) {
+ GetOrCreateFirstTransceiverOfType(receiver->media_type())
+ ->internal()
+ ->RemoveReceiver(receiver.get());
+ }
+
+ FakeVoiceMediaChannelForStats* AddVoiceChannel(
+ const std::string& mid,
+ const std::string& transport_name,
+ cricket::VoiceMediaInfo initial_stats = cricket::VoiceMediaInfo()) {
+ auto voice_media_channel =
+ std::make_unique<FakeVoiceMediaChannelForStats>(network_thread_);
+ auto* voice_media_channel_ptr = voice_media_channel.get();
+ auto voice_channel = std::make_unique<VoiceChannelForTesting>(
+ worker_thread_, network_thread_, signaling_thread_,
+ std::move(voice_media_channel), mid, kDefaultSrtpRequired,
+ webrtc::CryptoOptions(), context_->ssrc_generator(), transport_name);
+ auto transceiver =
+ GetOrCreateFirstTransceiverOfType(cricket::MEDIA_TYPE_AUDIO)
+ ->internal();
+ if (transceiver->channel()) {
+      // This transceiver already has a channel; create a new one.
+ transceiver =
+ CreateTransceiverOfType(cricket::MEDIA_TYPE_AUDIO)->internal();
+ }
+ RTC_DCHECK(!transceiver->channel());
+ transceiver->SetChannel(std::move(voice_channel),
+ [](const std::string&) { return nullptr; });
+ voice_media_channel_ptr->SetStats(initial_stats);
+ return voice_media_channel_ptr;
+ }
+
+ FakeVideoMediaChannelForStats* AddVideoChannel(
+ const std::string& mid,
+ const std::string& transport_name,
+ cricket::VideoMediaInfo initial_stats = cricket::VideoMediaInfo()) {
+ auto video_media_channel =
+ std::make_unique<FakeVideoMediaChannelForStats>(network_thread_);
+ auto video_media_channel_ptr = video_media_channel.get();
+ auto video_channel = std::make_unique<VideoChannelForTesting>(
+ worker_thread_, network_thread_, signaling_thread_,
+ std::move(video_media_channel), mid, kDefaultSrtpRequired,
+ webrtc::CryptoOptions(), context_->ssrc_generator(), transport_name);
+ auto transceiver =
+ GetOrCreateFirstTransceiverOfType(cricket::MEDIA_TYPE_VIDEO)
+ ->internal();
+ if (transceiver->channel()) {
+      // This transceiver already has a channel; create a new one.
+ transceiver =
+ CreateTransceiverOfType(cricket::MEDIA_TYPE_VIDEO)->internal();
+ }
+ RTC_DCHECK(!transceiver->channel());
+ transceiver->SetChannel(std::move(video_channel),
+ [](const std::string&) { return nullptr; });
+ video_media_channel_ptr->SetStats(initial_stats);
+ return video_media_channel_ptr;
+ }
+
+ void AddSctpDataChannel(const std::string& label) {
+ AddSctpDataChannel(label, InternalDataChannelInit());
+ }
+
+ void AddSctpDataChannel(const std::string& label,
+ const InternalDataChannelInit& init) {
+ // TODO(bugs.webrtc.org/11547): Supply a separate network thread.
+ AddSctpDataChannel(SctpDataChannel::Create(&data_channel_controller_, label,
+ init, rtc::Thread::Current(),
+ rtc::Thread::Current()));
+ }
+
+ void AddSctpDataChannel(rtc::scoped_refptr<SctpDataChannel> data_channel) {
+ sctp_data_channels_.push_back(data_channel);
+ }
+
+ void SetTransportStats(const std::string& transport_name,
+ const cricket::TransportChannelStats& channel_stats) {
+ SetTransportStats(
+ transport_name,
+ std::vector<cricket::TransportChannelStats>{channel_stats});
+ }
+
+ void SetTransportStats(
+ const std::string& transport_name,
+ const std::vector<cricket::TransportChannelStats>& channel_stats_list) {
+ cricket::TransportStats transport_stats;
+ transport_stats.transport_name = transport_name;
+ transport_stats.channel_stats = channel_stats_list;
+ transport_stats_by_name_[transport_name] = transport_stats;
+ }
+
+ void SetCallStats(const Call::Stats& call_stats) { call_stats_ = call_stats; }
+
+ void SetAudioDeviceStats(
+ absl::optional<AudioDeviceModule::Stats> audio_device_stats) {
+ audio_device_stats_ = audio_device_stats;
+ }
+
+ void SetLocalCertificate(
+ const std::string& transport_name,
+ rtc::scoped_refptr<rtc::RTCCertificate> certificate) {
+ local_certificates_by_transport_[transport_name] = certificate;
+ }
+
+ void SetRemoteCertChain(const std::string& transport_name,
+ std::unique_ptr<rtc::SSLCertChain> chain) {
+ remote_cert_chains_by_transport_[transport_name] = std::move(chain);
+ }
+
+ // PeerConnectionInterface overrides.
+
+ rtc::scoped_refptr<StreamCollectionInterface> local_streams() override {
+ return local_streams_;
+ }
+
+ rtc::scoped_refptr<StreamCollectionInterface> remote_streams() override {
+ return remote_streams_;
+ }
+
+ std::vector<rtc::scoped_refptr<RtpSenderInterface>> GetSenders()
+ const override {
+ std::vector<rtc::scoped_refptr<RtpSenderInterface>> senders;
+ for (auto transceiver : transceivers_) {
+ for (auto sender : transceiver->internal()->senders()) {
+ senders.push_back(sender);
+ }
+ }
+ return senders;
+ }
+
+ std::vector<rtc::scoped_refptr<RtpReceiverInterface>> GetReceivers()
+ const override {
+ std::vector<rtc::scoped_refptr<RtpReceiverInterface>> receivers;
+ for (auto transceiver : transceivers_) {
+ for (auto receiver : transceiver->internal()->receivers()) {
+ receivers.push_back(receiver);
+ }
+ }
+ return receivers;
+ }
+
+ // PeerConnectionInternal overrides.
+
+ rtc::Thread* network_thread() const override { return network_thread_; }
+
+ rtc::Thread* worker_thread() const override { return worker_thread_; }
+
+ rtc::Thread* signaling_thread() const override { return signaling_thread_; }
+
+ std::vector<
+ rtc::scoped_refptr<RtpTransceiverProxyWithInternal<RtpTransceiver>>>
+ GetTransceiversInternal() const override {
+ return transceivers_;
+ }
+
+ std::vector<DataChannelStats> GetDataChannelStats() const override {
+ RTC_DCHECK_RUN_ON(signaling_thread());
+ std::vector<DataChannelStats> stats;
+ for (const auto& channel : sctp_data_channels_)
+ stats.push_back(channel->GetStats());
+ return stats;
+ }
+
+ cricket::CandidateStatsList GetPooledCandidateStats() const override {
+ return {};
+ }
+
+ std::map<std::string, cricket::TransportStats> GetTransportStatsByNames(
+ const std::set<std::string>& transport_names) override {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ std::map<std::string, cricket::TransportStats> transport_stats_by_name;
+ for (const std::string& transport_name : transport_names) {
+ transport_stats_by_name[transport_name] =
+ GetTransportStatsByName(transport_name);
+ }
+ return transport_stats_by_name;
+ }
+
+ Call::Stats GetCallStats() override { return call_stats_; }
+
+ absl::optional<AudioDeviceModule::Stats> GetAudioDeviceStats() override {
+ return audio_device_stats_;
+ }
+
+ bool GetLocalCertificate(
+ const std::string& transport_name,
+ rtc::scoped_refptr<rtc::RTCCertificate>* certificate) override {
+ auto it = local_certificates_by_transport_.find(transport_name);
+ if (it != local_certificates_by_transport_.end()) {
+ *certificate = it->second;
+ return true;
+ } else {
+ return false;
+ }
+ }
+
+ std::unique_ptr<rtc::SSLCertChain> GetRemoteSSLCertChain(
+ const std::string& transport_name) override {
+ auto it = remote_cert_chains_by_transport_.find(transport_name);
+ if (it != remote_cert_chains_by_transport_.end()) {
+ return it->second->Clone();
+ } else {
+ return nullptr;
+ }
+ }
+
+ private:
+ cricket::TransportStats GetTransportStatsByName(
+ const std::string& transport_name) {
+ auto it = transport_stats_by_name_.find(transport_name);
+ if (it != transport_stats_by_name_.end()) {
+ // If specific transport stats have been specified, return those.
+ return it->second;
+ }
+ // Otherwise, generate some dummy stats.
+ cricket::TransportChannelStats channel_stats;
+ channel_stats.component = cricket::ICE_CANDIDATE_COMPONENT_RTP;
+ cricket::TransportStats transport_stats;
+ transport_stats.transport_name = transport_name;
+ transport_stats.channel_stats.push_back(channel_stats);
+ return transport_stats;
+ }
+
+ rtc::scoped_refptr<RtpTransceiverProxyWithInternal<RtpTransceiver>>
+ GetOrCreateFirstTransceiverOfType(cricket::MediaType media_type) {
+ for (auto transceiver : transceivers_) {
+ if (transceiver->internal()->media_type() == media_type) {
+ return transceiver;
+ }
+ }
+ return CreateTransceiverOfType(media_type);
+ }
+
+ rtc::scoped_refptr<RtpTransceiverProxyWithInternal<RtpTransceiver>>
+ CreateTransceiverOfType(cricket::MediaType media_type) {
+ auto transceiver = RtpTransceiverProxyWithInternal<RtpTransceiver>::Create(
+ signaling_thread_,
+ rtc::make_ref_counted<RtpTransceiver>(media_type, context_.get()));
+ transceivers_.push_back(transceiver);
+ return transceiver;
+ }
+
+ rtc::Thread* const network_thread_;
+ rtc::Thread* const worker_thread_;
+ rtc::Thread* const signaling_thread_;
+
+ PeerConnectionFactoryDependencies dependencies_;
+ rtc::scoped_refptr<ConnectionContext> context_;
+
+ rtc::scoped_refptr<StreamCollection> local_streams_;
+ rtc::scoped_refptr<StreamCollection> remote_streams_;
+
+ std::vector<
+ rtc::scoped_refptr<RtpTransceiverProxyWithInternal<RtpTransceiver>>>
+ transceivers_;
+
+ FakeDataChannelController data_channel_controller_;
+
+ std::vector<rtc::scoped_refptr<SctpDataChannel>> sctp_data_channels_;
+
+ std::map<std::string, cricket::TransportStats> transport_stats_by_name_;
+
+ Call::Stats call_stats_;
+
+ absl::optional<AudioDeviceModule::Stats> audio_device_stats_;
+
+ std::map<std::string, rtc::scoped_refptr<rtc::RTCCertificate>>
+ local_certificates_by_transport_;
+ std::map<std::string, std::unique_ptr<rtc::SSLCertChain>>
+ remote_cert_chains_by_transport_;
+};
+
+} // namespace webrtc
+
+#endif // PC_TEST_FAKE_PEER_CONNECTION_FOR_STATS_H_
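A minimal usage sketch for FakePeerConnectionForStats, assuming a single-threaded stats-collector unit test; the mid, transport name and stat values below are illustrative only and not part of this patch:

  auto pc = rtc::make_ref_counted<webrtc::FakePeerConnectionForStats>();

  // One outbound audio stream on a fake voice channel bound to "transport".
  cricket::VoiceMediaInfo voice_info;
  voice_info.senders.emplace_back();
  pc->AddVoiceChannel("audio_mid", "transport", voice_info);

  // Transport and call stats that the stats collectors will read back.
  cricket::TransportChannelStats channel_stats;
  channel_stats.component = cricket::ICE_CANDIDATE_COMPONENT_RTP;
  pc->SetTransportStats("transport", channel_stats);

  webrtc::Call::Stats call_stats;
  call_stats.rtt_ms = 45;
  pc->SetCallStats(call_stats);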
diff --git a/third_party/libwebrtc/pc/test/fake_periodic_video_source.h b/third_party/libwebrtc/pc/test/fake_periodic_video_source.h
new file mode 100644
index 0000000000..871c29cbae
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/fake_periodic_video_source.h
@@ -0,0 +1,101 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_TEST_FAKE_PERIODIC_VIDEO_SOURCE_H_
+#define PC_TEST_FAKE_PERIODIC_VIDEO_SOURCE_H_
+
+#include <memory>
+
+#include "api/video/video_source_interface.h"
+#include "media/base/fake_frame_source.h"
+#include "media/base/video_broadcaster.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/task_queue_for_test.h"
+#include "rtc_base/task_utils/repeating_task.h"
+
+namespace webrtc {
+
+class FakePeriodicVideoSource final
+ : public rtc::VideoSourceInterface<VideoFrame> {
+ public:
+ static constexpr int kDefaultFrameIntervalMs = 33;
+ static constexpr int kDefaultWidth = 640;
+ static constexpr int kDefaultHeight = 480;
+
+ struct Config {
+ int width = kDefaultWidth;
+ int height = kDefaultHeight;
+ int frame_interval_ms = kDefaultFrameIntervalMs;
+ VideoRotation rotation = kVideoRotation_0;
+ int64_t timestamp_offset_ms = 0;
+ };
+
+ FakePeriodicVideoSource() : FakePeriodicVideoSource(Config()) {}
+ explicit FakePeriodicVideoSource(Config config)
+ : frame_source_(
+ config.width,
+ config.height,
+ config.frame_interval_ms * rtc::kNumMicrosecsPerMillisec,
+ config.timestamp_offset_ms * rtc::kNumMicrosecsPerMillisec),
+ task_queue_(std::make_unique<TaskQueueForTest>(
+ "FakePeriodicVideoTrackSource")) {
+ thread_checker_.Detach();
+ frame_source_.SetRotation(config.rotation);
+
+ TimeDelta frame_interval = TimeDelta::Millis(config.frame_interval_ms);
+ RepeatingTaskHandle::Start(task_queue_->Get(), [this, frame_interval] {
+ if (broadcaster_.wants().rotation_applied) {
+ broadcaster_.OnFrame(frame_source_.GetFrameRotationApplied());
+ } else {
+ broadcaster_.OnFrame(frame_source_.GetFrame());
+ }
+ return frame_interval;
+ });
+ }
+
+ rtc::VideoSinkWants wants() const {
+ MutexLock lock(&mutex_);
+ return wants_;
+ }
+
+ void RemoveSink(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) override {
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ broadcaster_.RemoveSink(sink);
+ }
+
+ void AddOrUpdateSink(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
+ const rtc::VideoSinkWants& wants) override {
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ {
+ MutexLock lock(&mutex_);
+ wants_ = wants;
+ }
+ broadcaster_.AddOrUpdateSink(sink, wants);
+ }
+
+ void Stop() {
+ RTC_DCHECK(task_queue_);
+ task_queue_.reset();
+ }
+
+ private:
+ SequenceChecker thread_checker_;
+
+ rtc::VideoBroadcaster broadcaster_;
+ cricket::FakeFrameSource frame_source_;
+ mutable Mutex mutex_;
+ rtc::VideoSinkWants wants_ RTC_GUARDED_BY(&mutex_);
+
+ std::unique_ptr<TaskQueueForTest> task_queue_;
+};
+
+} // namespace webrtc
+
+#endif // PC_TEST_FAKE_PERIODIC_VIDEO_SOURCE_H_
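A usage sketch for FakePeriodicVideoSource, assuming sink registration and teardown happen on the construction thread; cricket::FakeVideoRenderer comes from media/base/fake_video_renderer.h and the 10 ms interval is an arbitrary test value:

  webrtc::FakePeriodicVideoSource::Config config;
  config.frame_interval_ms = 10;  // deliver frames faster than the 33 ms default
  webrtc::FakePeriodicVideoSource source(config);

  cricket::FakeVideoRenderer renderer;
  source.AddOrUpdateSink(&renderer, rtc::VideoSinkWants());
  // ... wait until renderer.num_rendered_frames() reaches the expected count ...
  source.RemoveSink(&renderer);
  source.Stop();  // destroys the task queue, stopping frame delivery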
diff --git a/third_party/libwebrtc/pc/test/fake_periodic_video_track_source.h b/third_party/libwebrtc/pc/test/fake_periodic_video_track_source.h
new file mode 100644
index 0000000000..98a456f232
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/fake_periodic_video_track_source.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_TEST_FAKE_PERIODIC_VIDEO_TRACK_SOURCE_H_
+#define PC_TEST_FAKE_PERIODIC_VIDEO_TRACK_SOURCE_H_
+
+#include "pc/test/fake_periodic_video_source.h"
+#include "pc/video_track_source.h"
+
+namespace webrtc {
+
+// A VideoTrackSource generating frames with configured size and frame interval.
+class FakePeriodicVideoTrackSource : public VideoTrackSource {
+ public:
+ explicit FakePeriodicVideoTrackSource(bool remote)
+ : FakePeriodicVideoTrackSource(FakePeriodicVideoSource::Config(),
+ remote) {}
+
+ FakePeriodicVideoTrackSource(FakePeriodicVideoSource::Config config,
+ bool remote)
+ : VideoTrackSource(remote), source_(config) {}
+
+ ~FakePeriodicVideoTrackSource() = default;
+
+ const FakePeriodicVideoSource& fake_periodic_source() const {
+ return source_;
+ }
+
+ protected:
+ rtc::VideoSourceInterface<VideoFrame>* source() override { return &source_; }
+
+ private:
+ FakePeriodicVideoSource source_;
+};
+
+} // namespace webrtc
+
+#endif // PC_TEST_FAKE_PERIODIC_VIDEO_TRACK_SOURCE_H_
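A sketch of wrapping this track source into a video track; `factory` stands in for a PeerConnectionFactoryInterface owned by the test and is an assumption here, not something defined in this patch:

  rtc::scoped_refptr<webrtc::FakePeriodicVideoTrackSource> source =
      rtc::make_ref_counted<webrtc::FakePeriodicVideoTrackSource>(
          /*remote=*/false);
  rtc::scoped_refptr<webrtc::VideoTrackInterface> track =
      factory->CreateVideoTrack(rtc::CreateRandomUuid(), source.get());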
diff --git a/third_party/libwebrtc/pc/test/fake_rtc_certificate_generator.h b/third_party/libwebrtc/pc/test/fake_rtc_certificate_generator.h
new file mode 100644
index 0000000000..61da26a12f
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/fake_rtc_certificate_generator.h
@@ -0,0 +1,222 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_TEST_FAKE_RTC_CERTIFICATE_GENERATOR_H_
+#define PC_TEST_FAKE_RTC_CERTIFICATE_GENERATOR_H_
+
+#include <string>
+#include <utility>
+
+#include "absl/types/optional.h"
+#include "api/peer_connection_interface.h"
+#include "api/task_queue/task_queue_base.h"
+#include "api/units/time_delta.h"
+#include "rtc_base/rtc_certificate.h"
+#include "rtc_base/rtc_certificate_generator.h"
+
+// RSA with mod size 1024, pub exp 0x10001.
+static const rtc::RTCCertificatePEM kRsaPems[] = {
+ rtc::RTCCertificatePEM(
+ "-----BEGIN RSA PRIVATE KEY-----\n"
+ "MIICdwIBADANBgkqhkiG9w0BAQEFAASCAmEwggJdAgEAAoGBAMYRkbhmI7kVA/rM\n"
+ "czsZ+6JDhDvnkF+vn6yCAGuRPV03zuRqZtDy4N4to7PZu9PjqrRl7nDMXrG3YG9y\n"
+ "rlIAZ72KjcKKFAJxQyAKLCIdawKRyp8RdK3LEySWEZb0AV58IadqPZDTNHHRX8dz\n"
+ "5aTSMsbbkZ+C/OzTnbiMqLL/vg6jAgMBAAECgYAvgOs4FJcgvp+TuREx7YtiYVsH\n"
+ "mwQPTum2z/8VzWGwR8BBHBvIpVe1MbD/Y4seyI2aco/7UaisatSgJhsU46/9Y4fq\n"
+ "2TwXH9QANf4at4d9n/R6rzwpAJOpgwZgKvdQjkfrKTtgLV+/dawvpxUYkRH4JZM1\n"
+ "CVGukMfKNrSVH4Ap4QJBAOJmGV1ASPnB4r4nc99at7JuIJmd7fmuVUwUgYi4XgaR\n"
+ "WhScBsgYwZ/JoywdyZJgnbcrTDuVcWG56B3vXbhdpMsCQQDf9zeJrjnPZ3Cqm79y\n"
+ "kdqANep0uwZciiNiWxsQrCHztywOvbFhdp8iYVFG9EK8DMY41Y5TxUwsHD+67zao\n"
+ "ZNqJAkEA1suLUP/GvL8IwuRneQd2tWDqqRQ/Td3qq03hP7e77XtF/buya3Ghclo5\n"
+ "54czUR89QyVfJEC6278nzA7n2h1uVQJAcG6mztNL6ja/dKZjYZye2CY44QjSlLo0\n"
+ "MTgTSjdfg/28fFn2Jjtqf9Pi/X+50LWI/RcYMC2no606wRk9kyOuIQJBAK6VSAim\n"
+ "1pOEjsYQn0X5KEIrz1G3bfCbB848Ime3U2/FWlCHMr6ch8kCZ5d1WUeJD3LbwMNG\n"
+ "UCXiYxSsu20QNVw=\n"
+ "-----END RSA PRIVATE KEY-----\n",
+ "-----BEGIN CERTIFICATE-----\n"
+ "MIIBmTCCAQKgAwIBAgIEbzBSAjANBgkqhkiG9w0BAQsFADARMQ8wDQYDVQQDEwZX\n"
+ "ZWJSVEMwHhcNMTQwMTAyMTgyNDQ3WhcNMTQwMjAxMTgyNDQ3WjARMQ8wDQYDVQQD\n"
+ "EwZXZWJSVEMwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAMYRkbhmI7kVA/rM\n"
+ "czsZ+6JDhDvnkF+vn6yCAGuRPV03zuRqZtDy4N4to7PZu9PjqrRl7nDMXrG3YG9y\n"
+ "rlIAZ72KjcKKFAJxQyAKLCIdawKRyp8RdK3LEySWEZb0AV58IadqPZDTNHHRX8dz\n"
+ "5aTSMsbbkZ+C/OzTnbiMqLL/vg6jAgMBAAEwDQYJKoZIhvcNAQELBQADgYEAUflI\n"
+ "VUe5Krqf5RVa5C3u/UTAOAUJBiDS3VANTCLBxjuMsvqOG0WvaYWP3HYPgrz0jXK2\n"
+ "LJE/mGw3MyFHEqi81jh95J+ypl6xKW6Rm8jKLR87gUvCaVYn/Z4/P3AqcQTB7wOv\n"
+ "UD0A8qfhfDM+LK6rPAnCsVN0NRDY3jvd6rzix9M=\n"
+ "-----END CERTIFICATE-----\n"),
+ rtc::RTCCertificatePEM(
+ "-----BEGIN RSA PRIVATE KEY-----\n"
+ "MIICXQIBAAKBgQDeYqlyJ1wuiMsi905e3X81/WA/G3ym50PIDZBVtSwZi7JVQPgj\n"
+ "Bl8CPZMvDh9EwB4Ji9ytA8dZZbQ4WbJWPr73zPpJSCvQqz6sOXSlenBRi72acNaQ\n"
+ "sOR/qPvviJx5I6Hqo4qemfnjZhAW85a5BpgrAwKgMLIQTHCTLWwVSyrDrwIDAQAB\n"
+ "AoGARni9eY8/hv+SX+I+05EdXt6MQXNUbQ+cSykBNCfVccLzIFEWUQMT2IHqwl6X\n"
+ "ShIXcq7/n1QzOAEiuzixauM3YHg4xZ1Um2Ha9a7ig5Xg4v6b43bmMkNE6LkoAtYs\n"
+ "qnQdfMh442b1liDud6IMb1Qk0amt3fSrgRMc547TZQVx4QECQQDxUeDm94r3p4ng\n"
+ "5rCLLC1K5/6HSTZsh7jatKPlz7GfP/IZlYV7iE5784/n0wRiCjZOS7hQRy/8m2Gp\n"
+ "pf4aZq+DAkEA6+np4d36FYikydvUrupLT3FkdRHGn/v83qOll/VmeNh+L1xMZlIP\n"
+ "tM26hAXCcQb7O5+J9y3cx2CAQsBS11ZXZQJAfGgTo76WG9p5UEJdXUInD2jOZPwv\n"
+ "XIATolxh6kXKcijLLLlSmT7KB0inNYIpzkkpee+7U1d/u6B3FriGaSHq9QJBAM/J\n"
+ "ICnDdLCgwNvWVraVQC3BpwSB2pswvCFwq7py94V60XFvbw80Ogc6qIv98qvQxVlX\n"
+ "hJIEgA/PjEi+0ng94Q0CQQDm8XSDby35gmjO+6eRmJtAjtB7nguLvrPXM6CPXRmD\n"
+ "sRoBocpHw6j9UdzZ6qYG0FkdXZghezXFY58ro2BYYRR3\n"
+ "-----END RSA PRIVATE KEY-----\n",
+ "-----BEGIN CERTIFICATE-----\n"
+ "MIICWDCCAcGgAwIBAgIJALgDjxMbBOhbMA0GCSqGSIb3DQEBCwUAMEUxCzAJBgNV\n"
+ "BAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRlcm5ldCBX\n"
+ "aWRnaXRzIFB0eSBMdGQwHhcNMTUxMTEzMjIzMjEzWhcNMTYxMTEyMjIzMjEzWjBF\n"
+ "MQswCQYDVQQGEwJBVTETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UECgwYSW50\n"
+ "ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKB\n"
+ "gQDeYqlyJ1wuiMsi905e3X81/WA/G3ym50PIDZBVtSwZi7JVQPgjBl8CPZMvDh9E\n"
+ "wB4Ji9ytA8dZZbQ4WbJWPr73zPpJSCvQqz6sOXSlenBRi72acNaQsOR/qPvviJx5\n"
+ "I6Hqo4qemfnjZhAW85a5BpgrAwKgMLIQTHCTLWwVSyrDrwIDAQABo1AwTjAdBgNV\n"
+ "HQ4EFgQUx2tbJdlcSTCepn09UdYORXKuSTAwHwYDVR0jBBgwFoAUx2tbJdlcSTCe\n"
+ "pn09UdYORXKuSTAwDAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQsFAAOBgQAmp9Id\n"
+ "E716gHMqeBG4S2FCgVFCr0a0ugkaneQAN/c2L9CbMemEN9W6jvucUIVOtYd90dDW\n"
+ "lXuowWmT/JctPe3D2qt4yvYW3puECHk2tVQmrJOZiZiTRtWm6HxkmoUYHYp/DtaS\n"
+ "1Xe29gSTnZtI5sQCrGMzk3SGRSSs7ejLKiVDBQ==\n"
+ "-----END CERTIFICATE-----\n")};
+
+// ECDSA with EC_NIST_P256.
+// These PEM strings were created by generating an identity with
+// `SSLIdentity::Create` and invoking `identity->PrivateKeyToPEMString()`,
+// `identity->PublicKeyToPEMString()` and
+// `identity->certificate().ToPEMString()`.
+static const rtc::RTCCertificatePEM kEcdsaPems[] = {
+ rtc::RTCCertificatePEM(
+ "-----BEGIN PRIVATE KEY-----\n"
+ "MIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQg+qaRsR5uHtqG689M\n"
+ "A3PHSJNeVpyi5wUKCft62h0UWy+hRANCAAS5Mjc85q9fVq4ln+zOPlaEC/Rzj5Pb\n"
+ "MVZtf1x/8k2KsbmyZoAMDX2yer/atEuXmItMe3yd6/DXnvboU//D3Lyt\n"
+ "-----END PRIVATE KEY-----\n",
+ "-----BEGIN CERTIFICATE-----\n"
+ "MIIBFTCBu6ADAgECAgkA30tGY5XG7oowCgYIKoZIzj0EAwIwEDEOMAwGA1UEAwwF\n"
+ "dGVzdDMwHhcNMTYwNTA5MDkxODA4WhcNMTYwNjA5MDkxODA4WjAQMQ4wDAYDVQQD\n"
+ "DAV0ZXN0MzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABLkyNzzmr19WriWf7M4+\n"
+ "VoQL9HOPk9sxVm1/XH/yTYqxubJmgAwNfbJ6v9q0S5eYi0x7fJ3r8Nee9uhT/8Pc\n"
+ "vK0wCgYIKoZIzj0EAwIDSQAwRgIhAIIc3+CqfkZ9lLwTj1PvUtt3KhnqF2kD0War\n"
+ "cCoTBbCxAiEAyp9Cn4vo2ZBhRIVDKyoxmwak8Z0PAVhJAQaWCgoY2D4=\n"
+ "-----END CERTIFICATE-----\n"),
+ rtc::RTCCertificatePEM(
+ "-----BEGIN PRIVATE KEY-----\n"
+ "MIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQghL/G4JRYnuDNbQuh\n"
+ "LqkytcE39Alsq6FItDVFgOesfCmhRANCAATd53FjPLyVUcwYguEPbSJM03fP6Rx5\n"
+ "GY1dEZ00+ZykjJI83VfDAyvmpRuGahNtBH0hc+7xkDCbeo6TM0tN35xr\n"
+ "-----END PRIVATE KEY-----\n",
+ "-----BEGIN CERTIFICATE-----\n"
+ "MIIBFDCBu6ADAgECAgkArZYdXMyJ5rswCgYIKoZIzj0EAwIwEDEOMAwGA1UEAwwF\n"
+ "dGVzdDQwHhcNMTYwNTA5MDkxODA4WhcNMTYwNjA5MDkxODA4WjAQMQ4wDAYDVQQD\n"
+ "DAV0ZXN0NDBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABN3ncWM8vJVRzBiC4Q9t\n"
+ "IkzTd8/pHHkZjV0RnTT5nKSMkjzdV8MDK+alG4ZqE20EfSFz7vGQMJt6jpMzS03f\n"
+ "nGswCgYIKoZIzj0EAwIDSAAwRQIgb/LBc8OtsC5lEDyjCP6M9xt5mwzUNrQBOFWZ\n"
+ "1fE/g68CIQD7uoFfbiq6dTp8ZwzbwQ8jJf08KjriamqA9OW/4268Dw==\n"
+ "-----END CERTIFICATE-----\n")};
+
+class FakeRTCCertificateGenerator
+ : public rtc::RTCCertificateGeneratorInterface {
+ public:
+ FakeRTCCertificateGenerator() : should_fail_(false), should_wait_(false) {}
+
+ void set_should_fail(bool should_fail) { should_fail_ = should_fail; }
+
+ // If set to true, stalls the generation of the fake certificate until it is
+ // set to false.
+ void set_should_wait(bool should_wait) { should_wait_ = should_wait; }
+
+ void use_original_key() { key_index_ = 0; }
+ void use_alternate_key() { key_index_ = 1; }
+
+ int generated_certificates() { return generated_certificates_; }
+ int generated_failures() { return generated_failures_; }
+
+ void GenerateCertificateAsync(const rtc::KeyParams& key_params,
+ const absl::optional<uint64_t>& expires_ms,
+ Callback callback) override {
+    // The certificates are created from constant PEM strings and use their
+    // hard-coded expiration times; modifying the expiration is not supported.
+ RTC_DCHECK(!expires_ms);
+
+ // Only supports RSA-1024-0x10001 and ECDSA-P256.
+ if (key_params.type() == rtc::KT_RSA) {
+ RTC_DCHECK_EQ(key_params.rsa_params().mod_size, 1024);
+ RTC_DCHECK_EQ(key_params.rsa_params().pub_exp, 0x10001);
+ } else {
+ RTC_DCHECK_EQ(key_params.type(), rtc::KT_ECDSA);
+ RTC_DCHECK_EQ(key_params.ec_curve(), rtc::EC_NIST_P256);
+ }
+ rtc::KeyType key_type = key_params.type();
+ webrtc::TaskQueueBase::Current()->PostTask(
+ [this, key_type, callback = std::move(callback)]() mutable {
+ GenerateCertificate(key_type, std::move(callback));
+ });
+ }
+
+ static rtc::scoped_refptr<rtc::RTCCertificate> GenerateCertificate() {
+ switch (rtc::KT_DEFAULT) {
+ case rtc::KT_RSA:
+ return rtc::RTCCertificate::FromPEM(kRsaPems[0]);
+ case rtc::KT_ECDSA:
+ return rtc::RTCCertificate::FromPEM(kEcdsaPems[0]);
+ default:
+ RTC_DCHECK_NOTREACHED();
+ return nullptr;
+ }
+ }
+
+ private:
+ const rtc::RTCCertificatePEM& get_pem(const rtc::KeyType& key_type) const {
+ switch (key_type) {
+ case rtc::KT_RSA:
+ return kRsaPems[key_index_];
+ case rtc::KT_ECDSA:
+ return kEcdsaPems[key_index_];
+ default:
+ RTC_DCHECK_NOTREACHED();
+ return kEcdsaPems[key_index_];
+ }
+ }
+ const std::string& get_key(const rtc::KeyType& key_type) const {
+ return get_pem(key_type).private_key();
+ }
+ const std::string& get_cert(const rtc::KeyType& key_type) const {
+ return get_pem(key_type).certificate();
+ }
+
+ void GenerateCertificate(rtc::KeyType key_type, Callback callback) {
+ // If the certificate generation should be stalled, re-post this same
+ // message to the queue with a small delay so as to wait in a loop until
+ // set_should_wait(false) is called.
+ if (should_wait_) {
+ webrtc::TaskQueueBase::Current()->PostDelayedTask(
+ [this, key_type, callback = std::move(callback)]() mutable {
+ GenerateCertificate(key_type, std::move(callback));
+ },
+ webrtc::TimeDelta::Millis(1));
+ return;
+ }
+ if (should_fail_) {
+ ++generated_failures_;
+ std::move(callback)(nullptr);
+ } else {
+ rtc::scoped_refptr<rtc::RTCCertificate> certificate =
+ rtc::RTCCertificate::FromPEM(get_pem(key_type));
+ RTC_DCHECK(certificate);
+ ++generated_certificates_;
+ std::move(callback)(std::move(certificate));
+ }
+ }
+
+ bool should_fail_;
+ bool should_wait_;
+ int key_index_ = 0;
+ int generated_certificates_ = 0;
+ int generated_failures_ = 0;
+};
+
+#endif // PC_TEST_FAKE_RTC_CERTIFICATE_GENERATOR_H_
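A wiring sketch for the fake certificate generator, assuming the test constructs the PeerConnection itself; `observer`, `config` and `factory` are stand-ins for objects the test already owns:

  auto cert_generator = std::make_unique<FakeRTCCertificateGenerator>();
  FakeRTCCertificateGenerator* cert_generator_ptr = cert_generator.get();
  cert_generator_ptr->set_should_wait(true);  // hold back certificate delivery
  webrtc::PeerConnectionDependencies deps(&observer);
  deps.cert_generator = std::move(cert_generator);
  auto pc_or_error =
      factory->CreatePeerConnectionOrError(config, std::move(deps));
  // Later, unblock DTLS setup: cert_generator_ptr->set_should_wait(false);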
diff --git a/third_party/libwebrtc/pc/test/fake_video_track_renderer.h b/third_party/libwebrtc/pc/test/fake_video_track_renderer.h
new file mode 100644
index 0000000000..f6e341b4b2
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/fake_video_track_renderer.h
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_TEST_FAKE_VIDEO_TRACK_RENDERER_H_
+#define PC_TEST_FAKE_VIDEO_TRACK_RENDERER_H_
+
+#include "api/media_stream_interface.h"
+#include "media/base/fake_video_renderer.h"
+
+namespace webrtc {
+
+class FakeVideoTrackRenderer : public cricket::FakeVideoRenderer {
+ public:
+ explicit FakeVideoTrackRenderer(VideoTrackInterface* video_track)
+ : video_track_(video_track) {
+ video_track_->AddOrUpdateSink(this, rtc::VideoSinkWants());
+ }
+ ~FakeVideoTrackRenderer() { video_track_->RemoveSink(this); }
+
+ private:
+ rtc::scoped_refptr<VideoTrackInterface> video_track_;
+};
+
+} // namespace webrtc
+
+#endif // PC_TEST_FAKE_VIDEO_TRACK_RENDERER_H_
diff --git a/third_party/libwebrtc/pc/test/fake_video_track_source.h b/third_party/libwebrtc/pc/test/fake_video_track_source.h
new file mode 100644
index 0000000000..2042c39175
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/fake_video_track_source.h
@@ -0,0 +1,54 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_TEST_FAKE_VIDEO_TRACK_SOURCE_H_
+#define PC_TEST_FAKE_VIDEO_TRACK_SOURCE_H_
+
+#include "api/media_stream_interface.h"
+#include "media/base/video_broadcaster.h"
+#include "pc/video_track_source.h"
+
+namespace webrtc {
+
+// A minimal implementation of VideoTrackSource. Includes a VideoBroadcaster for
+// injection of frames.
+class FakeVideoTrackSource : public VideoTrackSource {
+ public:
+ static rtc::scoped_refptr<FakeVideoTrackSource> Create(bool is_screencast) {
+ return rtc::make_ref_counted<FakeVideoTrackSource>(is_screencast);
+ }
+
+ static rtc::scoped_refptr<FakeVideoTrackSource> Create() {
+ return Create(false);
+ }
+
+ bool is_screencast() const override { return is_screencast_; }
+
+ void InjectFrame(const VideoFrame& frame) {
+ video_broadcaster_.OnFrame(frame);
+ }
+
+ protected:
+ explicit FakeVideoTrackSource(bool is_screencast)
+ : VideoTrackSource(false /* remote */), is_screencast_(is_screencast) {}
+ ~FakeVideoTrackSource() override = default;
+
+ rtc::VideoSourceInterface<VideoFrame>* source() override {
+ return &video_broadcaster_;
+ }
+
+ private:
+ const bool is_screencast_;
+ rtc::VideoBroadcaster video_broadcaster_;
+};
+
+} // namespace webrtc
+
+#endif // PC_TEST_FAKE_VIDEO_TRACK_SOURCE_H_
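A sketch of frame injection, assuming the test builds its own I420 buffer (requires "api/video/i420_buffer.h" and "rtc_base/time_utils.h"); the 320x240 size is arbitrary:

  auto source = webrtc::FakeVideoTrackSource::Create(/*is_screencast=*/false);
  rtc::scoped_refptr<webrtc::I420Buffer> buffer =
      webrtc::I420Buffer::Create(/*width=*/320, /*height=*/240);
  buffer->InitializeData();  // fill the buffer with valid (blank) pixel data
  source->InjectFrame(webrtc::VideoFrame::Builder()
                          .set_video_frame_buffer(buffer)
                          .set_timestamp_us(rtc::TimeMicros())
                          .build());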
diff --git a/third_party/libwebrtc/pc/test/frame_generator_capturer_video_track_source.h b/third_party/libwebrtc/pc/test/frame_generator_capturer_video_track_source.h
new file mode 100644
index 0000000000..50a3d26c2e
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/frame_generator_capturer_video_track_source.h
@@ -0,0 +1,86 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_TEST_FRAME_GENERATOR_CAPTURER_VIDEO_TRACK_SOURCE_H_
+#define PC_TEST_FRAME_GENERATOR_CAPTURER_VIDEO_TRACK_SOURCE_H_
+
+#include <memory>
+#include <utility>
+
+#include "api/task_queue/default_task_queue_factory.h"
+#include "api/task_queue/task_queue_factory.h"
+#include "api/test/create_frame_generator.h"
+#include "pc/video_track_source.h"
+#include "test/frame_generator_capturer.h"
+
+namespace webrtc {
+
+// Implements a VideoTrackSourceInterface to be used for creating VideoTracks.
+// The video source is generated using a FrameGeneratorCapturer, specifically
+// a SquareGenerator that generates frames with randomly sized and colored
+// squares.
+class FrameGeneratorCapturerVideoTrackSource : public VideoTrackSource {
+ public:
+ static const int kDefaultFramesPerSecond = 30;
+ static const int kDefaultWidth = 640;
+ static const int kDefaultHeight = 480;
+ static const int kNumSquaresGenerated = 50;
+
+ struct Config {
+ int frames_per_second = kDefaultFramesPerSecond;
+ int width = kDefaultWidth;
+ int height = kDefaultHeight;
+    int num_squares_generated = kNumSquaresGenerated;
+ };
+
+ FrameGeneratorCapturerVideoTrackSource(Config config,
+ Clock* clock,
+ bool is_screencast)
+ : VideoTrackSource(false /* remote */),
+ task_queue_factory_(CreateDefaultTaskQueueFactory()),
+ is_screencast_(is_screencast) {
+ video_capturer_ = std::make_unique<test::FrameGeneratorCapturer>(
+ clock,
+ test::CreateSquareFrameGenerator(config.width, config.height,
+ absl::nullopt,
+ config.num_squares_generated),
+ config.frames_per_second, *task_queue_factory_);
+ video_capturer_->Init();
+ }
+
+ FrameGeneratorCapturerVideoTrackSource(
+ std::unique_ptr<test::FrameGeneratorCapturer> video_capturer,
+ bool is_screencast)
+ : VideoTrackSource(false /* remote */),
+ video_capturer_(std::move(video_capturer)),
+ is_screencast_(is_screencast) {}
+
+ ~FrameGeneratorCapturerVideoTrackSource() = default;
+
+ void Start() { SetState(kLive); }
+
+ void Stop() { SetState(kMuted); }
+
+ bool is_screencast() const override { return is_screencast_; }
+
+ protected:
+ rtc::VideoSourceInterface<VideoFrame>* source() override {
+ return video_capturer_.get();
+ }
+
+ private:
+ const std::unique_ptr<TaskQueueFactory> task_queue_factory_;
+ std::unique_ptr<test::FrameGeneratorCapturer> video_capturer_;
+ const bool is_screencast_;
+};
+
+} // namespace webrtc
+
+#endif // PC_TEST_FRAME_GENERATOR_CAPTURER_VIDEO_TRACK_SOURCE_H_
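A usage sketch, assuming the test supplies a Clock; the real-time clock and 15 fps are used here purely for illustration:

  webrtc::FrameGeneratorCapturerVideoTrackSource::Config config;
  config.frames_per_second = 15;
  auto source =
      rtc::make_ref_counted<webrtc::FrameGeneratorCapturerVideoTrackSource>(
          config, webrtc::Clock::GetRealTimeClock(), /*is_screencast=*/false);
  source->Start();  // SetState(kLive); sinks start receiving generated frames
  // ... create a video track from `source` and run the test scenario ...
  source->Stop();   // SetState(kMuted)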
diff --git a/third_party/libwebrtc/pc/test/integration_test_helpers.cc b/third_party/libwebrtc/pc/test/integration_test_helpers.cc
new file mode 100644
index 0000000000..471271f068
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/integration_test_helpers.cc
@@ -0,0 +1,92 @@
+/*
+ * Copyright 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/test/integration_test_helpers.h"
+
+namespace webrtc {
+
+PeerConnectionInterface::RTCOfferAnswerOptions IceRestartOfferAnswerOptions() {
+ PeerConnectionInterface::RTCOfferAnswerOptions options;
+ options.ice_restart = true;
+ return options;
+}
+
+void RemoveSsrcsAndMsids(cricket::SessionDescription* desc) {
+ for (ContentInfo& content : desc->contents()) {
+ content.media_description()->mutable_streams().clear();
+ }
+ desc->set_msid_supported(false);
+ desc->set_msid_signaling(0);
+}
+
+void RemoveSsrcsAndKeepMsids(cricket::SessionDescription* desc) {
+ for (ContentInfo& content : desc->contents()) {
+ std::string track_id;
+ std::vector<std::string> stream_ids;
+ if (!content.media_description()->streams().empty()) {
+ const StreamParams& first_stream =
+ content.media_description()->streams()[0];
+ track_id = first_stream.id;
+ stream_ids = first_stream.stream_ids();
+ }
+ content.media_description()->mutable_streams().clear();
+ StreamParams new_stream;
+ new_stream.id = track_id;
+ new_stream.set_stream_ids(stream_ids);
+ content.media_description()->AddStream(new_stream);
+ }
+}
+
+int FindFirstMediaStatsIndexByKind(
+ const std::string& kind,
+ const std::vector<const webrtc::RTCInboundRTPStreamStats*>& inbound_rtps) {
+ for (size_t i = 0; i < inbound_rtps.size(); i++) {
+ if (*inbound_rtps[i]->kind == kind) {
+ return i;
+ }
+ }
+ return -1;
+}
+
+TaskQueueMetronome::TaskQueueMetronome(TimeDelta tick_period)
+ : tick_period_(tick_period) {
+ sequence_checker_.Detach();
+}
+
+TaskQueueMetronome::~TaskQueueMetronome() {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+}
+void TaskQueueMetronome::RequestCallOnNextTick(
+ absl::AnyInvocable<void() &&> callback) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ callbacks_.push_back(std::move(callback));
+ // Only schedule a tick callback for the first `callback` addition.
+ // Schedule on the current task queue to comply with RequestCallOnNextTick
+ // requirements.
+ if (callbacks_.size() == 1) {
+ TaskQueueBase::Current()->PostDelayedTask(
+ SafeTask(safety_.flag(),
+ [this] {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ std::vector<absl::AnyInvocable<void() &&>> callbacks;
+ callbacks_.swap(callbacks);
+ for (auto& callback : callbacks)
+ std::move(callback)();
+ }),
+ tick_period_);
+ }
+}
+
+TimeDelta TaskQueueMetronome::TickPeriod() const {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ return tick_period_;
+}
+
+} // namespace webrtc
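A sketch of where the TaskQueueMetronome defined above typically plugs in, assuming PeerConnectionFactoryDependencies in this revision exposes a `metronome` member; the 5 ms tick period is illustrative:

  webrtc::PeerConnectionFactoryDependencies deps;
  deps.metronome =
      std::make_unique<webrtc::TaskQueueMetronome>(webrtc::TimeDelta::Millis(5));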
diff --git a/third_party/libwebrtc/pc/test/integration_test_helpers.h b/third_party/libwebrtc/pc/test/integration_test_helpers.h
new file mode 100644
index 0000000000..4e01b333b9
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/integration_test_helpers.h
@@ -0,0 +1,1929 @@
+/*
+ * Copyright 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_TEST_INTEGRATION_TEST_HELPERS_H_
+#define PC_TEST_INTEGRATION_TEST_HELPERS_H_
+
+#include <limits.h>
+#include <stdint.h>
+#include <stdio.h>
+
+#include <algorithm>
+#include <functional>
+#include <limits>
+#include <list>
+#include <map>
+#include <memory>
+#include <set>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "absl/algorithm/container.h"
+#include "absl/memory/memory.h"
+#include "absl/strings/string_view.h"
+#include "absl/types/optional.h"
+#include "api/audio_options.h"
+#include "api/call/call_factory_interface.h"
+#include "api/candidate.h"
+#include "api/crypto/crypto_options.h"
+#include "api/data_channel_interface.h"
+#include "api/field_trials_view.h"
+#include "api/ice_transport_interface.h"
+#include "api/jsep.h"
+#include "api/media_stream_interface.h"
+#include "api/media_types.h"
+#include "api/peer_connection_interface.h"
+#include "api/rtc_error.h"
+#include "api/rtc_event_log/rtc_event_log_factory.h"
+#include "api/rtc_event_log/rtc_event_log_factory_interface.h"
+#include "api/rtc_event_log_output.h"
+#include "api/rtp_receiver_interface.h"
+#include "api/rtp_sender_interface.h"
+#include "api/rtp_transceiver_interface.h"
+#include "api/scoped_refptr.h"
+#include "api/stats/rtc_stats.h"
+#include "api/stats/rtc_stats_report.h"
+#include "api/stats/rtcstats_objects.h"
+#include "api/task_queue/default_task_queue_factory.h"
+#include "api/task_queue/pending_task_safety_flag.h"
+#include "api/task_queue/task_queue_factory.h"
+#include "api/transport/field_trial_based_config.h"
+#include "api/uma_metrics.h"
+#include "api/units/time_delta.h"
+#include "api/video/video_rotation.h"
+#include "api/video_codecs/sdp_video_format.h"
+#include "api/video_codecs/video_decoder_factory.h"
+#include "api/video_codecs/video_encoder_factory.h"
+#include "call/call.h"
+#include "logging/rtc_event_log/fake_rtc_event_log_factory.h"
+#include "media/base/media_engine.h"
+#include "media/base/stream_params.h"
+#include "media/engine/fake_webrtc_video_engine.h"
+#include "media/engine/webrtc_media_engine.h"
+#include "media/engine/webrtc_media_engine_defaults.h"
+#include "modules/audio_device/include/audio_device.h"
+#include "modules/audio_processing/include/audio_processing.h"
+#include "modules/audio_processing/test/audio_processing_builder_for_testing.h"
+#include "p2p/base/fake_ice_transport.h"
+#include "p2p/base/ice_transport_internal.h"
+#include "p2p/base/mock_async_resolver.h"
+#include "p2p/base/p2p_constants.h"
+#include "p2p/base/port.h"
+#include "p2p/base/port_allocator.h"
+#include "p2p/base/port_interface.h"
+#include "p2p/base/test_stun_server.h"
+#include "p2p/base/test_turn_customizer.h"
+#include "p2p/base/test_turn_server.h"
+#include "p2p/client/basic_port_allocator.h"
+#include "pc/dtmf_sender.h"
+#include "pc/local_audio_source.h"
+#include "pc/media_session.h"
+#include "pc/peer_connection.h"
+#include "pc/peer_connection_factory.h"
+#include "pc/peer_connection_proxy.h"
+#include "pc/rtp_media_utils.h"
+#include "pc/session_description.h"
+#include "pc/test/fake_audio_capture_module.h"
+#include "pc/test/fake_periodic_video_source.h"
+#include "pc/test/fake_periodic_video_track_source.h"
+#include "pc/test/fake_rtc_certificate_generator.h"
+#include "pc/test/fake_video_track_renderer.h"
+#include "pc/test/mock_peer_connection_observers.h"
+#include "pc/video_track_source.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/event.h"
+#include "rtc_base/fake_clock.h"
+#include "rtc_base/fake_mdns_responder.h"
+#include "rtc_base/fake_network.h"
+#include "rtc_base/firewall_socket_server.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/helpers.h"
+#include "rtc_base/ip_address.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/mdns_responder_interface.h"
+#include "rtc_base/numerics/safe_conversions.h"
+#include "rtc_base/rtc_certificate_generator.h"
+#include "rtc_base/socket_address.h"
+#include "rtc_base/ssl_stream_adapter.h"
+#include "rtc_base/task_queue_for_test.h"
+#include "rtc_base/task_utils/repeating_task.h"
+#include "rtc_base/test_certificate_verifier.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/thread_annotations.h"
+#include "rtc_base/time_utils.h"
+#include "rtc_base/virtual_socket_server.h"
+#include "system_wrappers/include/metrics.h"
+#include "test/gmock.h"
+#include "test/scoped_key_value_config.h"
+
+namespace webrtc {
+
+using ::cricket::ContentInfo;
+using ::cricket::StreamParams;
+using ::rtc::SocketAddress;
+using ::testing::_;
+using ::testing::Combine;
+using ::testing::Contains;
+using ::testing::DoAll;
+using ::testing::ElementsAre;
+using ::testing::NiceMock;
+using ::testing::Return;
+using ::testing::SetArgPointee;
+using ::testing::UnorderedElementsAreArray;
+using ::testing::Values;
+using RTCConfiguration = PeerConnectionInterface::RTCConfiguration;
+
+static const int kDefaultTimeout = 10000;
+static const int kMaxWaitForStatsMs = 3000;
+static const int kMaxWaitForActivationMs = 5000;
+static const int kMaxWaitForFramesMs = 10000;
+// Default number of audio/video frames to wait for before considering a test
+// successful.
+static const int kDefaultExpectedAudioFrameCount = 3;
+static const int kDefaultExpectedVideoFrameCount = 3;
+
+static const char kDataChannelLabel[] = "data_channel";
+
+// SRTP crypto suites negotiated by the tests. These must be updated if the
+// defaults change.
+static const int kDefaultSrtpCryptoSuite = rtc::kSrtpAes128CmSha1_80;
+static const int kDefaultSrtpCryptoSuiteGcm = rtc::kSrtpAeadAes256Gcm;
+
+static const SocketAddress kDefaultLocalAddress("192.168.1.1", 0);
+
+// Helper function for constructing offer/answer options to initiate an ICE
+// restart.
+PeerConnectionInterface::RTCOfferAnswerOptions IceRestartOfferAnswerOptions();
+
+// Remove all stream information (SSRCs, track IDs, etc.) and "msid-semantic"
+// attribute from received SDP, simulating a legacy endpoint.
+void RemoveSsrcsAndMsids(cricket::SessionDescription* desc);
+
+// Removes all stream information besides the stream ids, simulating an
+// endpoint that only signals a=msid lines to convey stream_ids.
+void RemoveSsrcsAndKeepMsids(cricket::SessionDescription* desc);
+
+int FindFirstMediaStatsIndexByKind(
+ const std::string& kind,
+ const std::vector<const webrtc::RTCInboundRTPStreamStats*>& inbound_rtps);
+
+class TaskQueueMetronome : public webrtc::Metronome {
+ public:
+ explicit TaskQueueMetronome(TimeDelta tick_period);
+ ~TaskQueueMetronome() override;
+
+ // webrtc::Metronome implementation.
+ void RequestCallOnNextTick(absl::AnyInvocable<void() &&> callback) override;
+ TimeDelta TickPeriod() const override;
+
+ private:
+ const TimeDelta tick_period_;
+ SequenceChecker sequence_checker_;
+ std::vector<absl::AnyInvocable<void() &&>> callbacks_;
+ ScopedTaskSafetyDetached safety_;
+};
+
+class SignalingMessageReceiver {
+ public:
+ virtual void ReceiveSdpMessage(SdpType type, const std::string& msg) = 0;
+ virtual void ReceiveIceMessage(const std::string& sdp_mid,
+ int sdp_mline_index,
+ const std::string& msg) = 0;
+
+ protected:
+ SignalingMessageReceiver() {}
+ virtual ~SignalingMessageReceiver() {}
+};
+
+class MockRtpReceiverObserver : public webrtc::RtpReceiverObserverInterface {
+ public:
+ explicit MockRtpReceiverObserver(cricket::MediaType media_type)
+ : expected_media_type_(media_type) {}
+
+ void OnFirstPacketReceived(cricket::MediaType media_type) override {
+ ASSERT_EQ(expected_media_type_, media_type);
+ first_packet_received_ = true;
+ }
+
+ bool first_packet_received() const { return first_packet_received_; }
+
+ virtual ~MockRtpReceiverObserver() {}
+
+ private:
+ bool first_packet_received_ = false;
+ cricket::MediaType expected_media_type_;
+};
+
+// Helper class that wraps a peer connection, observes it, and can accept
+// signaling messages from another wrapper.
+//
+// Uses a fake network, fake A/V capture, and optionally fake
+// encoders/decoders, though they aren't used by default since they don't
+// advertise support of any codecs.
+// TODO(steveanton): See how this could become a subclass of
+// PeerConnectionWrapper defined in peerconnectionwrapper.h.
+class PeerConnectionIntegrationWrapper : public webrtc::PeerConnectionObserver,
+ public SignalingMessageReceiver {
+ public:
+ webrtc::PeerConnectionFactoryInterface* pc_factory() const {
+ return peer_connection_factory_.get();
+ }
+
+ webrtc::PeerConnectionInterface* pc() const { return peer_connection_.get(); }
+
+ // If a signaling message receiver is set (via ConnectFakeSignaling), this
+ // will set the whole offer/answer exchange in motion. Just need to wait for
+ // the signaling state to reach "stable".
+ void CreateAndSetAndSignalOffer() {
+ auto offer = CreateOfferAndWait();
+ ASSERT_NE(nullptr, offer);
+ EXPECT_TRUE(SetLocalDescriptionAndSendSdpMessage(std::move(offer)));
+ }
+
+ // Sets the options to be used when CreateAndSetAndSignalOffer is called, or
+ // when a remote offer is received (via fake signaling) and an answer is
+ // generated. By default, uses default options.
+ void SetOfferAnswerOptions(
+ const PeerConnectionInterface::RTCOfferAnswerOptions& options) {
+ offer_answer_options_ = options;
+ }
+
+ // Set a callback to be invoked when SDP is received via the fake signaling
+ // channel, which provides an opportunity to munge (modify) the SDP. This is
+ // used to test SDP being applied that a PeerConnection would normally not
+ // generate, but a non-JSEP endpoint might.
+ void SetReceivedSdpMunger(
+ std::function<void(cricket::SessionDescription*)> munger) {
+ received_sdp_munger_ = std::move(munger);
+ }
+
+ // Similar to the above, but this is run on SDP immediately after it's
+ // generated.
+ void SetGeneratedSdpMunger(
+ std::function<void(cricket::SessionDescription*)> munger) {
+ generated_sdp_munger_ = std::move(munger);
+ }
+
+ // Set a callback to be invoked when a remote offer is received via the fake
+ // signaling channel. This provides an opportunity to change the
+ // PeerConnection state before an answer is created and sent to the caller.
+ void SetRemoteOfferHandler(std::function<void()> handler) {
+ remote_offer_handler_ = std::move(handler);
+ }
+
+ void SetRemoteAsyncResolver(rtc::MockAsyncResolver* resolver) {
+ remote_async_resolver_ = resolver;
+ }
+
+ // Every ICE connection state in order that has been seen by the observer.
+ std::vector<PeerConnectionInterface::IceConnectionState>
+ ice_connection_state_history() const {
+ return ice_connection_state_history_;
+ }
+ void clear_ice_connection_state_history() {
+ ice_connection_state_history_.clear();
+ }
+
+ // Every standardized ICE connection state in order that has been seen by the
+ // observer.
+ std::vector<PeerConnectionInterface::IceConnectionState>
+ standardized_ice_connection_state_history() const {
+ return standardized_ice_connection_state_history_;
+ }
+
+ // Every PeerConnection state in order that has been seen by the observer.
+ std::vector<PeerConnectionInterface::PeerConnectionState>
+ peer_connection_state_history() const {
+ return peer_connection_state_history_;
+ }
+
+ // Every ICE gathering state in order that has been seen by the observer.
+ std::vector<PeerConnectionInterface::IceGatheringState>
+ ice_gathering_state_history() const {
+ return ice_gathering_state_history_;
+ }
+ std::vector<cricket::CandidatePairChangeEvent>
+ ice_candidate_pair_change_history() const {
+ return ice_candidate_pair_change_history_;
+ }
+
+ // Every PeerConnection signaling state in order that has been seen by the
+ // observer.
+ std::vector<PeerConnectionInterface::SignalingState>
+ peer_connection_signaling_state_history() const {
+ return peer_connection_signaling_state_history_;
+ }
+
+ void AddAudioVideoTracks() {
+ AddAudioTrack();
+ AddVideoTrack();
+ }
+
+ rtc::scoped_refptr<RtpSenderInterface> AddAudioTrack() {
+ return AddTrack(CreateLocalAudioTrack());
+ }
+
+ rtc::scoped_refptr<RtpSenderInterface> AddVideoTrack() {
+ return AddTrack(CreateLocalVideoTrack());
+ }
+
+ rtc::scoped_refptr<webrtc::AudioTrackInterface> CreateLocalAudioTrack() {
+ cricket::AudioOptions options;
+ // Disable highpass filter so that we can get all the test audio frames.
+ options.highpass_filter = false;
+ rtc::scoped_refptr<webrtc::AudioSourceInterface> source =
+ peer_connection_factory_->CreateAudioSource(options);
+ // TODO(perkj): Test audio source when it is implemented. Currently audio
+    // always uses the default input.
+ return peer_connection_factory_->CreateAudioTrack(rtc::CreateRandomUuid(),
+ source.get());
+ }
+
+ rtc::scoped_refptr<webrtc::VideoTrackInterface> CreateLocalVideoTrack() {
+ webrtc::FakePeriodicVideoSource::Config config;
+ config.timestamp_offset_ms = rtc::TimeMillis();
+ return CreateLocalVideoTrackInternal(config);
+ }
+
+ rtc::scoped_refptr<webrtc::VideoTrackInterface>
+ CreateLocalVideoTrackWithConfig(
+ webrtc::FakePeriodicVideoSource::Config config) {
+ return CreateLocalVideoTrackInternal(config);
+ }
+
+ rtc::scoped_refptr<webrtc::VideoTrackInterface>
+ CreateLocalVideoTrackWithRotation(webrtc::VideoRotation rotation) {
+ webrtc::FakePeriodicVideoSource::Config config;
+ config.rotation = rotation;
+ config.timestamp_offset_ms = rtc::TimeMillis();
+ return CreateLocalVideoTrackInternal(config);
+ }
+
+ rtc::scoped_refptr<RtpSenderInterface> AddTrack(
+ rtc::scoped_refptr<MediaStreamTrackInterface> track,
+ const std::vector<std::string>& stream_ids = {}) {
+ EXPECT_TRUE(track);
+ if (!track) {
+ return nullptr;
+ }
+ auto result = pc()->AddTrack(track, stream_ids);
+ EXPECT_EQ(RTCErrorType::NONE, result.error().type());
+ if (result.ok()) {
+ return result.MoveValue();
+ } else {
+ return nullptr;
+ }
+ }
+
+ std::vector<rtc::scoped_refptr<RtpReceiverInterface>> GetReceiversOfType(
+ cricket::MediaType media_type) {
+ std::vector<rtc::scoped_refptr<RtpReceiverInterface>> receivers;
+ for (const auto& receiver : pc()->GetReceivers()) {
+ if (receiver->media_type() == media_type) {
+ receivers.push_back(receiver);
+ }
+ }
+ return receivers;
+ }
+
+ rtc::scoped_refptr<RtpTransceiverInterface> GetFirstTransceiverOfType(
+ cricket::MediaType media_type) {
+ for (auto transceiver : pc()->GetTransceivers()) {
+ if (transceiver->receiver()->media_type() == media_type) {
+ return transceiver;
+ }
+ }
+ return nullptr;
+ }
+
+ bool SignalingStateStable() {
+ return pc()->signaling_state() == webrtc::PeerConnectionInterface::kStable;
+ }
+
+ bool IceGatheringStateComplete() {
+ return pc()->ice_gathering_state() ==
+ webrtc::PeerConnectionInterface::kIceGatheringComplete;
+ }
+
+ void CreateDataChannel() { CreateDataChannel(nullptr); }
+
+ void CreateDataChannel(const webrtc::DataChannelInit* init) {
+ CreateDataChannel(kDataChannelLabel, init);
+ }
+
+ void CreateDataChannel(const std::string& label,
+ const webrtc::DataChannelInit* init) {
+ auto data_channel_or_error = pc()->CreateDataChannelOrError(label, init);
+ ASSERT_TRUE(data_channel_or_error.ok());
+ data_channels_.push_back(data_channel_or_error.MoveValue());
+ ASSERT_TRUE(data_channels_.back().get() != nullptr);
+ data_observers_.push_back(
+ std::make_unique<MockDataChannelObserver>(data_channels_.back().get()));
+ }
+
+ // Return the last observed data channel.
+ DataChannelInterface* data_channel() {
+ if (data_channels_.size() == 0) {
+ return nullptr;
+ }
+ return data_channels_.back().get();
+ }
+ // Return all data channels.
+ std::vector<rtc::scoped_refptr<DataChannelInterface>>& data_channels() {
+ return data_channels_;
+ }
+
+ const MockDataChannelObserver* data_observer() const {
+ if (data_observers_.size() == 0) {
+ return nullptr;
+ }
+ return data_observers_.back().get();
+ }
+
+ std::vector<std::unique_ptr<MockDataChannelObserver>>& data_observers() {
+ return data_observers_;
+ }
+
+ int audio_frames_received() const {
+ return fake_audio_capture_module_->frames_received();
+ }
+
+  // Returns the minimum number of video frames received across all tracks.
+ //
+ // Can be used like:
+ // EXPECT_GE(expected_frames, min_video_frames_received_per_track());
+ //
+ // To ensure that all video tracks received at least a certain number of
+ // frames.
+ int min_video_frames_received_per_track() const {
+ int min_frames = INT_MAX;
+ if (fake_video_renderers_.empty()) {
+ return 0;
+ }
+
+ for (const auto& pair : fake_video_renderers_) {
+ min_frames = std::min(min_frames, pair.second->num_rendered_frames());
+ }
+ return min_frames;
+ }
+
+ // Returns a MockStatsObserver in a state after stats gathering finished,
+ // which can be used to access the gathered stats.
+ rtc::scoped_refptr<MockStatsObserver> OldGetStatsForTrack(
+ webrtc::MediaStreamTrackInterface* track) {
+ auto observer = rtc::make_ref_counted<MockStatsObserver>();
+ EXPECT_TRUE(peer_connection_->GetStats(
+ observer.get(), nullptr,
+ PeerConnectionInterface::kStatsOutputLevelStandard));
+ EXPECT_TRUE_WAIT(observer->called(), kDefaultTimeout);
+ return observer;
+ }
+
+ // Version that doesn't take a track "filter", and gathers all stats.
+ rtc::scoped_refptr<MockStatsObserver> OldGetStats() {
+ return OldGetStatsForTrack(nullptr);
+ }
+
+ // Synchronously gets stats and returns them. If it times out, fails the test
+ // and returns null.
+ rtc::scoped_refptr<const webrtc::RTCStatsReport> NewGetStats() {
+ auto callback =
+ rtc::make_ref_counted<webrtc::MockRTCStatsCollectorCallback>();
+ peer_connection_->GetStats(callback.get());
+ EXPECT_TRUE_WAIT(callback->called(), kDefaultTimeout);
+ return callback->report();
+ }
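+  // Illustrative (not code from this file): a report returned by NewGetStats()
+  // can be queried the same way the delay-stats helpers below do, e.g.
+  //   auto report = NewGetStats();
+  //   auto inbound =
+  //       report->GetStatsOfType<webrtc::RTCInboundRTPStreamStats>();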
+
+ int rendered_width() {
+ EXPECT_FALSE(fake_video_renderers_.empty());
+ return fake_video_renderers_.empty()
+ ? 0
+ : fake_video_renderers_.begin()->second->width();
+ }
+
+ int rendered_height() {
+ EXPECT_FALSE(fake_video_renderers_.empty());
+ return fake_video_renderers_.empty()
+ ? 0
+ : fake_video_renderers_.begin()->second->height();
+ }
+
+ double rendered_aspect_ratio() {
+ if (rendered_height() == 0) {
+ return 0.0;
+ }
+ return static_cast<double>(rendered_width()) / rendered_height();
+ }
+
+ webrtc::VideoRotation rendered_rotation() {
+ EXPECT_FALSE(fake_video_renderers_.empty());
+ return fake_video_renderers_.empty()
+ ? webrtc::kVideoRotation_0
+ : fake_video_renderers_.begin()->second->rotation();
+ }
+
+ int local_rendered_width() {
+ return local_video_renderer_ ? local_video_renderer_->width() : 0;
+ }
+
+ int local_rendered_height() {
+ return local_video_renderer_ ? local_video_renderer_->height() : 0;
+ }
+
+ double local_rendered_aspect_ratio() {
+ if (local_rendered_height() == 0) {
+ return 0.0;
+ }
+ return static_cast<double>(local_rendered_width()) /
+ local_rendered_height();
+ }
+
+ size_t number_of_remote_streams() {
+ if (!pc()) {
+ return 0;
+ }
+ return pc()->remote_streams()->count();
+ }
+
+ StreamCollectionInterface* remote_streams() const {
+ if (!pc()) {
+ ADD_FAILURE();
+ return nullptr;
+ }
+ return pc()->remote_streams().get();
+ }
+
+ StreamCollectionInterface* local_streams() {
+ if (!pc()) {
+ ADD_FAILURE();
+ return nullptr;
+ }
+ return pc()->local_streams().get();
+ }
+
+ webrtc::PeerConnectionInterface::SignalingState signaling_state() {
+ return pc()->signaling_state();
+ }
+
+ webrtc::PeerConnectionInterface::IceConnectionState ice_connection_state() {
+ return pc()->ice_connection_state();
+ }
+
+ webrtc::PeerConnectionInterface::IceConnectionState
+ standardized_ice_connection_state() {
+ return pc()->standardized_ice_connection_state();
+ }
+
+ webrtc::PeerConnectionInterface::IceGatheringState ice_gathering_state() {
+ return pc()->ice_gathering_state();
+ }
+
+ // Returns a MockRtpReceiverObserver for each RtpReceiver returned by
+ // GetReceivers. They're updated automatically when a remote offer/answer
+ // from the fake signaling channel is applied, or when
+ // ResetRtpReceiverObservers below is called.
+ const std::vector<std::unique_ptr<MockRtpReceiverObserver>>&
+ rtp_receiver_observers() {
+ return rtp_receiver_observers_;
+ }
+
+ void ResetRtpReceiverObservers() {
+ rtp_receiver_observers_.clear();
+ for (const rtc::scoped_refptr<RtpReceiverInterface>& receiver :
+ pc()->GetReceivers()) {
+ std::unique_ptr<MockRtpReceiverObserver> observer(
+ new MockRtpReceiverObserver(receiver->media_type()));
+ receiver->SetObserver(observer.get());
+ rtp_receiver_observers_.push_back(std::move(observer));
+ }
+ }
+
+ rtc::FakeNetworkManager* network_manager() const {
+ return fake_network_manager_.get();
+ }
+ cricket::PortAllocator* port_allocator() const { return port_allocator_; }
+
+ webrtc::FakeRtcEventLogFactory* event_log_factory() const {
+ return event_log_factory_;
+ }
+
+ const cricket::Candidate& last_candidate_gathered() const {
+ return last_candidate_gathered_;
+ }
+ const cricket::IceCandidateErrorEvent& error_event() const {
+ return error_event_;
+ }
+
+ // Sets the mDNS responder for the owned fake network manager and keeps a
+ // reference to the responder.
+ void SetMdnsResponder(
+ std::unique_ptr<webrtc::FakeMdnsResponder> mdns_responder) {
+ RTC_DCHECK(mdns_responder != nullptr);
+ mdns_responder_ = mdns_responder.get();
+ network_manager()->set_mdns_responder(std::move(mdns_responder));
+ }
+
+ // Returns null on failure.
+ std::unique_ptr<SessionDescriptionInterface> CreateOfferAndWait() {
+ auto observer =
+ rtc::make_ref_counted<MockCreateSessionDescriptionObserver>();
+ pc()->CreateOffer(observer.get(), offer_answer_options_);
+ return WaitForDescriptionFromObserver(observer.get());
+ }
+ bool Rollback() {
+ return SetRemoteDescription(
+ webrtc::CreateSessionDescription(SdpType::kRollback, ""));
+ }
+
+ // Functions for querying stats.
+ void StartWatchingDelayStats() {
+    // Capture baseline values for audio packets, arrival delay, samples
+    // received and concealed samples.
+ auto received_stats = NewGetStats();
+ auto rtp_stats =
+ received_stats->GetStatsOfType<webrtc::RTCInboundRTPStreamStats>()[0];
+ ASSERT_TRUE(rtp_stats->relative_packet_arrival_delay.is_defined());
+ ASSERT_TRUE(rtp_stats->packets_received.is_defined());
+ ASSERT_TRUE(rtp_stats->track_id.is_defined());
+ rtp_stats_id_ = rtp_stats->id();
+ audio_packets_stat_ = *rtp_stats->packets_received;
+ audio_delay_stat_ = *rtp_stats->relative_packet_arrival_delay;
+ audio_samples_stat_ = *rtp_stats->total_samples_received;
+ audio_concealed_stat_ = *rtp_stats->concealed_samples;
+ }
+
+ void UpdateDelayStats(std::string tag, int desc_size) {
+ auto report = NewGetStats();
+ auto rtp_stats =
+ report->GetAs<webrtc::RTCInboundRTPStreamStats>(rtp_stats_id_);
+ ASSERT_TRUE(rtp_stats);
+ auto delta_packets = *rtp_stats->packets_received - audio_packets_stat_;
+ auto delta_rpad =
+ *rtp_stats->relative_packet_arrival_delay - audio_delay_stat_;
+ auto recent_delay = delta_packets > 0 ? delta_rpad / delta_packets : -1;
+ // The purpose of these checks is to sound the alarm early if we introduce
+ // serious regressions. The numbers are not acceptable for production, but
+ // occur on slow bots.
+ //
+ // An average relative packet arrival delay over the renegotiation of
+ // > 100 ms indicates that something is dramatically wrong, and will impact
+ // quality for sure.
+ // Worst bots:
+ // linux_x86_dbg at 0.206
+#if !defined(NDEBUG)
+ EXPECT_GT(0.25, recent_delay) << tag << " size " << desc_size;
+#else
+ EXPECT_GT(0.1, recent_delay) << tag << " size " << desc_size;
+#endif
+ auto delta_samples =
+ *rtp_stats->total_samples_received - audio_samples_stat_;
+ auto delta_concealed =
+ *rtp_stats->concealed_samples - audio_concealed_stat_;
+    // These limits should be adjusted down as we improve: ideally, concealing
+    // more than 4000 samples during a renegotiation would be unacceptable,
+    // but some bots are slow.
+    //
+    // Worst bots:
+ // linux_more_configs bot at conceal count 5184
+ // android_arm_rel at conceal count 9241
+ // linux_x86_dbg at 15174
+#if !defined(NDEBUG)
+ EXPECT_GT(18000U, delta_concealed) << "Concealed " << delta_concealed
+ << " of " << delta_samples << " samples";
+#else
+ EXPECT_GT(15000U, delta_concealed) << "Concealed " << delta_concealed
+ << " of " << delta_samples << " samples";
+#endif
+ // Concealing more than 20% of samples during a renegotiation is
+ // unacceptable.
+ // Worst bots:
+ // Nondebug: Linux32 Release at conceal rate 0.606597 (CI run)
+ // Debug: linux_x86_dbg bot at conceal rate 0.854
+ if (delta_samples > 0) {
+#if !defined(NDEBUG)
+ EXPECT_LT(1.0 * delta_concealed / delta_samples, 0.95)
+ << "Concealed " << delta_concealed << " of " << delta_samples
+ << " samples";
+#else
+ EXPECT_LT(1.0 * delta_concealed / delta_samples, 0.7)
+ << "Concealed " << delta_concealed << " of " << delta_samples
+ << " samples";
+#endif
+ }
+ // Increment trailing counters
+ audio_packets_stat_ = *rtp_stats->packets_received;
+ audio_delay_stat_ = *rtp_stats->relative_packet_arrival_delay;
+ audio_samples_stat_ = *rtp_stats->total_samples_received;
+ audio_concealed_stat_ = *rtp_stats->concealed_samples;
+ }
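+  // Illustrative combination of the two helpers above (a sketch, not code
+  // taken from this file; `sdp_size` is a hypothetical description size):
+  //   StartWatchingDelayStats();                 // baseline once audio flows
+  //   /* ... renegotiate ... */
+  //   UpdateDelayStats("first renegotiation", sdp_size);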
+
+  // Sets the number of candidates expected to be gathered.
+ void ExpectCandidates(int candidate_count) {
+ candidates_expected_ = candidate_count;
+ }
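+  // The expectation is enforced in OnIceCandidate() below, which checks and
+  // decrements `candidates_expected_` for every gathered candidate.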
+
+ private:
+ // Constructor used by friend class PeerConnectionIntegrationBaseTest.
+ explicit PeerConnectionIntegrationWrapper(const std::string& debug_name)
+ : debug_name_(debug_name) {}
+
+ bool Init(const PeerConnectionFactory::Options* options,
+ const PeerConnectionInterface::RTCConfiguration* config,
+ webrtc::PeerConnectionDependencies dependencies,
+ rtc::SocketServer* socket_server,
+ rtc::Thread* network_thread,
+ rtc::Thread* worker_thread,
+ std::unique_ptr<webrtc::FakeRtcEventLogFactory> event_log_factory,
+ bool reset_encoder_factory,
+ bool reset_decoder_factory,
+ bool create_media_engine) {
+ // There's an error in this test code if Init ends up being called twice.
+ RTC_DCHECK(!peer_connection_);
+ RTC_DCHECK(!peer_connection_factory_);
+
+ fake_network_manager_.reset(new rtc::FakeNetworkManager());
+ fake_network_manager_->AddInterface(kDefaultLocalAddress);
+
+ std::unique_ptr<cricket::PortAllocator> port_allocator(
+ new cricket::BasicPortAllocator(
+ fake_network_manager_.get(),
+ std::make_unique<rtc::BasicPacketSocketFactory>(socket_server)));
+ port_allocator_ = port_allocator.get();
+ fake_audio_capture_module_ = FakeAudioCaptureModule::Create();
+ if (!fake_audio_capture_module_) {
+ return false;
+ }
+ rtc::Thread* const signaling_thread = rtc::Thread::Current();
+
+ webrtc::PeerConnectionFactoryDependencies pc_factory_dependencies;
+ pc_factory_dependencies.network_thread = network_thread;
+ pc_factory_dependencies.worker_thread = worker_thread;
+ pc_factory_dependencies.signaling_thread = signaling_thread;
+ pc_factory_dependencies.task_queue_factory =
+ webrtc::CreateDefaultTaskQueueFactory();
+ pc_factory_dependencies.trials = std::make_unique<FieldTrialBasedConfig>();
+ pc_factory_dependencies.metronome =
+ std::make_unique<TaskQueueMetronome>(TimeDelta::Millis(8));
+ cricket::MediaEngineDependencies media_deps;
+ media_deps.task_queue_factory =
+ pc_factory_dependencies.task_queue_factory.get();
+ media_deps.adm = fake_audio_capture_module_;
+ webrtc::SetMediaEngineDefaults(&media_deps);
+
+ if (reset_encoder_factory) {
+ media_deps.video_encoder_factory.reset();
+ }
+ if (reset_decoder_factory) {
+ media_deps.video_decoder_factory.reset();
+ }
+
+ if (!media_deps.audio_processing) {
+      // If the standard creation path for APM returned a null pointer, use
+      // the builder for testing to create an APM object instead.
+ media_deps.audio_processing = AudioProcessingBuilderForTesting().Create();
+ }
+
+ media_deps.trials = pc_factory_dependencies.trials.get();
+
+ if (create_media_engine) {
+ pc_factory_dependencies.media_engine =
+ cricket::CreateMediaEngine(std::move(media_deps));
+ }
+ pc_factory_dependencies.call_factory = webrtc::CreateCallFactory();
+ if (event_log_factory) {
+ event_log_factory_ = event_log_factory.get();
+ pc_factory_dependencies.event_log_factory = std::move(event_log_factory);
+ } else {
+ pc_factory_dependencies.event_log_factory =
+ std::make_unique<webrtc::RtcEventLogFactory>(
+ pc_factory_dependencies.task_queue_factory.get());
+ }
+ peer_connection_factory_ = webrtc::CreateModularPeerConnectionFactory(
+ std::move(pc_factory_dependencies));
+
+ if (!peer_connection_factory_) {
+ return false;
+ }
+ if (options) {
+ peer_connection_factory_->SetOptions(*options);
+ }
+ if (config) {
+ sdp_semantics_ = config->sdp_semantics;
+ }
+
+ dependencies.allocator = std::move(port_allocator);
+ peer_connection_ = CreatePeerConnection(config, std::move(dependencies));
+ return peer_connection_.get() != nullptr;
+ }
+
+ rtc::scoped_refptr<webrtc::PeerConnectionInterface> CreatePeerConnection(
+ const PeerConnectionInterface::RTCConfiguration* config,
+ webrtc::PeerConnectionDependencies dependencies) {
+ PeerConnectionInterface::RTCConfiguration modified_config;
+ modified_config.sdp_semantics = sdp_semantics_;
+ // If `config` is null, this will result in a default configuration being
+ // used.
+ if (config) {
+ modified_config = *config;
+ }
+ // Disable resolution adaptation; we don't want it interfering with the
+ // test results.
+ // TODO(deadbeef): Do something more robust. Since we're testing for aspect
+ // ratios and not specific resolutions, is this even necessary?
+ modified_config.set_cpu_adaptation(false);
+
+ dependencies.observer = this;
+ auto peer_connection_or_error =
+ peer_connection_factory_->CreatePeerConnectionOrError(
+ modified_config, std::move(dependencies));
+ return peer_connection_or_error.ok() ? peer_connection_or_error.MoveValue()
+ : nullptr;
+ }
+
+ void set_signaling_message_receiver(
+ SignalingMessageReceiver* signaling_message_receiver) {
+ signaling_message_receiver_ = signaling_message_receiver;
+ }
+
+ void set_signaling_delay_ms(int delay_ms) { signaling_delay_ms_ = delay_ms; }
+
+ void set_signal_ice_candidates(bool signal) {
+ signal_ice_candidates_ = signal;
+ }
+
+ rtc::scoped_refptr<webrtc::VideoTrackInterface> CreateLocalVideoTrackInternal(
+ webrtc::FakePeriodicVideoSource::Config config) {
+ // Set max frame rate to 10fps to reduce the risk of test flakiness.
+ // TODO(deadbeef): Do something more robust.
+ config.frame_interval_ms = 100;
+
+ video_track_sources_.emplace_back(
+ rtc::make_ref_counted<webrtc::FakePeriodicVideoTrackSource>(
+ config, false /* remote */));
+ rtc::scoped_refptr<webrtc::VideoTrackInterface> track(
+ peer_connection_factory_->CreateVideoTrack(
+ rtc::CreateRandomUuid(), video_track_sources_.back().get()));
+ if (!local_video_renderer_) {
+ local_video_renderer_.reset(
+ new webrtc::FakeVideoTrackRenderer(track.get()));
+ }
+ return track;
+ }
+
+ void HandleIncomingOffer(const std::string& msg) {
+ RTC_LOG(LS_INFO) << debug_name_ << ": HandleIncomingOffer";
+ std::unique_ptr<SessionDescriptionInterface> desc =
+ webrtc::CreateSessionDescription(SdpType::kOffer, msg);
+ if (received_sdp_munger_) {
+ received_sdp_munger_(desc->description());
+ }
+
+ EXPECT_TRUE(SetRemoteDescription(std::move(desc)));
+ // Setting a remote description may have changed the number of receivers,
+ // so reset the receiver observers.
+ ResetRtpReceiverObservers();
+ if (remote_offer_handler_) {
+ remote_offer_handler_();
+ }
+ auto answer = CreateAnswer();
+ ASSERT_NE(nullptr, answer);
+ EXPECT_TRUE(SetLocalDescriptionAndSendSdpMessage(std::move(answer)));
+ }
+
+ void HandleIncomingAnswer(const std::string& msg) {
+ RTC_LOG(LS_INFO) << debug_name_ << ": HandleIncomingAnswer";
+ std::unique_ptr<SessionDescriptionInterface> desc =
+ webrtc::CreateSessionDescription(SdpType::kAnswer, msg);
+ if (received_sdp_munger_) {
+ received_sdp_munger_(desc->description());
+ }
+
+ EXPECT_TRUE(SetRemoteDescription(std::move(desc)));
+ // Set the RtpReceiverObserver after receivers are created.
+ ResetRtpReceiverObservers();
+ }
+
+ // Returns null on failure.
+ std::unique_ptr<SessionDescriptionInterface> CreateAnswer() {
+ auto observer =
+ rtc::make_ref_counted<MockCreateSessionDescriptionObserver>();
+ pc()->CreateAnswer(observer.get(), offer_answer_options_);
+ return WaitForDescriptionFromObserver(observer.get());
+ }
+
+ std::unique_ptr<SessionDescriptionInterface> WaitForDescriptionFromObserver(
+ MockCreateSessionDescriptionObserver* observer) {
+ EXPECT_EQ_WAIT(true, observer->called(), kDefaultTimeout);
+ if (!observer->result()) {
+ return nullptr;
+ }
+ auto description = observer->MoveDescription();
+ if (generated_sdp_munger_) {
+ generated_sdp_munger_(description->description());
+ }
+ return description;
+ }
+
+ // Setting the local description and sending the SDP message over the fake
+ // signaling channel are combined into the same method because the SDP
+ // message needs to be sent as soon as SetLocalDescription finishes, without
+ // waiting for the observer to be called. This ensures that ICE candidates
+ // don't outrace the description.
+ bool SetLocalDescriptionAndSendSdpMessage(
+ std::unique_ptr<SessionDescriptionInterface> desc) {
+ auto observer = rtc::make_ref_counted<MockSetSessionDescriptionObserver>();
+ RTC_LOG(LS_INFO) << debug_name_ << ": SetLocalDescriptionAndSendSdpMessage";
+ SdpType type = desc->GetType();
+ std::string sdp;
+ EXPECT_TRUE(desc->ToString(&sdp));
+ RTC_LOG(LS_INFO) << debug_name_ << ": local SDP contents=\n" << sdp;
+ pc()->SetLocalDescription(observer.get(), desc.release());
+ RemoveUnusedVideoRenderers();
+ // As mentioned above, we need to send the message immediately after
+ // SetLocalDescription.
+ SendSdpMessage(type, sdp);
+ EXPECT_TRUE_WAIT(observer->called(), kDefaultTimeout);
+ return true;
+ }
+
+ bool SetRemoteDescription(std::unique_ptr<SessionDescriptionInterface> desc) {
+ auto observer = rtc::make_ref_counted<MockSetSessionDescriptionObserver>();
+ RTC_LOG(LS_INFO) << debug_name_ << ": SetRemoteDescription";
+ pc()->SetRemoteDescription(observer.get(), desc.release());
+ RemoveUnusedVideoRenderers();
+ EXPECT_TRUE_WAIT(observer->called(), kDefaultTimeout);
+ return observer->result();
+ }
+
+  // This is a workaround to remove unused fake_video_renderers from
+  // transceivers that have either stopped or are no longer receiving.
+ void RemoveUnusedVideoRenderers() {
+ if (sdp_semantics_ != SdpSemantics::kUnifiedPlan) {
+ return;
+ }
+ auto transceivers = pc()->GetTransceivers();
+ std::set<std::string> active_renderers;
+ for (auto& transceiver : transceivers) {
+ // Note - we don't check for direction here. This function is called
+ // before direction is set, and in that case, we should not remove
+ // the renderer.
+ if (transceiver->receiver()->media_type() == cricket::MEDIA_TYPE_VIDEO) {
+ active_renderers.insert(transceiver->receiver()->track()->id());
+ }
+ }
+ for (auto it = fake_video_renderers_.begin();
+ it != fake_video_renderers_.end();) {
+ // Remove fake video renderers belonging to any non-active transceivers.
+ if (!active_renderers.count(it->first)) {
+ it = fake_video_renderers_.erase(it);
+ } else {
+ it++;
+ }
+ }
+ }
+
+ // Simulate sending a blob of SDP with delay `signaling_delay_ms_` (0 by
+ // default).
+ void SendSdpMessage(SdpType type, const std::string& msg) {
+ if (signaling_delay_ms_ == 0) {
+ RelaySdpMessageIfReceiverExists(type, msg);
+ } else {
+ rtc::Thread::Current()->PostDelayedTask(
+ SafeTask(task_safety_.flag(),
+ [this, type, msg] {
+ RelaySdpMessageIfReceiverExists(type, msg);
+ }),
+ TimeDelta::Millis(signaling_delay_ms_));
+ }
+ }
+
+ void RelaySdpMessageIfReceiverExists(SdpType type, const std::string& msg) {
+ if (signaling_message_receiver_) {
+ signaling_message_receiver_->ReceiveSdpMessage(type, msg);
+ }
+ }
+
+ // Simulate trickling an ICE candidate with delay `signaling_delay_ms_` (0 by
+ // default).
+ void SendIceMessage(const std::string& sdp_mid,
+ int sdp_mline_index,
+ const std::string& msg) {
+ if (signaling_delay_ms_ == 0) {
+ RelayIceMessageIfReceiverExists(sdp_mid, sdp_mline_index, msg);
+ } else {
+ rtc::Thread::Current()->PostDelayedTask(
+ SafeTask(task_safety_.flag(),
+ [this, sdp_mid, sdp_mline_index, msg] {
+ RelayIceMessageIfReceiverExists(sdp_mid, sdp_mline_index,
+ msg);
+ }),
+ TimeDelta::Millis(signaling_delay_ms_));
+ }
+ }
+
+ void RelayIceMessageIfReceiverExists(const std::string& sdp_mid,
+ int sdp_mline_index,
+ const std::string& msg) {
+ if (signaling_message_receiver_) {
+ signaling_message_receiver_->ReceiveIceMessage(sdp_mid, sdp_mline_index,
+ msg);
+ }
+ }
+
+ // SignalingMessageReceiver callbacks.
+ void ReceiveSdpMessage(SdpType type, const std::string& msg) override {
+ if (type == SdpType::kOffer) {
+ HandleIncomingOffer(msg);
+ } else {
+ HandleIncomingAnswer(msg);
+ }
+ }
+
+ void ReceiveIceMessage(const std::string& sdp_mid,
+ int sdp_mline_index,
+ const std::string& msg) override {
+ RTC_LOG(LS_INFO) << debug_name_ << ": ReceiveIceMessage";
+ absl::optional<RTCError> result;
+ pc()->AddIceCandidate(absl::WrapUnique(webrtc::CreateIceCandidate(
+ sdp_mid, sdp_mline_index, msg, nullptr)),
+ [&result](RTCError r) { result = r; });
+ EXPECT_TRUE_WAIT(result.has_value(), kDefaultTimeout);
+ EXPECT_TRUE(result.value().ok());
+ }
+
+ // PeerConnectionObserver callbacks.
+ void OnSignalingChange(
+ webrtc::PeerConnectionInterface::SignalingState new_state) override {
+ EXPECT_EQ(pc()->signaling_state(), new_state);
+ peer_connection_signaling_state_history_.push_back(new_state);
+ }
+ void OnAddTrack(rtc::scoped_refptr<RtpReceiverInterface> receiver,
+ const std::vector<rtc::scoped_refptr<MediaStreamInterface>>&
+ streams) override {
+ if (receiver->media_type() == cricket::MEDIA_TYPE_VIDEO) {
+ rtc::scoped_refptr<VideoTrackInterface> video_track(
+ static_cast<VideoTrackInterface*>(receiver->track().get()));
+ ASSERT_TRUE(fake_video_renderers_.find(video_track->id()) ==
+ fake_video_renderers_.end());
+ fake_video_renderers_[video_track->id()] =
+ std::make_unique<FakeVideoTrackRenderer>(video_track.get());
+ }
+ }
+ void OnRemoveTrack(
+ rtc::scoped_refptr<RtpReceiverInterface> receiver) override {
+ if (receiver->media_type() == cricket::MEDIA_TYPE_VIDEO) {
+ auto it = fake_video_renderers_.find(receiver->track()->id());
+ if (it != fake_video_renderers_.end()) {
+ fake_video_renderers_.erase(it);
+ } else {
+ RTC_LOG(LS_ERROR) << "OnRemoveTrack called for non-active renderer";
+ }
+ }
+ }
+ void OnRenegotiationNeeded() override {}
+ void OnIceConnectionChange(
+ webrtc::PeerConnectionInterface::IceConnectionState new_state) override {
+ EXPECT_EQ(pc()->ice_connection_state(), new_state);
+ ice_connection_state_history_.push_back(new_state);
+ }
+ void OnStandardizedIceConnectionChange(
+ webrtc::PeerConnectionInterface::IceConnectionState new_state) override {
+ standardized_ice_connection_state_history_.push_back(new_state);
+ }
+ void OnConnectionChange(
+ webrtc::PeerConnectionInterface::PeerConnectionState new_state) override {
+ peer_connection_state_history_.push_back(new_state);
+ }
+
+ void OnIceGatheringChange(
+ webrtc::PeerConnectionInterface::IceGatheringState new_state) override {
+ EXPECT_EQ(pc()->ice_gathering_state(), new_state);
+ ice_gathering_state_history_.push_back(new_state);
+ }
+
+ void OnIceSelectedCandidatePairChanged(
+ const cricket::CandidatePairChangeEvent& event) {
+ ice_candidate_pair_change_history_.push_back(event);
+ }
+
+ void OnIceCandidate(const webrtc::IceCandidateInterface* candidate) override {
+ RTC_LOG(LS_INFO) << debug_name_ << ": OnIceCandidate";
+
+ if (remote_async_resolver_) {
+ const auto& local_candidate = candidate->candidate();
+ if (local_candidate.address().IsUnresolvedIP()) {
+ RTC_DCHECK(local_candidate.type() == cricket::LOCAL_PORT_TYPE);
+ rtc::SocketAddress resolved_addr(local_candidate.address());
+ const auto resolved_ip = mdns_responder_->GetMappedAddressForName(
+ local_candidate.address().hostname());
+ RTC_DCHECK(!resolved_ip.IsNil());
+ resolved_addr.SetResolvedIP(resolved_ip);
+ EXPECT_CALL(*remote_async_resolver_, GetResolvedAddress(_, _))
+ .WillOnce(DoAll(SetArgPointee<1>(resolved_addr), Return(true)));
+ EXPECT_CALL(*remote_async_resolver_, Destroy(_));
+ }
+ }
+
+    // Check that more candidates were still expected.
+ EXPECT_GT(candidates_expected_, 1);
+ candidates_expected_--;
+ std::string ice_sdp;
+ EXPECT_TRUE(candidate->ToString(&ice_sdp));
+ if (signaling_message_receiver_ == nullptr || !signal_ice_candidates_) {
+ // Remote party may be deleted.
+ return;
+ }
+ SendIceMessage(candidate->sdp_mid(), candidate->sdp_mline_index(), ice_sdp);
+ last_candidate_gathered_ = candidate->candidate();
+ }
+ void OnIceCandidateError(const std::string& address,
+ int port,
+ const std::string& url,
+ int error_code,
+ const std::string& error_text) override {
+ error_event_ = cricket::IceCandidateErrorEvent(address, port, url,
+ error_code, error_text);
+ }
+ void OnDataChannel(
+ rtc::scoped_refptr<DataChannelInterface> data_channel) override {
+ RTC_LOG(LS_INFO) << debug_name_ << ": OnDataChannel";
+ data_channels_.push_back(data_channel);
+ data_observers_.push_back(
+ std::make_unique<MockDataChannelObserver>(data_channel.get()));
+ }
+
+ std::string debug_name_;
+
+ std::unique_ptr<rtc::FakeNetworkManager> fake_network_manager_;
+  // Reference to the mDNS responder owned by `fake_network_manager_`, once it
+  // has been set.
+ webrtc::FakeMdnsResponder* mdns_responder_ = nullptr;
+
+ rtc::scoped_refptr<webrtc::PeerConnectionInterface> peer_connection_;
+ rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface>
+ peer_connection_factory_;
+
+ cricket::PortAllocator* port_allocator_;
+ // Needed to keep track of number of frames sent.
+ rtc::scoped_refptr<FakeAudioCaptureModule> fake_audio_capture_module_;
+ // Needed to keep track of number of frames received.
+ std::map<std::string, std::unique_ptr<webrtc::FakeVideoTrackRenderer>>
+ fake_video_renderers_;
+ // Needed to ensure frames aren't received for removed tracks.
+ std::vector<std::unique_ptr<webrtc::FakeVideoTrackRenderer>>
+ removed_fake_video_renderers_;
+
+ // For remote peer communication.
+ SignalingMessageReceiver* signaling_message_receiver_ = nullptr;
+ int signaling_delay_ms_ = 0;
+ bool signal_ice_candidates_ = true;
+ cricket::Candidate last_candidate_gathered_;
+ cricket::IceCandidateErrorEvent error_event_;
+
+ // Store references to the video sources we've created, so that we can stop
+ // them, if required.
+ std::vector<rtc::scoped_refptr<webrtc::VideoTrackSource>>
+ video_track_sources_;
+ // `local_video_renderer_` attached to the first created local video track.
+ std::unique_ptr<webrtc::FakeVideoTrackRenderer> local_video_renderer_;
+
+ SdpSemantics sdp_semantics_;
+ PeerConnectionInterface::RTCOfferAnswerOptions offer_answer_options_;
+ std::function<void(cricket::SessionDescription*)> received_sdp_munger_;
+ std::function<void(cricket::SessionDescription*)> generated_sdp_munger_;
+ std::function<void()> remote_offer_handler_;
+ rtc::MockAsyncResolver* remote_async_resolver_ = nullptr;
+  // All data channels either created or observed on this PeerConnection.
+ std::vector<rtc::scoped_refptr<DataChannelInterface>> data_channels_;
+ std::vector<std::unique_ptr<MockDataChannelObserver>> data_observers_;
+
+ std::vector<std::unique_ptr<MockRtpReceiverObserver>> rtp_receiver_observers_;
+
+ std::vector<PeerConnectionInterface::IceConnectionState>
+ ice_connection_state_history_;
+ std::vector<PeerConnectionInterface::IceConnectionState>
+ standardized_ice_connection_state_history_;
+ std::vector<PeerConnectionInterface::PeerConnectionState>
+ peer_connection_state_history_;
+ std::vector<PeerConnectionInterface::IceGatheringState>
+ ice_gathering_state_history_;
+ std::vector<cricket::CandidatePairChangeEvent>
+ ice_candidate_pair_change_history_;
+ std::vector<PeerConnectionInterface::SignalingState>
+ peer_connection_signaling_state_history_;
+ webrtc::FakeRtcEventLogFactory* event_log_factory_;
+
+ // Number of ICE candidates expected. The default is no limit.
+ int candidates_expected_ = std::numeric_limits<int>::max();
+
+ // Variables for tracking delay stats on an audio track
+ int audio_packets_stat_ = 0;
+ double audio_delay_stat_ = 0.0;
+ uint64_t audio_samples_stat_ = 0;
+ uint64_t audio_concealed_stat_ = 0;
+ std::string rtp_stats_id_;
+
+ ScopedTaskSafety task_safety_;
+
+ friend class PeerConnectionIntegrationBaseTest;
+};
+
+class MockRtcEventLogOutput : public webrtc::RtcEventLogOutput {
+ public:
+ virtual ~MockRtcEventLogOutput() = default;
+ MOCK_METHOD(bool, IsActive, (), (const, override));
+ MOCK_METHOD(bool, Write, (absl::string_view), (override));
+};
+
+// This helper object specifies how many audio/video frames the caller and
+// callee are each expected to receive, and provides helper functions for
+// setting those expectations. The object initially starts with no
+// expectations.
+class MediaExpectations {
+ public:
+ enum ExpectFrames {
+ kExpectSomeFrames,
+ kExpectNoFrames,
+ kNoExpectation,
+ };
+
+ void ExpectBidirectionalAudioAndVideo() {
+ ExpectBidirectionalAudio();
+ ExpectBidirectionalVideo();
+ }
+
+ void ExpectBidirectionalAudio() {
+ CallerExpectsSomeAudio();
+ CalleeExpectsSomeAudio();
+ }
+
+ void ExpectNoAudio() {
+ CallerExpectsNoAudio();
+ CalleeExpectsNoAudio();
+ }
+
+ void ExpectBidirectionalVideo() {
+ CallerExpectsSomeVideo();
+ CalleeExpectsSomeVideo();
+ }
+
+ void ExpectNoVideo() {
+ CallerExpectsNoVideo();
+ CalleeExpectsNoVideo();
+ }
+
+ void CallerExpectsSomeAudioAndVideo() {
+ CallerExpectsSomeAudio();
+ CallerExpectsSomeVideo();
+ }
+
+ void CalleeExpectsSomeAudioAndVideo() {
+ CalleeExpectsSomeAudio();
+ CalleeExpectsSomeVideo();
+ }
+
+ // Caller's audio functions.
+ void CallerExpectsSomeAudio(
+ int expected_audio_frames = kDefaultExpectedAudioFrameCount) {
+ caller_audio_expectation_ = kExpectSomeFrames;
+ caller_audio_frames_expected_ = expected_audio_frames;
+ }
+
+ void CallerExpectsNoAudio() {
+ caller_audio_expectation_ = kExpectNoFrames;
+ caller_audio_frames_expected_ = 0;
+ }
+
+ // Caller's video functions.
+ void CallerExpectsSomeVideo(
+ int expected_video_frames = kDefaultExpectedVideoFrameCount) {
+ caller_video_expectation_ = kExpectSomeFrames;
+ caller_video_frames_expected_ = expected_video_frames;
+ }
+
+ void CallerExpectsNoVideo() {
+ caller_video_expectation_ = kExpectNoFrames;
+ caller_video_frames_expected_ = 0;
+ }
+
+ // Callee's audio functions.
+ void CalleeExpectsSomeAudio(
+ int expected_audio_frames = kDefaultExpectedAudioFrameCount) {
+ callee_audio_expectation_ = kExpectSomeFrames;
+ callee_audio_frames_expected_ = expected_audio_frames;
+ }
+
+ void CalleeExpectsNoAudio() {
+ callee_audio_expectation_ = kExpectNoFrames;
+ callee_audio_frames_expected_ = 0;
+ }
+
+ // Callee's video functions.
+ void CalleeExpectsSomeVideo(
+ int expected_video_frames = kDefaultExpectedVideoFrameCount) {
+ callee_video_expectation_ = kExpectSomeFrames;
+ callee_video_frames_expected_ = expected_video_frames;
+ }
+
+ void CalleeExpectsNoVideo() {
+ callee_video_expectation_ = kExpectNoFrames;
+ callee_video_frames_expected_ = 0;
+ }
+
+ ExpectFrames caller_audio_expectation_ = kNoExpectation;
+ ExpectFrames caller_video_expectation_ = kNoExpectation;
+ ExpectFrames callee_audio_expectation_ = kNoExpectation;
+ ExpectFrames callee_video_expectation_ = kNoExpectation;
+ int caller_audio_frames_expected_ = 0;
+ int caller_video_frames_expected_ = 0;
+ int callee_audio_frames_expected_ = 0;
+ int callee_video_frames_expected_ = 0;
+};
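+// Sketch of how MediaExpectations is typically consumed by the test fixture
+// below (mirrors the usage in ExpectNewFrames(); the exact test body is
+// illustrative):
+//   MediaExpectations media_expectations;
+//   media_expectations.ExpectBidirectionalAudioAndVideo();
+//   ASSERT_TRUE(ExpectNewFrames(media_expectations));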
+
+class MockIceTransport : public webrtc::IceTransportInterface {
+ public:
+ MockIceTransport(const std::string& name, int component)
+ : internal_(std::make_unique<cricket::FakeIceTransport>(
+ name,
+ component,
+ nullptr /* network_thread */)) {}
+ ~MockIceTransport() = default;
+ cricket::IceTransportInternal* internal() { return internal_.get(); }
+
+ private:
+ std::unique_ptr<cricket::FakeIceTransport> internal_;
+};
+
+class MockIceTransportFactory : public IceTransportFactory {
+ public:
+ ~MockIceTransportFactory() override = default;
+ rtc::scoped_refptr<IceTransportInterface> CreateIceTransport(
+ const std::string& transport_name,
+ int component,
+ IceTransportInit init) {
+ RecordIceTransportCreated();
+ return rtc::make_ref_counted<MockIceTransport>(transport_name, component);
+ }
+ MOCK_METHOD(void, RecordIceTransportCreated, ());
+};
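+// Note: tests are assumed to inject this factory through
+// PeerConnectionDependencies::ice_transport_factory (an assumption; the
+// injection point is not shown in this file) so that
+// RecordIceTransportCreated() can be observed with EXPECT_CALL.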
+
+// Tests two PeerConnections connecting to each other end-to-end, using a
+// virtual network, fake A/V capture and fake encoder/decoders. The
+// PeerConnections share the threads/socket servers, but use separate versions
+// of everything else (including "PeerConnectionFactory"s).
+class PeerConnectionIntegrationBaseTest : public ::testing::Test {
+ public:
+ PeerConnectionIntegrationBaseTest(
+ SdpSemantics sdp_semantics,
+ absl::optional<std::string> field_trials = absl::nullopt)
+ : sdp_semantics_(sdp_semantics),
+ ss_(new rtc::VirtualSocketServer()),
+ fss_(new rtc::FirewallSocketServer(ss_.get())),
+ network_thread_(new rtc::Thread(fss_.get())),
+ worker_thread_(rtc::Thread::Create()),
+ // TODO(bugs.webrtc.org/10335): Pass optional ScopedKeyValueConfig.
+ field_trials_(new test::ScopedKeyValueConfig(
+ field_trials.has_value() ? *field_trials : "")) {
+ network_thread_->SetName("PCNetworkThread", this);
+ worker_thread_->SetName("PCWorkerThread", this);
+ RTC_CHECK(network_thread_->Start());
+ RTC_CHECK(worker_thread_->Start());
+ webrtc::metrics::Reset();
+ }
+
+ ~PeerConnectionIntegrationBaseTest() {
+    // The PeerConnections should be deleted before the TurnCustomizers.
+    // A TurnPort is created with a raw pointer to a TurnCustomizer, and the
+    // TurnPort has the same lifetime as the PeerConnection, so the
+    // TurnCustomizer must outlive the PeerConnection; otherwise Send() will
+    // hit a segfault when it is called.
+ if (caller_) {
+ caller_->set_signaling_message_receiver(nullptr);
+ caller_->pc()->Close();
+ delete SetCallerPcWrapperAndReturnCurrent(nullptr);
+ }
+ if (callee_) {
+ callee_->set_signaling_message_receiver(nullptr);
+ callee_->pc()->Close();
+ delete SetCalleePcWrapperAndReturnCurrent(nullptr);
+ }
+
+ // If turn servers were created for the test they need to be destroyed on
+ // the network thread.
+ SendTask(network_thread(), [this] {
+ turn_servers_.clear();
+ turn_customizers_.clear();
+ });
+ }
+
+ bool SignalingStateStable() {
+ return caller_->SignalingStateStable() && callee_->SignalingStateStable();
+ }
+
+ bool DtlsConnected() {
+ // TODO(deadbeef): kIceConnectionConnected currently means both ICE and DTLS
+ // are connected. This is an important distinction. Once we have separate
+ // ICE and DTLS state, this check needs to use the DTLS state.
+ return (callee()->ice_connection_state() ==
+ webrtc::PeerConnectionInterface::kIceConnectionConnected ||
+ callee()->ice_connection_state() ==
+ webrtc::PeerConnectionInterface::kIceConnectionCompleted) &&
+ (caller()->ice_connection_state() ==
+ webrtc::PeerConnectionInterface::kIceConnectionConnected ||
+ caller()->ice_connection_state() ==
+ webrtc::PeerConnectionInterface::kIceConnectionCompleted);
+ }
+
+ // When `event_log_factory` is null, the default implementation of the event
+ // log factory will be used.
+ std::unique_ptr<PeerConnectionIntegrationWrapper> CreatePeerConnectionWrapper(
+ const std::string& debug_name,
+ const PeerConnectionFactory::Options* options,
+ const RTCConfiguration* config,
+ webrtc::PeerConnectionDependencies dependencies,
+ std::unique_ptr<webrtc::FakeRtcEventLogFactory> event_log_factory,
+ bool reset_encoder_factory,
+ bool reset_decoder_factory,
+ bool create_media_engine = true) {
+ RTCConfiguration modified_config;
+ if (config) {
+ modified_config = *config;
+ }
+ modified_config.sdp_semantics = sdp_semantics_;
+ if (!dependencies.cert_generator) {
+ dependencies.cert_generator =
+ std::make_unique<FakeRTCCertificateGenerator>();
+ }
+ std::unique_ptr<PeerConnectionIntegrationWrapper> client(
+ new PeerConnectionIntegrationWrapper(debug_name));
+
+ if (!client->Init(options, &modified_config, std::move(dependencies),
+ fss_.get(), network_thread_.get(), worker_thread_.get(),
+ std::move(event_log_factory), reset_encoder_factory,
+ reset_decoder_factory, create_media_engine)) {
+ return nullptr;
+ }
+ return client;
+ }
+
+ std::unique_ptr<PeerConnectionIntegrationWrapper>
+ CreatePeerConnectionWrapperWithFakeRtcEventLog(
+ const std::string& debug_name,
+ const PeerConnectionFactory::Options* options,
+ const RTCConfiguration* config,
+ webrtc::PeerConnectionDependencies dependencies) {
+ return CreatePeerConnectionWrapper(
+ debug_name, options, config, std::move(dependencies),
+ std::make_unique<webrtc::FakeRtcEventLogFactory>(),
+ /*reset_encoder_factory=*/false,
+ /*reset_decoder_factory=*/false);
+ }
+
+ bool CreatePeerConnectionWrappers() {
+ return CreatePeerConnectionWrappersWithConfig(
+ PeerConnectionInterface::RTCConfiguration(),
+ PeerConnectionInterface::RTCConfiguration());
+ }
+
+ bool CreatePeerConnectionWrappersWithSdpSemantics(
+ SdpSemantics caller_semantics,
+ SdpSemantics callee_semantics) {
+ // Can't specify the sdp_semantics in the passed-in configuration since it
+ // will be overwritten by CreatePeerConnectionWrapper with whatever is
+ // stored in sdp_semantics_. So get around this by modifying the instance
+ // variable before calling CreatePeerConnectionWrapper for the caller and
+ // callee PeerConnections.
+ SdpSemantics original_semantics = sdp_semantics_;
+ sdp_semantics_ = caller_semantics;
+ caller_ = CreatePeerConnectionWrapper(
+ "Caller", nullptr, nullptr, webrtc::PeerConnectionDependencies(nullptr),
+ nullptr,
+ /*reset_encoder_factory=*/false,
+ /*reset_decoder_factory=*/false);
+ sdp_semantics_ = callee_semantics;
+ callee_ = CreatePeerConnectionWrapper(
+ "Callee", nullptr, nullptr, webrtc::PeerConnectionDependencies(nullptr),
+ nullptr,
+ /*reset_encoder_factory=*/false,
+ /*reset_decoder_factory=*/false);
+ sdp_semantics_ = original_semantics;
+ return caller_ && callee_;
+ }
+
+ bool CreatePeerConnectionWrappersWithConfig(
+ const PeerConnectionInterface::RTCConfiguration& caller_config,
+ const PeerConnectionInterface::RTCConfiguration& callee_config) {
+ caller_ = CreatePeerConnectionWrapper(
+ "Caller", nullptr, &caller_config,
+ webrtc::PeerConnectionDependencies(nullptr), nullptr,
+ /*reset_encoder_factory=*/false,
+ /*reset_decoder_factory=*/false);
+ callee_ = CreatePeerConnectionWrapper(
+ "Callee", nullptr, &callee_config,
+ webrtc::PeerConnectionDependencies(nullptr), nullptr,
+ /*reset_encoder_factory=*/false,
+ /*reset_decoder_factory=*/false);
+ return caller_ && callee_;
+ }
+
+ bool CreatePeerConnectionWrappersWithConfigAndDeps(
+ const PeerConnectionInterface::RTCConfiguration& caller_config,
+ webrtc::PeerConnectionDependencies caller_dependencies,
+ const PeerConnectionInterface::RTCConfiguration& callee_config,
+ webrtc::PeerConnectionDependencies callee_dependencies) {
+ caller_ =
+ CreatePeerConnectionWrapper("Caller", nullptr, &caller_config,
+ std::move(caller_dependencies), nullptr,
+ /*reset_encoder_factory=*/false,
+ /*reset_decoder_factory=*/false);
+ callee_ =
+ CreatePeerConnectionWrapper("Callee", nullptr, &callee_config,
+ std::move(callee_dependencies), nullptr,
+ /*reset_encoder_factory=*/false,
+ /*reset_decoder_factory=*/false);
+ return caller_ && callee_;
+ }
+
+ bool CreatePeerConnectionWrappersWithOptions(
+ const PeerConnectionFactory::Options& caller_options,
+ const PeerConnectionFactory::Options& callee_options) {
+ caller_ = CreatePeerConnectionWrapper(
+ "Caller", &caller_options, nullptr,
+ webrtc::PeerConnectionDependencies(nullptr), nullptr,
+ /*reset_encoder_factory=*/false,
+ /*reset_decoder_factory=*/false);
+ callee_ = CreatePeerConnectionWrapper(
+ "Callee", &callee_options, nullptr,
+ webrtc::PeerConnectionDependencies(nullptr), nullptr,
+ /*reset_encoder_factory=*/false,
+ /*reset_decoder_factory=*/false);
+ return caller_ && callee_;
+ }
+
+ bool CreatePeerConnectionWrappersWithFakeRtcEventLog() {
+ PeerConnectionInterface::RTCConfiguration default_config;
+ caller_ = CreatePeerConnectionWrapperWithFakeRtcEventLog(
+ "Caller", nullptr, &default_config,
+ webrtc::PeerConnectionDependencies(nullptr));
+ callee_ = CreatePeerConnectionWrapperWithFakeRtcEventLog(
+ "Callee", nullptr, &default_config,
+ webrtc::PeerConnectionDependencies(nullptr));
+ return caller_ && callee_;
+ }
+
+ std::unique_ptr<PeerConnectionIntegrationWrapper>
+ CreatePeerConnectionWrapperWithAlternateKey() {
+ std::unique_ptr<FakeRTCCertificateGenerator> cert_generator(
+ new FakeRTCCertificateGenerator());
+ cert_generator->use_alternate_key();
+
+ webrtc::PeerConnectionDependencies dependencies(nullptr);
+ dependencies.cert_generator = std::move(cert_generator);
+ return CreatePeerConnectionWrapper("New Peer", nullptr, nullptr,
+ std::move(dependencies), nullptr,
+ /*reset_encoder_factory=*/false,
+ /*reset_decoder_factory=*/false);
+ }
+
+ bool CreateOneDirectionalPeerConnectionWrappers(bool caller_to_callee) {
+ caller_ = CreatePeerConnectionWrapper(
+ "Caller", nullptr, nullptr, webrtc::PeerConnectionDependencies(nullptr),
+ nullptr,
+ /*reset_encoder_factory=*/!caller_to_callee,
+ /*reset_decoder_factory=*/caller_to_callee);
+ callee_ = CreatePeerConnectionWrapper(
+ "Callee", nullptr, nullptr, webrtc::PeerConnectionDependencies(nullptr),
+ nullptr,
+ /*reset_encoder_factory=*/caller_to_callee,
+ /*reset_decoder_factory=*/!caller_to_callee);
+ return caller_ && callee_;
+ }
+
+ bool CreatePeerConnectionWrappersWithoutMediaEngine() {
+ caller_ = CreatePeerConnectionWrapper(
+ "Caller", nullptr, nullptr, webrtc::PeerConnectionDependencies(nullptr),
+ nullptr,
+ /*reset_encoder_factory=*/false,
+ /*reset_decoder_factory=*/false,
+ /*create_media_engine=*/false);
+ callee_ = CreatePeerConnectionWrapper(
+ "Callee", nullptr, nullptr, webrtc::PeerConnectionDependencies(nullptr),
+ nullptr,
+ /*reset_encoder_factory=*/false,
+ /*reset_decoder_factory=*/false,
+ /*create_media_engine=*/false);
+ return caller_ && callee_;
+ }
+
+ cricket::TestTurnServer* CreateTurnServer(
+ rtc::SocketAddress internal_address,
+ rtc::SocketAddress external_address,
+ cricket::ProtocolType type = cricket::ProtocolType::PROTO_UDP,
+ const std::string& common_name = "test turn server") {
+ rtc::Thread* thread = network_thread();
+ rtc::SocketFactory* socket_factory = fss_.get();
+ std::unique_ptr<cricket::TestTurnServer> turn_server;
+ SendTask(network_thread(), [&] {
+ turn_server = std::make_unique<cricket::TestTurnServer>(
+ thread, socket_factory, internal_address, external_address, type,
+ /*ignore_bad_certs=*/true, common_name);
+ });
+ turn_servers_.push_back(std::move(turn_server));
+ // Interactions with the turn server should be done on the network thread.
+ return turn_servers_.back().get();
+ }
+
+ cricket::TestTurnCustomizer* CreateTurnCustomizer() {
+ std::unique_ptr<cricket::TestTurnCustomizer> turn_customizer;
+ SendTask(network_thread(), [&] {
+ turn_customizer = std::make_unique<cricket::TestTurnCustomizer>();
+ });
+ turn_customizers_.push_back(std::move(turn_customizer));
+ // Interactions with the turn customizer should be done on the network
+ // thread.
+ return turn_customizers_.back().get();
+ }
+
+ // Checks that the function counters for a TestTurnCustomizer are greater than
+ // 0.
+ void ExpectTurnCustomizerCountersIncremented(
+ cricket::TestTurnCustomizer* turn_customizer) {
+ SendTask(network_thread(), [turn_customizer] {
+ EXPECT_GT(turn_customizer->allow_channel_data_cnt_, 0u);
+ EXPECT_GT(turn_customizer->modify_cnt_, 0u);
+ });
+ }
+
+ // Once called, SDP blobs and ICE candidates will be automatically signaled
+ // between PeerConnections.
+ void ConnectFakeSignaling() {
+ caller_->set_signaling_message_receiver(callee_.get());
+ callee_->set_signaling_message_receiver(caller_.get());
+ }
+
+ // Once called, SDP blobs will be automatically signaled between
+ // PeerConnections. Note that ICE candidates will not be signaled unless they
+ // are in the exchanged SDP blobs.
+ void ConnectFakeSignalingForSdpOnly() {
+ ConnectFakeSignaling();
+ SetSignalIceCandidates(false);
+ }
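+  // A typical call setup in tests built on this fixture looks roughly like the
+  // following (see TestNegotiatedCipherSuite() below for a concrete instance;
+  // this snippet is illustrative):
+  //   ASSERT_TRUE(CreatePeerConnectionWrappers());
+  //   ConnectFakeSignaling();
+  //   caller()->AddAudioVideoTracks();
+  //   callee()->AddAudioVideoTracks();
+  //   caller()->CreateAndSetAndSignalOffer();
+  //   ASSERT_TRUE_WAIT(DtlsConnected(), kDefaultTimeout);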
+
+ void SetSignalingDelayMs(int delay_ms) {
+ caller_->set_signaling_delay_ms(delay_ms);
+ callee_->set_signaling_delay_ms(delay_ms);
+ }
+
+ void SetSignalIceCandidates(bool signal) {
+ caller_->set_signal_ice_candidates(signal);
+ callee_->set_signal_ice_candidates(signal);
+ }
+
+ // Messages may get lost on the unreliable DataChannel, so we send multiple
+ // times to avoid test flakiness.
+ void SendRtpDataWithRetries(webrtc::DataChannelInterface* dc,
+ const std::string& data,
+ int retries) {
+ for (int i = 0; i < retries; ++i) {
+ dc->Send(DataBuffer(data));
+ }
+ }
+
+ rtc::Thread* network_thread() { return network_thread_.get(); }
+
+ rtc::VirtualSocketServer* virtual_socket_server() { return ss_.get(); }
+
+ PeerConnectionIntegrationWrapper* caller() { return caller_.get(); }
+
+  // Destroys the PeerConnections.
+  // This can be used to ensure that all pointers to on-stack mocks
+  // get dropped before exit.
+ void DestroyPeerConnections() {
+ if (caller_) {
+ caller_->pc()->Close();
+ }
+ if (callee_) {
+ callee_->pc()->Close();
+ }
+ caller_.reset();
+ callee_.reset();
+ }
+
+ // Set the `caller_` to the `wrapper` passed in and return the
+ // original `caller_`.
+ PeerConnectionIntegrationWrapper* SetCallerPcWrapperAndReturnCurrent(
+ PeerConnectionIntegrationWrapper* wrapper) {
+ PeerConnectionIntegrationWrapper* old = caller_.release();
+ caller_.reset(wrapper);
+ return old;
+ }
+
+ PeerConnectionIntegrationWrapper* callee() { return callee_.get(); }
+
+ // Set the `callee_` to the `wrapper` passed in and return the
+ // original `callee_`.
+ PeerConnectionIntegrationWrapper* SetCalleePcWrapperAndReturnCurrent(
+ PeerConnectionIntegrationWrapper* wrapper) {
+ PeerConnectionIntegrationWrapper* old = callee_.release();
+ callee_.reset(wrapper);
+ return old;
+ }
+
+ void SetPortAllocatorFlags(uint32_t caller_flags, uint32_t callee_flags) {
+ SendTask(network_thread(), [this, caller_flags] {
+ caller()->port_allocator()->set_flags(caller_flags);
+ });
+ SendTask(network_thread(), [this, callee_flags] {
+ callee()->port_allocator()->set_flags(callee_flags);
+ });
+ }
+
+ rtc::FirewallSocketServer* firewall() const { return fss_.get(); }
+
+ // Expects the provided number of new frames to be received within
+ // kMaxWaitForFramesMs. The new expected frames are specified in
+ // `media_expectations`. Returns false if any of the expectations were
+ // not met.
+ bool ExpectNewFrames(const MediaExpectations& media_expectations) {
+ // Make sure there are no bogus tracks confusing the issue.
+ caller()->RemoveUnusedVideoRenderers();
+ callee()->RemoveUnusedVideoRenderers();
+ // First initialize the expected frame counts based upon the current
+ // frame count.
+ int total_caller_audio_frames_expected = caller()->audio_frames_received();
+ if (media_expectations.caller_audio_expectation_ ==
+ MediaExpectations::kExpectSomeFrames) {
+ total_caller_audio_frames_expected +=
+ media_expectations.caller_audio_frames_expected_;
+ }
+ int total_caller_video_frames_expected =
+ caller()->min_video_frames_received_per_track();
+ if (media_expectations.caller_video_expectation_ ==
+ MediaExpectations::kExpectSomeFrames) {
+ total_caller_video_frames_expected +=
+ media_expectations.caller_video_frames_expected_;
+ }
+ int total_callee_audio_frames_expected = callee()->audio_frames_received();
+ if (media_expectations.callee_audio_expectation_ ==
+ MediaExpectations::kExpectSomeFrames) {
+ total_callee_audio_frames_expected +=
+ media_expectations.callee_audio_frames_expected_;
+ }
+ int total_callee_video_frames_expected =
+ callee()->min_video_frames_received_per_track();
+ if (media_expectations.callee_video_expectation_ ==
+ MediaExpectations::kExpectSomeFrames) {
+ total_callee_video_frames_expected +=
+ media_expectations.callee_video_frames_expected_;
+ }
+
+ // Wait for the expected frames.
+ EXPECT_TRUE_WAIT(caller()->audio_frames_received() >=
+ total_caller_audio_frames_expected &&
+ caller()->min_video_frames_received_per_track() >=
+ total_caller_video_frames_expected &&
+ callee()->audio_frames_received() >=
+ total_callee_audio_frames_expected &&
+ callee()->min_video_frames_received_per_track() >=
+ total_callee_video_frames_expected,
+ kMaxWaitForFramesMs);
+ bool expectations_correct =
+ caller()->audio_frames_received() >=
+ total_caller_audio_frames_expected &&
+ caller()->min_video_frames_received_per_track() >=
+ total_caller_video_frames_expected &&
+ callee()->audio_frames_received() >=
+ total_callee_audio_frames_expected &&
+ callee()->min_video_frames_received_per_track() >=
+ total_callee_video_frames_expected;
+
+ // After the combined wait, print out a more detailed message upon
+ // failure.
+ EXPECT_GE(caller()->audio_frames_received(),
+ total_caller_audio_frames_expected);
+ EXPECT_GE(caller()->min_video_frames_received_per_track(),
+ total_caller_video_frames_expected);
+ EXPECT_GE(callee()->audio_frames_received(),
+ total_callee_audio_frames_expected);
+ EXPECT_GE(callee()->min_video_frames_received_per_track(),
+ total_callee_video_frames_expected);
+
+ // We want to make sure nothing unexpected was received.
+ if (media_expectations.caller_audio_expectation_ ==
+ MediaExpectations::kExpectNoFrames) {
+ EXPECT_EQ(caller()->audio_frames_received(),
+ total_caller_audio_frames_expected);
+ if (caller()->audio_frames_received() !=
+ total_caller_audio_frames_expected) {
+ expectations_correct = false;
+ }
+ }
+ if (media_expectations.caller_video_expectation_ ==
+ MediaExpectations::kExpectNoFrames) {
+ EXPECT_EQ(caller()->min_video_frames_received_per_track(),
+ total_caller_video_frames_expected);
+ if (caller()->min_video_frames_received_per_track() !=
+ total_caller_video_frames_expected) {
+ expectations_correct = false;
+ }
+ }
+ if (media_expectations.callee_audio_expectation_ ==
+ MediaExpectations::kExpectNoFrames) {
+ EXPECT_EQ(callee()->audio_frames_received(),
+ total_callee_audio_frames_expected);
+ if (callee()->audio_frames_received() !=
+ total_callee_audio_frames_expected) {
+ expectations_correct = false;
+ }
+ }
+ if (media_expectations.callee_video_expectation_ ==
+ MediaExpectations::kExpectNoFrames) {
+ EXPECT_EQ(callee()->min_video_frames_received_per_track(),
+ total_callee_video_frames_expected);
+ if (callee()->min_video_frames_received_per_track() !=
+ total_callee_video_frames_expected) {
+ expectations_correct = false;
+ }
+ }
+ return expectations_correct;
+ }
+
+ void ClosePeerConnections() {
+ if (caller())
+ caller()->pc()->Close();
+ if (callee())
+ callee()->pc()->Close();
+ }
+
+ void TestNegotiatedCipherSuite(
+ const PeerConnectionFactory::Options& caller_options,
+ const PeerConnectionFactory::Options& callee_options,
+ int expected_cipher_suite) {
+ ASSERT_TRUE(CreatePeerConnectionWrappersWithOptions(caller_options,
+ callee_options));
+ ConnectFakeSignaling();
+ caller()->AddAudioVideoTracks();
+ callee()->AddAudioVideoTracks();
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(DtlsConnected(), kDefaultTimeout);
+ EXPECT_EQ_WAIT(rtc::SrtpCryptoSuiteToName(expected_cipher_suite),
+ caller()->OldGetStats()->SrtpCipher(), kDefaultTimeout);
+ // TODO(bugs.webrtc.org/9456): Fix it.
+ EXPECT_METRIC_EQ(1, webrtc::metrics::NumEvents(
+ "WebRTC.PeerConnection.SrtpCryptoSuite.Audio",
+ expected_cipher_suite));
+ }
+
+ void TestGcmNegotiationUsesCipherSuite(bool local_gcm_enabled,
+ bool remote_gcm_enabled,
+ bool aes_ctr_enabled,
+ int expected_cipher_suite) {
+ PeerConnectionFactory::Options caller_options;
+ caller_options.crypto_options.srtp.enable_gcm_crypto_suites =
+ local_gcm_enabled;
+ caller_options.crypto_options.srtp.enable_aes128_sha1_80_crypto_cipher =
+ aes_ctr_enabled;
+ PeerConnectionFactory::Options callee_options;
+ callee_options.crypto_options.srtp.enable_gcm_crypto_suites =
+ remote_gcm_enabled;
+ callee_options.crypto_options.srtp.enable_aes128_sha1_80_crypto_cipher =
+ aes_ctr_enabled;
+ TestNegotiatedCipherSuite(caller_options, callee_options,
+ expected_cipher_suite);
+ }
+
+ const FieldTrialsView& trials() const { return *field_trials_.get(); }
+
+ protected:
+ SdpSemantics sdp_semantics_;
+
+ private:
+ rtc::AutoThread main_thread_; // Used as the signal thread by most tests.
+ // `ss_` is used by `network_thread_` so it must be destroyed later.
+ std::unique_ptr<rtc::VirtualSocketServer> ss_;
+ std::unique_ptr<rtc::FirewallSocketServer> fss_;
+ // `network_thread_` and `worker_thread_` are used by both
+ // `caller_` and `callee_` so they must be destroyed
+ // later.
+ std::unique_ptr<rtc::Thread> network_thread_;
+ std::unique_ptr<rtc::Thread> worker_thread_;
+ // The turn servers and turn customizers should be accessed & deleted on the
+ // network thread to avoid a race with the socket read/write that occurs
+ // on the network thread.
+ std::vector<std::unique_ptr<cricket::TestTurnServer>> turn_servers_;
+ std::vector<std::unique_ptr<cricket::TestTurnCustomizer>> turn_customizers_;
+ std::unique_ptr<PeerConnectionIntegrationWrapper> caller_;
+ std::unique_ptr<PeerConnectionIntegrationWrapper> callee_;
+ std::unique_ptr<FieldTrialsView> field_trials_;
+};
+
+} // namespace webrtc
+
+#endif // PC_TEST_INTEGRATION_TEST_HELPERS_H_
diff --git a/third_party/libwebrtc/pc/test/mock_channel_interface.h b/third_party/libwebrtc/pc/test/mock_channel_interface.h
new file mode 100644
index 0000000000..41c142d5ba
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/mock_channel_interface.h
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_TEST_MOCK_CHANNEL_INTERFACE_H_
+#define PC_TEST_MOCK_CHANNEL_INTERFACE_H_
+
+#include <string>
+#include <vector>
+
+#include "pc/channel_interface.h"
+#include "test/gmock.h"
+
+namespace cricket {
+
+// Mock class for BaseChannel.
+// Use this class in unit tests to avoid dependency on a specific
+// implementation of BaseChannel.
+class MockChannelInterface : public cricket::ChannelInterface {
+ public:
+ MOCK_METHOD(cricket::MediaType, media_type, (), (const, override));
+ MOCK_METHOD(VideoChannel*, AsVideoChannel, (), (override));
+ MOCK_METHOD(VoiceChannel*, AsVoiceChannel, (), (override));
+ MOCK_METHOD(MediaChannel*, media_channel, (), (override));
+ MOCK_METHOD(MediaChannel*, media_send_channel, (), (override));
+ MOCK_METHOD(VoiceMediaChannel*, voice_media_send_channel, (), (override));
+ MOCK_METHOD(VideoMediaChannel*, video_media_send_channel, (), (override));
+ MOCK_METHOD(MediaChannel*, media_receive_channel, (), (override));
+ MOCK_METHOD(VoiceMediaChannel*, voice_media_receive_channel, (), (override));
+ MOCK_METHOD(VideoMediaChannel*, video_media_receive_channel, (), (override));
+ MOCK_METHOD(absl::string_view, transport_name, (), (const, override));
+ MOCK_METHOD(const std::string&, mid, (), (const, override));
+ MOCK_METHOD(void, Enable, (bool), (override));
+ MOCK_METHOD(void,
+ SetFirstPacketReceivedCallback,
+ (std::function<void()>),
+ (override));
+ MOCK_METHOD(bool,
+ SetLocalContent,
+ (const cricket::MediaContentDescription*,
+ webrtc::SdpType,
+ std::string&),
+ (override));
+ MOCK_METHOD(bool,
+ SetRemoteContent,
+ (const cricket::MediaContentDescription*,
+ webrtc::SdpType,
+ std::string&),
+ (override));
+ MOCK_METHOD(bool, SetPayloadTypeDemuxingEnabled, (bool), (override));
+ MOCK_METHOD(const std::vector<StreamParams>&,
+ local_streams,
+ (),
+ (const, override));
+ MOCK_METHOD(const std::vector<StreamParams>&,
+ remote_streams,
+ (),
+ (const, override));
+ MOCK_METHOD(bool,
+ SetRtpTransport,
+ (webrtc::RtpTransportInternal*),
+ (override));
+};
+
+} // namespace cricket
+
+#endif // PC_TEST_MOCK_CHANNEL_INTERFACE_H_
diff --git a/third_party/libwebrtc/pc/test/mock_data_channel.h b/third_party/libwebrtc/pc/test/mock_data_channel.h
new file mode 100644
index 0000000000..f1c5374d28
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/mock_data_channel.h
@@ -0,0 +1,72 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_TEST_MOCK_DATA_CHANNEL_H_
+#define PC_TEST_MOCK_DATA_CHANNEL_H_
+
+#include <string>
+
+#include "pc/sctp_data_channel.h"
+#include "test/gmock.h"
+
+namespace webrtc {
+
+class MockSctpDataChannel : public SctpDataChannel {
+ public:
+ MockSctpDataChannel(int id, DataState state)
+ : MockSctpDataChannel(id,
+ "MockSctpDataChannel",
+ state,
+ "udp",
+ 0,
+ 0,
+ 0,
+ 0) {}
+ MockSctpDataChannel(
+ int id,
+ const std::string& label,
+ DataState state,
+ const std::string& protocol,
+ uint32_t messages_sent,
+ uint64_t bytes_sent,
+ uint32_t messages_received,
+ uint64_t bytes_received,
+ const InternalDataChannelInit& config = InternalDataChannelInit(),
+ rtc::Thread* signaling_thread = rtc::Thread::Current(),
+ rtc::Thread* network_thread = rtc::Thread::Current())
+ : SctpDataChannel(config,
+ nullptr,
+ label,
+ signaling_thread,
+ network_thread) {
+ EXPECT_CALL(*this, id()).WillRepeatedly(::testing::Return(id));
+ EXPECT_CALL(*this, state()).WillRepeatedly(::testing::Return(state));
+ EXPECT_CALL(*this, protocol()).WillRepeatedly(::testing::Return(protocol));
+ EXPECT_CALL(*this, messages_sent())
+ .WillRepeatedly(::testing::Return(messages_sent));
+ EXPECT_CALL(*this, bytes_sent())
+ .WillRepeatedly(::testing::Return(bytes_sent));
+ EXPECT_CALL(*this, messages_received())
+ .WillRepeatedly(::testing::Return(messages_received));
+ EXPECT_CALL(*this, bytes_received())
+ .WillRepeatedly(::testing::Return(bytes_received));
+ }
+ MOCK_METHOD(int, id, (), (const, override));
+ MOCK_METHOD(DataState, state, (), (const, override));
+ MOCK_METHOD(std::string, protocol, (), (const, override));
+ MOCK_METHOD(uint32_t, messages_sent, (), (const, override));
+ MOCK_METHOD(uint64_t, bytes_sent, (), (const, override));
+ MOCK_METHOD(uint32_t, messages_received, (), (const, override));
+ MOCK_METHOD(uint64_t, bytes_received, (), (const, override));
+};
+
+} // namespace webrtc
+
+#endif // PC_TEST_MOCK_DATA_CHANNEL_H_
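
The constructor above pre-seeds EXPECT_CALLs so the mocked getters simply echo the constructor arguments. A minimal sketch of that behavior (not part of the patch), assuming the mock is created refcounted and that the calling thread is wrapped by an rtc::Thread, since the defaulted signaling/network thread arguments use rtc::Thread::Current():

#include "api/make_ref_counted.h"
#include "api/scoped_refptr.h"
#include "pc/test/mock_data_channel.h"
#include "test/gtest.h"

TEST(MockSctpDataChannelSketch, GettersReturnSeededValues) {
  rtc::scoped_refptr<webrtc::MockSctpDataChannel> channel =
      rtc::make_ref_counted<webrtc::MockSctpDataChannel>(
          /*id=*/1, webrtc::DataChannelInterface::kOpen);
  // The values passed to the constructor are returned by the mocked getters.
  EXPECT_EQ(channel->id(), 1);
  EXPECT_EQ(channel->state(), webrtc::DataChannelInterface::kOpen);
}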
diff --git a/third_party/libwebrtc/pc/test/mock_peer_connection_internal.h b/third_party/libwebrtc/pc/test/mock_peer_connection_internal.h
new file mode 100644
index 0000000000..23ecc93e43
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/mock_peer_connection_internal.h
@@ -0,0 +1,332 @@
+/*
+ * Copyright 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_TEST_MOCK_PEER_CONNECTION_INTERNAL_H_
+#define PC_TEST_MOCK_PEER_CONNECTION_INTERNAL_H_
+
+#include <map>
+#include <memory>
+#include <set>
+#include <string>
+#include <vector>
+
+#include "modules/audio_device/include/audio_device.h"
+#include "pc/peer_connection_internal.h"
+#include "test/gmock.h"
+
+namespace webrtc {
+
+class MockPeerConnectionInternal : public PeerConnectionInternal {
+ public:
+ MockPeerConnectionInternal() {}
+ ~MockPeerConnectionInternal() = default;
+ // PeerConnectionInterface
+ MOCK_METHOD(rtc::scoped_refptr<StreamCollectionInterface>,
+ local_streams,
+ (),
+ (override));
+ MOCK_METHOD(rtc::scoped_refptr<StreamCollectionInterface>,
+ remote_streams,
+ (),
+ (override));
+ MOCK_METHOD(bool, AddStream, (MediaStreamInterface*), (override));
+ MOCK_METHOD(void, RemoveStream, (MediaStreamInterface*), (override));
+ MOCK_METHOD(RTCErrorOr<rtc::scoped_refptr<RtpSenderInterface>>,
+ AddTrack,
+ (rtc::scoped_refptr<MediaStreamTrackInterface>,
+ const std::vector<std::string>&),
+ (override));
+ MOCK_METHOD(RTCErrorOr<rtc::scoped_refptr<RtpSenderInterface>>,
+ AddTrack,
+ (rtc::scoped_refptr<MediaStreamTrackInterface>,
+ const std::vector<std::string>&,
+ const std::vector<RtpEncodingParameters>&),
+ (override));
+ MOCK_METHOD(RTCError,
+ RemoveTrackOrError,
+ (rtc::scoped_refptr<RtpSenderInterface>),
+ (override));
+ MOCK_METHOD(RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>>,
+ AddTransceiver,
+ (rtc::scoped_refptr<MediaStreamTrackInterface>),
+ (override));
+ MOCK_METHOD(RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>>,
+ AddTransceiver,
+ (rtc::scoped_refptr<MediaStreamTrackInterface>,
+ const RtpTransceiverInit&),
+ (override));
+ MOCK_METHOD(RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>>,
+ AddTransceiver,
+ (cricket::MediaType),
+ (override));
+ MOCK_METHOD(RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>>,
+ AddTransceiver,
+ (cricket::MediaType, const RtpTransceiverInit&),
+ (override));
+ MOCK_METHOD(rtc::scoped_refptr<RtpSenderInterface>,
+ CreateSender,
+ (const std::string&, const std::string&),
+ (override));
+ MOCK_METHOD(std::vector<rtc::scoped_refptr<RtpSenderInterface>>,
+ GetSenders,
+ (),
+ (const, override));
+ MOCK_METHOD(std::vector<rtc::scoped_refptr<RtpReceiverInterface>>,
+ GetReceivers,
+ (),
+ (const, override));
+ MOCK_METHOD(std::vector<rtc::scoped_refptr<RtpTransceiverInterface>>,
+ GetTransceivers,
+ (),
+ (const, override));
+ MOCK_METHOD(bool,
+ GetStats,
+ (StatsObserver*, MediaStreamTrackInterface*, StatsOutputLevel),
+ (override));
+ MOCK_METHOD(void, GetStats, (RTCStatsCollectorCallback*), (override));
+ MOCK_METHOD(void,
+ GetStats,
+ (rtc::scoped_refptr<RtpSenderInterface>,
+ rtc::scoped_refptr<RTCStatsCollectorCallback>),
+ (override));
+ MOCK_METHOD(void,
+ GetStats,
+ (rtc::scoped_refptr<RtpReceiverInterface>,
+ rtc::scoped_refptr<RTCStatsCollectorCallback>),
+ (override));
+ MOCK_METHOD(void, ClearStatsCache, (), (override));
+ MOCK_METHOD(RTCErrorOr<rtc::scoped_refptr<DataChannelInterface>>,
+ CreateDataChannelOrError,
+ (const std::string&, const DataChannelInit*),
+ (override));
+ MOCK_METHOD(SessionDescriptionInterface*,
+ local_description,
+ (),
+ (const, override));
+ MOCK_METHOD(SessionDescriptionInterface*,
+ remote_description,
+ (),
+ (const, override));
+ MOCK_METHOD(SessionDescriptionInterface*,
+ current_local_description,
+ (),
+ (const, override));
+ MOCK_METHOD(SessionDescriptionInterface*,
+ current_remote_description,
+ (),
+ (const, override));
+ MOCK_METHOD(SessionDescriptionInterface*,
+ pending_local_description,
+ (),
+ (const, override));
+ MOCK_METHOD(SessionDescriptionInterface*,
+ pending_remote_description,
+ (),
+ (const, override));
+ MOCK_METHOD(void, RestartIce, (), (override));
+ MOCK_METHOD(void,
+ CreateOffer,
+ (CreateSessionDescriptionObserver*, const RTCOfferAnswerOptions&),
+ (override));
+ MOCK_METHOD(void,
+ CreateAnswer,
+ (CreateSessionDescriptionObserver*, const RTCOfferAnswerOptions&),
+ (override));
+
+ MOCK_METHOD(void,
+ SetLocalDescription,
+ (SetSessionDescriptionObserver*, SessionDescriptionInterface*),
+ (override));
+ MOCK_METHOD(void,
+ SetRemoteDescription,
+ (SetSessionDescriptionObserver*, SessionDescriptionInterface*),
+ (override));
+ MOCK_METHOD(void,
+ SetRemoteDescription,
+ (std::unique_ptr<SessionDescriptionInterface>,
+ rtc::scoped_refptr<SetRemoteDescriptionObserverInterface>),
+ (override));
+ MOCK_METHOD(PeerConnectionInterface::RTCConfiguration,
+ GetConfiguration,
+ (),
+ (override));
+ MOCK_METHOD(RTCError,
+ SetConfiguration,
+ (const PeerConnectionInterface::RTCConfiguration&),
+ (override));
+ MOCK_METHOD(bool,
+ AddIceCandidate,
+ (const IceCandidateInterface*),
+ (override));
+ MOCK_METHOD(bool,
+ RemoveIceCandidates,
+ (const std::vector<cricket::Candidate>&),
+ (override));
+ MOCK_METHOD(RTCError, SetBitrate, (const BitrateSettings&), (override));
+ MOCK_METHOD(void, SetAudioPlayout, (bool), (override));
+ MOCK_METHOD(void, SetAudioRecording, (bool), (override));
+ MOCK_METHOD(rtc::scoped_refptr<DtlsTransportInterface>,
+ LookupDtlsTransportByMid,
+ (const std::string&),
+ (override));
+ MOCK_METHOD(rtc::scoped_refptr<SctpTransportInterface>,
+ GetSctpTransport,
+ (),
+ (const, override));
+ MOCK_METHOD(SignalingState, signaling_state, (), (override));
+ MOCK_METHOD(IceConnectionState, ice_connection_state, (), (override));
+ MOCK_METHOD(IceConnectionState,
+ standardized_ice_connection_state,
+ (),
+ (override));
+ MOCK_METHOD(PeerConnectionState, peer_connection_state, (), (override));
+ MOCK_METHOD(IceGatheringState, ice_gathering_state, (), (override));
+ MOCK_METHOD(absl::optional<bool>, can_trickle_ice_candidates, (), (override));
+ MOCK_METHOD(bool,
+ StartRtcEventLog,
+ (std::unique_ptr<RtcEventLogOutput>, int64_t),
+ (override));
+ MOCK_METHOD(bool,
+ StartRtcEventLog,
+ (std::unique_ptr<RtcEventLogOutput>),
+ (override));
+ MOCK_METHOD(void, StopRtcEventLog, (), (override));
+ MOCK_METHOD(void, Close, (), (override));
+ MOCK_METHOD(rtc::Thread*, signaling_thread, (), (const, override));
+
+ // PeerConnectionSdpMethods
+ MOCK_METHOD(std::string, session_id, (), (const, override));
+ MOCK_METHOD(bool, NeedsIceRestart, (const std::string&), (const, override));
+ MOCK_METHOD(absl::optional<std::string>, sctp_mid, (), (const, override));
+ MOCK_METHOD(PeerConnectionInterface::RTCConfiguration*,
+ configuration,
+ (),
+ (const, override));
+ MOCK_METHOD(void,
+ ReportSdpBundleUsage,
+ (const SessionDescriptionInterface&),
+ (override));
+ MOCK_METHOD(PeerConnectionMessageHandler*, message_handler, (), (override));
+ MOCK_METHOD(RtpTransmissionManager*, rtp_manager, (), (override));
+ MOCK_METHOD(const RtpTransmissionManager*,
+ rtp_manager,
+ (),
+ (const, override));
+ MOCK_METHOD(bool, dtls_enabled, (), (const, override));
+ MOCK_METHOD(const PeerConnectionFactoryInterface::Options*,
+ options,
+ (),
+ (const, override));
+ MOCK_METHOD(CryptoOptions, GetCryptoOptions, (), (override));
+ MOCK_METHOD(JsepTransportController*, transport_controller_s, (), (override));
+ MOCK_METHOD(JsepTransportController*, transport_controller_n, (), (override));
+ MOCK_METHOD(DataChannelController*, data_channel_controller, (), (override));
+ MOCK_METHOD(cricket::PortAllocator*, port_allocator, (), (override));
+ MOCK_METHOD(LegacyStatsCollector*, legacy_stats, (), (override));
+ MOCK_METHOD(PeerConnectionObserver*, Observer, (), (const, override));
+ MOCK_METHOD(bool, GetSctpSslRole, (rtc::SSLRole*), (override));
+ MOCK_METHOD(PeerConnectionInterface::IceConnectionState,
+ ice_connection_state_internal,
+ (),
+ (override));
+ MOCK_METHOD(void,
+ SetIceConnectionState,
+ (PeerConnectionInterface::IceConnectionState),
+ (override));
+ MOCK_METHOD(void, NoteUsageEvent, (UsageEvent), (override));
+ MOCK_METHOD(bool, IsClosed, (), (const, override));
+ MOCK_METHOD(bool, IsUnifiedPlan, (), (const, override));
+ MOCK_METHOD(bool,
+ ValidateBundleSettings,
+ (const cricket::SessionDescription*,
+ (const std::map<std::string, const cricket::ContentGroup*>&)),
+ (override));
+ MOCK_METHOD(absl::optional<std::string>, GetDataMid, (), (const, override));
+ MOCK_METHOD(RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>>,
+ AddTransceiver,
+ (cricket::MediaType,
+ rtc::scoped_refptr<MediaStreamTrackInterface>,
+ const RtpTransceiverInit&,
+ bool),
+ (override));
+ MOCK_METHOD(void, StartSctpTransport, (int, int, int), (override));
+ MOCK_METHOD(void,
+ AddRemoteCandidate,
+ (const std::string&, const cricket::Candidate&),
+ (override));
+ MOCK_METHOD(Call*, call_ptr, (), (override));
+ MOCK_METHOD(bool, SrtpRequired, (), (const, override));
+ MOCK_METHOD(bool,
+ SetupDataChannelTransport_n,
+ (const std::string&),
+ (override));
+ MOCK_METHOD(void, TeardownDataChannelTransport_n, (), (override));
+ MOCK_METHOD(void, SetSctpDataMid, (const std::string&), (override));
+ MOCK_METHOD(void, ResetSctpDataMid, (), (override));
+ MOCK_METHOD(const FieldTrialsView&, trials, (), (const, override));
+
+ // PeerConnectionInternal
+ MOCK_METHOD(rtc::Thread*, network_thread, (), (const, override));
+ MOCK_METHOD(rtc::Thread*, worker_thread, (), (const, override));
+ MOCK_METHOD(bool, initial_offerer, (), (const, override));
+ MOCK_METHOD(
+ std::vector<
+ rtc::scoped_refptr<RtpTransceiverProxyWithInternal<RtpTransceiver>>>,
+ GetTransceiversInternal,
+ (),
+ (const, override));
+ MOCK_METHOD(sigslot::signal1<SctpDataChannel*>&,
+ SignalSctpDataChannelCreated,
+ (),
+ (override));
+ MOCK_METHOD(std::vector<DataChannelStats>,
+ GetDataChannelStats,
+ (),
+ (const, override));
+ MOCK_METHOD(absl::optional<std::string>,
+ sctp_transport_name,
+ (),
+ (const, override));
+ MOCK_METHOD(cricket::CandidateStatsList,
+ GetPooledCandidateStats,
+ (),
+ (const, override));
+ MOCK_METHOD((std::map<std::string, cricket::TransportStats>),
+ GetTransportStatsByNames,
+ (const std::set<std::string>&),
+ (override));
+ MOCK_METHOD(Call::Stats, GetCallStats, (), (override));
+ MOCK_METHOD(absl::optional<AudioDeviceModule::Stats>,
+ GetAudioDeviceStats,
+ (),
+ (override));
+ MOCK_METHOD(bool,
+ GetLocalCertificate,
+ (const std::string&, rtc::scoped_refptr<rtc::RTCCertificate>*),
+ (override));
+ MOCK_METHOD(std::unique_ptr<rtc::SSLCertChain>,
+ GetRemoteSSLCertChain,
+ (const std::string&),
+ (override));
+ MOCK_METHOD(bool, IceRestartPending, (const std::string&), (const, override));
+ MOCK_METHOD(bool,
+ GetSslRole,
+ (const std::string&, rtc::SSLRole*),
+ (override));
+ MOCK_METHOD(void, NoteDataAddedEvent, (), (override));
+ MOCK_METHOD(void,
+ OnSctpDataChannelClosed,
+ (DataChannelInterface*),
+ (override));
+};
+
+} // namespace webrtc
+
+#endif // PC_TEST_MOCK_PEER_CONNECTION_INTERNAL_H_
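
A short sketch (not part of the patch) of how a test might stub this mock. PeerConnectionInternal is refcounted through PeerConnectionInterface, so the mock is created with rtc::make_ref_counted; wrapping it in NiceMock silences warnings about the many uninteresting calls such a broad interface attracts. The function name is illustrative.

#include "api/make_ref_counted.h"
#include "pc/test/mock_peer_connection_internal.h"
#include "rtc_base/thread.h"
#include "test/gmock.h"

void StubPeerConnectionInternalSketch() {
  auto pc = rtc::make_ref_counted<
      ::testing::NiceMock<webrtc::MockPeerConnectionInternal>>();
  // Route both thread getters to the current thread for a single-threaded test.
  ON_CALL(*pc, signaling_thread())
      .WillByDefault(::testing::Return(rtc::Thread::Current()));
  ON_CALL(*pc, network_thread())
      .WillByDefault(::testing::Return(rtc::Thread::Current()));
}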
diff --git a/third_party/libwebrtc/pc/test/mock_peer_connection_observers.h b/third_party/libwebrtc/pc/test/mock_peer_connection_observers.h
new file mode 100644
index 0000000000..e9d97a97f6
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/mock_peer_connection_observers.h
@@ -0,0 +1,599 @@
+/*
+ * Copyright 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file contains mock implementations of observers used in PeerConnection.
+// TODO(steveanton): These aren't really mocks and should be renamed.
+
+#ifndef PC_TEST_MOCK_PEER_CONNECTION_OBSERVERS_H_
+#define PC_TEST_MOCK_PEER_CONNECTION_OBSERVERS_H_
+
+#include <map>
+#include <memory>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "api/data_channel_interface.h"
+#include "api/jsep_ice_candidate.h"
+#include "pc/stream_collection.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+
+class MockPeerConnectionObserver : public PeerConnectionObserver {
+ public:
+ struct AddTrackEvent {
+ explicit AddTrackEvent(
+ rtc::scoped_refptr<RtpReceiverInterface> event_receiver,
+ std::vector<rtc::scoped_refptr<MediaStreamInterface>> event_streams)
+ : receiver(std::move(event_receiver)),
+ streams(std::move(event_streams)) {
+ for (auto stream : streams) {
+ std::vector<rtc::scoped_refptr<MediaStreamTrackInterface>> tracks;
+ for (auto audio_track : stream->GetAudioTracks()) {
+ tracks.push_back(audio_track);
+ }
+ for (auto video_track : stream->GetVideoTracks()) {
+ tracks.push_back(video_track);
+ }
+ snapshotted_stream_tracks[stream] = tracks;
+ }
+ }
+
+ rtc::scoped_refptr<RtpReceiverInterface> receiver;
+ std::vector<rtc::scoped_refptr<MediaStreamInterface>> streams;
+ // This map records the tracks present in each stream at the time the
+ // OnAddTrack callback was issued.
+ std::map<rtc::scoped_refptr<MediaStreamInterface>,
+ std::vector<rtc::scoped_refptr<MediaStreamTrackInterface>>>
+ snapshotted_stream_tracks;
+ };
+
+ MockPeerConnectionObserver() : remote_streams_(StreamCollection::Create()) {}
+ virtual ~MockPeerConnectionObserver() {}
+ void SetPeerConnectionInterface(PeerConnectionInterface* pc) {
+ pc_ = pc;
+ if (pc) {
+ state_ = pc_->signaling_state();
+ }
+ }
+ void OnSignalingChange(
+ PeerConnectionInterface::SignalingState new_state) override {
+ RTC_DCHECK(pc_);
+ RTC_DCHECK(pc_->signaling_state() == new_state);
+ state_ = new_state;
+ }
+
+ MediaStreamInterface* RemoteStream(const std::string& label) {
+ return remote_streams_->find(label);
+ }
+ StreamCollectionInterface* remote_streams() const {
+ return remote_streams_.get();
+ }
+ void OnAddStream(rtc::scoped_refptr<MediaStreamInterface> stream) override {
+ last_added_stream_ = stream;
+ remote_streams_->AddStream(stream);
+ }
+ void OnRemoveStream(
+ rtc::scoped_refptr<MediaStreamInterface> stream) override {
+ last_removed_stream_ = stream;
+ remote_streams_->RemoveStream(stream.get());
+ }
+ void OnRenegotiationNeeded() override { renegotiation_needed_ = true; }
+ void OnNegotiationNeededEvent(uint32_t event_id) override {
+ latest_negotiation_needed_event_ = event_id;
+ }
+ void OnDataChannel(
+ rtc::scoped_refptr<DataChannelInterface> data_channel) override {
+ last_datachannel_ = data_channel;
+ }
+
+ void OnIceConnectionChange(
+ PeerConnectionInterface::IceConnectionState new_state) override {
+ RTC_DCHECK(pc_);
+ RTC_DCHECK(pc_->ice_connection_state() == new_state);
+ // When ICE is finished, the caller will get to a kIceConnectionCompleted
+ // state, because it has the ICE controlling role, while the callee
+ // will get to a kIceConnectionConnected state. This means that both ICE
+ // and DTLS are connected.
+ ice_connected_ =
+ (new_state == PeerConnectionInterface::kIceConnectionConnected) ||
+ (new_state == PeerConnectionInterface::kIceConnectionCompleted);
+ callback_triggered_ = true;
+ }
+ void OnIceGatheringChange(
+ PeerConnectionInterface::IceGatheringState new_state) override {
+ RTC_DCHECK(pc_);
+ RTC_DCHECK(pc_->ice_gathering_state() == new_state);
+ ice_gathering_complete_ =
+ new_state == PeerConnectionInterface::kIceGatheringComplete;
+ callback_triggered_ = true;
+ }
+ void OnIceCandidate(const IceCandidateInterface* candidate) override {
+ RTC_DCHECK(pc_);
+ candidates_.push_back(std::make_unique<JsepIceCandidate>(
+ candidate->sdp_mid(), candidate->sdp_mline_index(),
+ candidate->candidate()));
+ callback_triggered_ = true;
+ }
+
+ void OnIceCandidatesRemoved(
+ const std::vector<cricket::Candidate>& candidates) override {
+ num_candidates_removed_++;
+ callback_triggered_ = true;
+ }
+
+ void OnIceConnectionReceivingChange(bool receiving) override {
+ callback_triggered_ = true;
+ }
+
+ void OnAddTrack(rtc::scoped_refptr<RtpReceiverInterface> receiver,
+ const std::vector<rtc::scoped_refptr<MediaStreamInterface>>&
+ streams) override {
+ RTC_DCHECK(receiver);
+ num_added_tracks_++;
+ last_added_track_label_ = receiver->id();
+ add_track_events_.push_back(AddTrackEvent(receiver, streams));
+ }
+
+ void OnTrack(
+ rtc::scoped_refptr<RtpTransceiverInterface> transceiver) override {
+ on_track_transceivers_.push_back(transceiver);
+ }
+
+ void OnRemoveTrack(
+ rtc::scoped_refptr<RtpReceiverInterface> receiver) override {
+ remove_track_events_.push_back(receiver);
+ }
+
+ std::vector<rtc::scoped_refptr<RtpReceiverInterface>> GetAddTrackReceivers() {
+ std::vector<rtc::scoped_refptr<RtpReceiverInterface>> receivers;
+ for (const AddTrackEvent& event : add_track_events_) {
+ receivers.push_back(event.receiver);
+ }
+ return receivers;
+ }
+
+ int CountAddTrackEventsForStream(const std::string& stream_id) {
+ int found_tracks = 0;
+ for (const AddTrackEvent& event : add_track_events_) {
+ bool has_stream_id = false;
+ for (auto stream : event.streams) {
+ if (stream->id() == stream_id) {
+ has_stream_id = true;
+ break;
+ }
+ }
+ if (has_stream_id) {
+ ++found_tracks;
+ }
+ }
+ return found_tracks;
+ }
+
+ // Returns the id of the last added stream.
+  // Empty string if no stream has been added.
+ std::string GetLastAddedStreamId() {
+ if (last_added_stream_.get())
+ return last_added_stream_->id();
+ return "";
+ }
+ std::string GetLastRemovedStreamId() {
+ if (last_removed_stream_.get())
+ return last_removed_stream_->id();
+ return "";
+ }
+
+ IceCandidateInterface* last_candidate() {
+ if (candidates_.empty()) {
+ return nullptr;
+ } else {
+ return candidates_.back().get();
+ }
+ }
+
+ std::vector<const IceCandidateInterface*> GetAllCandidates() {
+ std::vector<const IceCandidateInterface*> candidates;
+ for (const auto& candidate : candidates_) {
+ candidates.push_back(candidate.get());
+ }
+ return candidates;
+ }
+
+ std::vector<IceCandidateInterface*> GetCandidatesByMline(int mline_index) {
+ std::vector<IceCandidateInterface*> candidates;
+ for (const auto& candidate : candidates_) {
+ if (candidate->sdp_mline_index() == mline_index) {
+ candidates.push_back(candidate.get());
+ }
+ }
+ return candidates;
+ }
+
+ bool legacy_renegotiation_needed() const { return renegotiation_needed_; }
+ void clear_legacy_renegotiation_needed() { renegotiation_needed_ = false; }
+
+ bool has_negotiation_needed_event() {
+ return latest_negotiation_needed_event_.has_value();
+ }
+ uint32_t latest_negotiation_needed_event() {
+ return latest_negotiation_needed_event_.value_or(0u);
+ }
+ void clear_latest_negotiation_needed_event() {
+ latest_negotiation_needed_event_ = absl::nullopt;
+ }
+
+ rtc::scoped_refptr<PeerConnectionInterface> pc_;
+ PeerConnectionInterface::SignalingState state_;
+ std::vector<std::unique_ptr<IceCandidateInterface>> candidates_;
+ rtc::scoped_refptr<DataChannelInterface> last_datachannel_;
+ rtc::scoped_refptr<StreamCollection> remote_streams_;
+ bool renegotiation_needed_ = false;
+ absl::optional<uint32_t> latest_negotiation_needed_event_;
+ bool ice_gathering_complete_ = false;
+ bool ice_connected_ = false;
+ bool callback_triggered_ = false;
+ int num_added_tracks_ = 0;
+ std::string last_added_track_label_;
+ std::vector<AddTrackEvent> add_track_events_;
+ std::vector<rtc::scoped_refptr<RtpReceiverInterface>> remove_track_events_;
+ std::vector<rtc::scoped_refptr<RtpTransceiverInterface>>
+ on_track_transceivers_;
+ int num_candidates_removed_ = 0;
+
+ private:
+ rtc::scoped_refptr<MediaStreamInterface> last_added_stream_;
+ rtc::scoped_refptr<MediaStreamInterface> last_removed_stream_;
+};
+
+class MockCreateSessionDescriptionObserver
+ : public webrtc::CreateSessionDescriptionObserver {
+ public:
+ MockCreateSessionDescriptionObserver()
+ : called_(false),
+ error_("MockCreateSessionDescriptionObserver not called") {}
+ virtual ~MockCreateSessionDescriptionObserver() {}
+ void OnSuccess(SessionDescriptionInterface* desc) override {
+ MutexLock lock(&mutex_);
+ called_ = true;
+ error_ = "";
+ desc_.reset(desc);
+ }
+ void OnFailure(webrtc::RTCError error) override {
+ MutexLock lock(&mutex_);
+ called_ = true;
+ error_ = error.message();
+ }
+ bool called() const {
+ MutexLock lock(&mutex_);
+ return called_;
+ }
+ bool result() const {
+ MutexLock lock(&mutex_);
+ return error_.empty();
+ }
+ const std::string& error() const {
+ MutexLock lock(&mutex_);
+ return error_;
+ }
+ std::unique_ptr<SessionDescriptionInterface> MoveDescription() {
+ MutexLock lock(&mutex_);
+ return std::move(desc_);
+ }
+
+ private:
+ mutable Mutex mutex_;
+ bool called_ RTC_GUARDED_BY(mutex_);
+ std::string error_ RTC_GUARDED_BY(mutex_);
+ std::unique_ptr<SessionDescriptionInterface> desc_ RTC_GUARDED_BY(mutex_);
+};
+
+class MockSetSessionDescriptionObserver
+ : public webrtc::SetSessionDescriptionObserver {
+ public:
+ static rtc::scoped_refptr<MockSetSessionDescriptionObserver> Create() {
+ return rtc::make_ref_counted<MockSetSessionDescriptionObserver>();
+ }
+
+ MockSetSessionDescriptionObserver()
+ : called_(false),
+ error_("MockSetSessionDescriptionObserver not called") {}
+ ~MockSetSessionDescriptionObserver() override {}
+ void OnSuccess() override {
+ MutexLock lock(&mutex_);
+
+ called_ = true;
+ error_ = "";
+ }
+ void OnFailure(webrtc::RTCError error) override {
+ MutexLock lock(&mutex_);
+ called_ = true;
+ error_ = error.message();
+ }
+
+ bool called() const {
+ MutexLock lock(&mutex_);
+ return called_;
+ }
+ bool result() const {
+ MutexLock lock(&mutex_);
+ return error_.empty();
+ }
+ const std::string& error() const {
+ MutexLock lock(&mutex_);
+ return error_;
+ }
+
+ private:
+ mutable Mutex mutex_;
+ bool called_;
+ std::string error_;
+};
+
+class FakeSetLocalDescriptionObserver
+ : public SetLocalDescriptionObserverInterface {
+ public:
+ bool called() const { return error_.has_value(); }
+ RTCError& error() {
+ RTC_DCHECK(error_.has_value());
+ return *error_;
+ }
+
+ // SetLocalDescriptionObserverInterface implementation.
+ void OnSetLocalDescriptionComplete(RTCError error) override {
+ error_ = std::move(error);
+ }
+
+ private:
+  // Set on completion; on success this is set to an RTCError::OK() error.
+ absl::optional<RTCError> error_;
+};
+
+class FakeSetRemoteDescriptionObserver
+ : public SetRemoteDescriptionObserverInterface {
+ public:
+ bool called() const { return error_.has_value(); }
+ RTCError& error() {
+ RTC_DCHECK(error_.has_value());
+ return *error_;
+ }
+
+ // SetRemoteDescriptionObserverInterface implementation.
+ void OnSetRemoteDescriptionComplete(RTCError error) override {
+ error_ = std::move(error);
+ }
+
+ private:
+  // Set on completion; on success this is set to an RTCError::OK() error.
+ absl::optional<RTCError> error_;
+};
+
+class MockDataChannelObserver : public webrtc::DataChannelObserver {
+ public:
+ struct Message {
+ std::string data;
+ bool binary;
+ };
+
+ explicit MockDataChannelObserver(webrtc::DataChannelInterface* channel)
+ : channel_(channel) {
+ channel_->RegisterObserver(this);
+ states_.push_back(channel_->state());
+ }
+ virtual ~MockDataChannelObserver() { channel_->UnregisterObserver(); }
+
+ void OnBufferedAmountChange(uint64_t previous_amount) override {}
+
+ void OnStateChange() override { states_.push_back(channel_->state()); }
+ void OnMessage(const DataBuffer& buffer) override {
+ messages_.push_back(
+ {std::string(buffer.data.data<char>(), buffer.data.size()),
+ buffer.binary});
+ }
+
+ bool IsOpen() const { return state() == DataChannelInterface::kOpen; }
+ std::vector<Message> messages() const { return messages_; }
+ std::string last_message() const {
+ if (messages_.empty())
+ return {};
+
+ return messages_.back().data;
+ }
+ bool last_message_is_binary() const {
+ if (messages_.empty())
+ return false;
+ return messages_.back().binary;
+ }
+ size_t received_message_count() const { return messages_.size(); }
+
+ DataChannelInterface::DataState state() const { return states_.back(); }
+ const std::vector<DataChannelInterface::DataState>& states() const {
+ return states_;
+ }
+
+ private:
+ rtc::scoped_refptr<webrtc::DataChannelInterface> channel_;
+ std::vector<DataChannelInterface::DataState> states_;
+ std::vector<Message> messages_;
+};
+
+class MockStatsObserver : public webrtc::StatsObserver {
+ public:
+ MockStatsObserver() : called_(false), stats_() {}
+ virtual ~MockStatsObserver() {}
+
+ virtual void OnComplete(const StatsReports& reports) {
+ RTC_CHECK(!called_);
+ called_ = true;
+ stats_.Clear();
+ stats_.number_of_reports = reports.size();
+ for (const auto* r : reports) {
+ if (r->type() == StatsReport::kStatsReportTypeSsrc) {
+ stats_.timestamp = r->timestamp();
+ GetIntValue(r, StatsReport::kStatsValueNameAudioOutputLevel,
+ &stats_.audio_output_level);
+ GetIntValue(r, StatsReport::kStatsValueNameAudioInputLevel,
+ &stats_.audio_input_level);
+ GetIntValue(r, StatsReport::kStatsValueNameBytesReceived,
+ &stats_.bytes_received);
+ GetIntValue(r, StatsReport::kStatsValueNameBytesSent,
+ &stats_.bytes_sent);
+ GetInt64Value(r, StatsReport::kStatsValueNameCaptureStartNtpTimeMs,
+ &stats_.capture_start_ntp_time);
+ stats_.track_ids.emplace_back();
+ GetStringValue(r, StatsReport::kStatsValueNameTrackId,
+ &stats_.track_ids.back());
+ } else if (r->type() == StatsReport::kStatsReportTypeBwe) {
+ stats_.timestamp = r->timestamp();
+ GetIntValue(r, StatsReport::kStatsValueNameAvailableReceiveBandwidth,
+ &stats_.available_receive_bandwidth);
+ } else if (r->type() == StatsReport::kStatsReportTypeComponent) {
+ stats_.timestamp = r->timestamp();
+ GetStringValue(r, StatsReport::kStatsValueNameDtlsCipher,
+ &stats_.dtls_cipher);
+ GetStringValue(r, StatsReport::kStatsValueNameSrtpCipher,
+ &stats_.srtp_cipher);
+ }
+ }
+ }
+
+ bool called() const { return called_; }
+ size_t number_of_reports() const { return stats_.number_of_reports; }
+ double timestamp() const { return stats_.timestamp; }
+
+ int AudioOutputLevel() const {
+ RTC_CHECK(called_);
+ return stats_.audio_output_level;
+ }
+
+ int AudioInputLevel() const {
+ RTC_CHECK(called_);
+ return stats_.audio_input_level;
+ }
+
+ int BytesReceived() const {
+ RTC_CHECK(called_);
+ return stats_.bytes_received;
+ }
+
+ int BytesSent() const {
+ RTC_CHECK(called_);
+ return stats_.bytes_sent;
+ }
+
+ int64_t CaptureStartNtpTime() const {
+ RTC_CHECK(called_);
+ return stats_.capture_start_ntp_time;
+ }
+
+ int AvailableReceiveBandwidth() const {
+ RTC_CHECK(called_);
+ return stats_.available_receive_bandwidth;
+ }
+
+ std::string DtlsCipher() const {
+ RTC_CHECK(called_);
+ return stats_.dtls_cipher;
+ }
+
+ std::string SrtpCipher() const {
+ RTC_CHECK(called_);
+ return stats_.srtp_cipher;
+ }
+
+ std::vector<std::string> TrackIds() const {
+ RTC_CHECK(called_);
+ return stats_.track_ids;
+ }
+
+ private:
+ bool GetIntValue(const StatsReport* report,
+ StatsReport::StatsValueName name,
+ int* value) {
+ const StatsReport::Value* v = report->FindValue(name);
+ if (v) {
+ // TODO(tommi): We should really just be using an int here :-/
+ *value = rtc::FromString<int>(v->ToString());
+ }
+ return v != nullptr;
+ }
+
+ bool GetInt64Value(const StatsReport* report,
+ StatsReport::StatsValueName name,
+ int64_t* value) {
+ const StatsReport::Value* v = report->FindValue(name);
+ if (v) {
+ // TODO(tommi): We should really just be using an int here :-/
+ *value = rtc::FromString<int64_t>(v->ToString());
+ }
+ return v != nullptr;
+ }
+
+ bool GetStringValue(const StatsReport* report,
+ StatsReport::StatsValueName name,
+ std::string* value) {
+ const StatsReport::Value* v = report->FindValue(name);
+ if (v)
+ *value = v->ToString();
+ return v != nullptr;
+ }
+
+ bool called_;
+ struct {
+ void Clear() {
+ number_of_reports = 0;
+ timestamp = 0;
+ audio_output_level = 0;
+ audio_input_level = 0;
+ bytes_received = 0;
+ bytes_sent = 0;
+ capture_start_ntp_time = 0;
+ available_receive_bandwidth = 0;
+ dtls_cipher.clear();
+ srtp_cipher.clear();
+ track_ids.clear();
+ }
+
+ size_t number_of_reports;
+ double timestamp;
+ int audio_output_level;
+ int audio_input_level;
+ int bytes_received;
+ int bytes_sent;
+ int64_t capture_start_ntp_time;
+ int available_receive_bandwidth;
+ std::string dtls_cipher;
+ std::string srtp_cipher;
+ std::vector<std::string> track_ids;
+ } stats_;
+};
+
+// Helper class that just stores the report from the callback.
+class MockRTCStatsCollectorCallback : public webrtc::RTCStatsCollectorCallback {
+ public:
+ rtc::scoped_refptr<const RTCStatsReport> report() { return report_; }
+
+ bool called() const { return called_; }
+
+ protected:
+ void OnStatsDelivered(
+ const rtc::scoped_refptr<const RTCStatsReport>& report) override {
+ report_ = report;
+ called_ = true;
+ }
+
+ private:
+ bool called_ = false;
+ rtc::scoped_refptr<const RTCStatsReport> report_;
+};
+
+} // namespace webrtc
+
+#endif // PC_TEST_MOCK_PEER_CONNECTION_OBSERVERS_H_
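
As the TODO at the top of this header notes, these "mock" observers are really fakes that record results. The usual pattern is to pass one into an asynchronous PeerConnection call and spin until called() flips. A hedged fragment illustrating that pattern (not part of the patch; `pc` and `kTimeoutMs` are assumed to come from the surrounding test fixture):

// `pc` is assumed to be a rtc::scoped_refptr<webrtc::PeerConnectionInterface>
// and kTimeoutMs an int, both provided by the test fixture.
auto observer =
    rtc::make_ref_counted<webrtc::MockCreateSessionDescriptionObserver>();
pc->CreateOffer(observer.get(),
                webrtc::PeerConnectionInterface::RTCOfferAnswerOptions());
// EXPECT_TRUE_WAIT comes from rtc_base/gunit.h and pumps the current thread.
EXPECT_TRUE_WAIT(observer->called(), kTimeoutMs);
ASSERT_TRUE(observer->result());
std::unique_ptr<webrtc::SessionDescriptionInterface> offer =
    observer->MoveDescription();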
diff --git a/third_party/libwebrtc/pc/test/mock_rtp_receiver_internal.h b/third_party/libwebrtc/pc/test/mock_rtp_receiver_internal.h
new file mode 100644
index 0000000000..e76b56755d
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/mock_rtp_receiver_internal.h
@@ -0,0 +1,82 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_TEST_MOCK_RTP_RECEIVER_INTERNAL_H_
+#define PC_TEST_MOCK_RTP_RECEIVER_INTERNAL_H_
+
+#include <string>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "pc/rtp_receiver.h"
+#include "test/gmock.h"
+
+namespace webrtc {
+
+// The definition of MockRtpReceiver is copied in to avoid multiple inheritance.
+class MockRtpReceiverInternal : public RtpReceiverInternal {
+ public:
+ // RtpReceiverInterface methods.
+ MOCK_METHOD(rtc::scoped_refptr<MediaStreamTrackInterface>,
+ track,
+ (),
+ (const, override));
+ MOCK_METHOD(rtc::scoped_refptr<DtlsTransportInterface>,
+ dtls_transport,
+ (),
+ (const, override));
+ MOCK_METHOD(std::vector<std::string>, stream_ids, (), (const, override));
+ MOCK_METHOD(std::vector<rtc::scoped_refptr<MediaStreamInterface>>,
+ streams,
+ (),
+ (const, override));
+ MOCK_METHOD(cricket::MediaType, media_type, (), (const, override));
+ MOCK_METHOD(std::string, id, (), (const, override));
+ MOCK_METHOD(RtpParameters, GetParameters, (), (const, override));
+ MOCK_METHOD(void, SetObserver, (RtpReceiverObserverInterface*), (override));
+ MOCK_METHOD(void,
+ SetJitterBufferMinimumDelay,
+ (absl::optional<double>),
+ (override));
+ MOCK_METHOD(std::vector<RtpSource>, GetSources, (), (const, override));
+ MOCK_METHOD(void,
+ SetFrameDecryptor,
+ (rtc::scoped_refptr<FrameDecryptorInterface>),
+ (override));
+ MOCK_METHOD(rtc::scoped_refptr<FrameDecryptorInterface>,
+ GetFrameDecryptor,
+ (),
+ (const, override));
+
+ // RtpReceiverInternal methods.
+ MOCK_METHOD(void, Stop, (), (override));
+ MOCK_METHOD(void,
+ SetMediaChannel,
+ (cricket::MediaReceiveChannelInterface*),
+ (override));
+ MOCK_METHOD(void, SetupMediaChannel, (uint32_t), (override));
+ MOCK_METHOD(void, SetupUnsignaledMediaChannel, (), (override));
+ MOCK_METHOD(absl::optional<uint32_t>, ssrc, (), (const, override));
+ MOCK_METHOD(void, NotifyFirstPacketReceived, (), (override));
+ MOCK_METHOD(void, set_stream_ids, (std::vector<std::string>), (override));
+ MOCK_METHOD(void,
+ set_transport,
+ (rtc::scoped_refptr<DtlsTransportInterface>),
+ (override));
+ MOCK_METHOD(void,
+ SetStreams,
+ (const std::vector<rtc::scoped_refptr<MediaStreamInterface>>&),
+ (override));
+ MOCK_METHOD(int, AttachmentId, (), (const, override));
+};
+
+} // namespace webrtc
+
+#endif // PC_TEST_MOCK_RTP_RECEIVER_INTERNAL_H_
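
A minimal sketch (not part of the patch) of stubbing this mock; RtpReceiverInternal is refcounted via RtpReceiverInterface, so the mock is created with rtc::make_ref_counted. The function name is illustrative.

#include "absl/types/optional.h"
#include "api/make_ref_counted.h"
#include "pc/test/mock_rtp_receiver_internal.h"
#include "test/gmock.h"

void StubRtpReceiverSketch() {
  auto receiver = rtc::make_ref_counted<webrtc::MockRtpReceiverInternal>();
  EXPECT_CALL(*receiver, media_type())
      .WillRepeatedly(::testing::Return(cricket::MEDIA_TYPE_AUDIO));
  // ssrc() returns absl::optional<uint32_t>, so wrap the value explicitly.
  EXPECT_CALL(*receiver, ssrc())
      .WillRepeatedly(::testing::Return(absl::optional<uint32_t>(49)));
}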
diff --git a/third_party/libwebrtc/pc/test/mock_rtp_sender_internal.h b/third_party/libwebrtc/pc/test/mock_rtp_sender_internal.h
new file mode 100644
index 0000000000..8ed0ede21b
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/mock_rtp_sender_internal.h
@@ -0,0 +1,109 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_TEST_MOCK_RTP_SENDER_INTERNAL_H_
+#define PC_TEST_MOCK_RTP_SENDER_INTERNAL_H_
+
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "pc/rtp_sender.h"
+#include "test/gmock.h"
+
+namespace webrtc {
+
+// The definition of MockRtpSender is copied in to avoid multiple inheritance.
+class MockRtpSenderInternal : public RtpSenderInternal {
+ public:
+ // RtpSenderInterface methods.
+ MOCK_METHOD(bool, SetTrack, (MediaStreamTrackInterface*), (override));
+ MOCK_METHOD(rtc::scoped_refptr<MediaStreamTrackInterface>,
+ track,
+ (),
+ (const, override));
+ MOCK_METHOD(uint32_t, ssrc, (), (const, override));
+ MOCK_METHOD(rtc::scoped_refptr<DtlsTransportInterface>,
+ dtls_transport,
+ (),
+ (const, override));
+ MOCK_METHOD(cricket::MediaType, media_type, (), (const, override));
+ MOCK_METHOD(std::string, id, (), (const, override));
+ MOCK_METHOD(std::vector<std::string>, stream_ids, (), (const, override));
+ MOCK_METHOD(std::vector<RtpEncodingParameters>,
+ init_send_encodings,
+ (),
+ (const, override));
+ MOCK_METHOD(void,
+ set_transport,
+ (rtc::scoped_refptr<DtlsTransportInterface>),
+ (override));
+ MOCK_METHOD(RtpParameters, GetParameters, (), (const, override));
+ MOCK_METHOD(RtpParameters, GetParametersInternal, (), (const, override));
+ MOCK_METHOD(RtpParameters,
+ GetParametersInternalWithAllLayers,
+ (),
+ (const, override));
+ MOCK_METHOD(RTCError, SetParameters, (const RtpParameters&), (override));
+ MOCK_METHOD(void,
+ SetParametersAsync,
+ (const RtpParameters&, SetParametersCallback),
+ (override));
+ MOCK_METHOD(void,
+ SetParametersInternal,
+ (const RtpParameters&, SetParametersCallback, bool blocking),
+ (override));
+ MOCK_METHOD(RTCError,
+ SetParametersInternalWithAllLayers,
+ (const RtpParameters&),
+ (override));
+ MOCK_METHOD(RTCError, CheckSVCParameters, (const RtpParameters&), (override));
+ MOCK_METHOD(void,
+ SetVideoCodecPreferences,
+ (std::vector<cricket::VideoCodec>),
+ (override));
+ MOCK_METHOD(rtc::scoped_refptr<DtmfSenderInterface>,
+ GetDtmfSender,
+ (),
+ (const, override));
+ MOCK_METHOD(void,
+ SetFrameEncryptor,
+ (rtc::scoped_refptr<FrameEncryptorInterface>),
+ (override));
+ MOCK_METHOD(rtc::scoped_refptr<FrameEncryptorInterface>,
+ GetFrameEncryptor,
+ (),
+ (const, override));
+ MOCK_METHOD(void,
+ SetEncoderToPacketizerFrameTransformer,
+ (rtc::scoped_refptr<FrameTransformerInterface>),
+ (override));
+ MOCK_METHOD(void,
+ SetEncoderSelector,
+ (std::unique_ptr<VideoEncoderFactory::EncoderSelectorInterface>),
+ (override));
+
+ // RtpSenderInternal methods.
+ MOCK_METHOD1(SetMediaChannel, void(cricket::MediaSendChannelInterface*));
+ MOCK_METHOD1(SetSsrc, void(uint32_t));
+ MOCK_METHOD1(set_stream_ids, void(const std::vector<std::string>&));
+ MOCK_METHOD1(SetStreams, void(const std::vector<std::string>&));
+ MOCK_METHOD1(set_init_send_encodings,
+ void(const std::vector<RtpEncodingParameters>&));
+ MOCK_METHOD0(Stop, void());
+ MOCK_CONST_METHOD0(AttachmentId, int());
+ MOCK_METHOD1(DisableEncodingLayers,
+ RTCError(const std::vector<std::string>&));
+ MOCK_METHOD0(SetTransceiverAsStopped, void());
+};
+
+} // namespace webrtc
+
+#endif // PC_TEST_MOCK_RTP_SENDER_INTERNAL_H_
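
The RtpSenderInternal methods above use the legacy MOCK_METHODn macros, but they work with EXPECT_CALL exactly like the newer MOCK_METHOD form. A minimal sketch (not part of the patch; the function name is illustrative), again created refcounted since RtpSenderInterface is refcounted:

#include "api/make_ref_counted.h"
#include "pc/test/mock_rtp_sender_internal.h"
#include "test/gmock.h"

void StubRtpSenderSketch() {
  auto sender = rtc::make_ref_counted<webrtc::MockRtpSenderInternal>();
  EXPECT_CALL(*sender, media_type())
      .WillRepeatedly(::testing::Return(cricket::MEDIA_TYPE_VIDEO));
  // Legacy-style mock methods still participate in expectations as usual.
  EXPECT_CALL(*sender, SetSsrc(1234u)).Times(1);
  sender->SetSsrc(1234u);
}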
diff --git a/third_party/libwebrtc/pc/test/mock_voice_media_channel.h b/third_party/libwebrtc/pc/test/mock_voice_media_channel.h
new file mode 100644
index 0000000000..2e5a8b5801
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/mock_voice_media_channel.h
@@ -0,0 +1,163 @@
+/*
+ * Copyright 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef PC_TEST_MOCK_VOICE_MEDIA_CHANNEL_H_
+#define PC_TEST_MOCK_VOICE_MEDIA_CHANNEL_H_
+
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "api/call/audio_sink.h"
+#include "media/base/media_channel.h"
+#include "media/base/media_channel_impl.h"
+#include "modules/rtp_rtcp/source/rtp_packet_received.h"
+#include "rtc_base/gunit.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+using ::testing::InvokeWithoutArgs;
+using ::testing::Mock;
+
+namespace cricket {
+class MockVoiceMediaChannel : public VoiceMediaChannel {
+ public:
+ explicit MockVoiceMediaChannel(webrtc::TaskQueueBase* network_thread)
+ : VoiceMediaChannel(network_thread) {}
+
+ MOCK_METHOD(void,
+ SetInterface,
+ (MediaChannelNetworkInterface * iface),
+ (override));
+ MOCK_METHOD(void,
+ OnPacketReceived,
+ (const webrtc::RtpPacketReceived& packet),
+ (override));
+ MOCK_METHOD(void,
+ OnPacketSent,
+ (const rtc::SentPacket& sent_packet),
+ (override));
+ MOCK_METHOD(void, OnReadyToSend, (bool ready), (override));
+ MOCK_METHOD(void,
+ OnNetworkRouteChanged,
+ (absl::string_view transport_name,
+ const rtc::NetworkRoute& network_route),
+ (override));
+ MOCK_METHOD(bool, AddSendStream, (const StreamParams& sp), (override));
+ MOCK_METHOD(bool, RemoveSendStream, (uint32_t ssrc), (override));
+ MOCK_METHOD(bool, AddRecvStream, (const StreamParams& sp), (override));
+ MOCK_METHOD(bool, RemoveRecvStream, (uint32_t ssrc), (override));
+ MOCK_METHOD(void, ResetUnsignaledRecvStream, (), (override));
+ MOCK_METHOD(absl::optional<uint32_t>,
+ GetUnsignaledSsrc,
+ (),
+ (const, override));
+ MOCK_METHOD(void, OnDemuxerCriteriaUpdatePending, (), (override));
+ MOCK_METHOD(void, OnDemuxerCriteriaUpdateComplete, (), (override));
+ MOCK_METHOD(int, GetRtpSendTimeExtnId, (), (const, override));
+ MOCK_METHOD(
+ void,
+ SetFrameEncryptor,
+ (uint32_t ssrc,
+ rtc::scoped_refptr<webrtc::FrameEncryptorInterface> frame_encryptor),
+ (override));
+ MOCK_METHOD(
+ void,
+ SetFrameDecryptor,
+ (uint32_t ssrc,
+ rtc::scoped_refptr<webrtc::FrameDecryptorInterface> frame_decryptor),
+ (override));
+ MOCK_METHOD(webrtc::RtpParameters,
+ GetRtpSendParameters,
+ (uint32_t ssrc),
+ (const, override));
+ MOCK_METHOD(webrtc::RTCError,
+ SetRtpSendParameters,
+ (uint32_t ssrc,
+ const webrtc::RtpParameters& parameters,
+ webrtc::SetParametersCallback callback),
+ (override));
+ MOCK_METHOD(
+ void,
+ SetEncoderToPacketizerFrameTransformer,
+ (uint32_t ssrc,
+ rtc::scoped_refptr<webrtc::FrameTransformerInterface> frame_transformer),
+ (override));
+ MOCK_METHOD(
+ void,
+ SetDepacketizerToDecoderFrameTransformer,
+ (uint32_t ssrc,
+ rtc::scoped_refptr<webrtc::FrameTransformerInterface> frame_transformer),
+ (override));
+
+ MOCK_METHOD(bool,
+ SetSendParameters,
+ (const AudioSendParameters& params),
+ (override));
+ MOCK_METHOD(bool,
+ SetRecvParameters,
+ (const AudioRecvParameters& params),
+ (override));
+ MOCK_METHOD(webrtc::RtpParameters,
+ GetRtpReceiveParameters,
+ (uint32_t ssrc),
+ (const, override));
+ MOCK_METHOD(webrtc::RtpParameters,
+ GetDefaultRtpReceiveParameters,
+ (),
+ (const, override));
+ MOCK_METHOD(void, SetPlayout, (bool playout), (override));
+ MOCK_METHOD(void, SetSend, (bool send), (override));
+ MOCK_METHOD(bool,
+ SetAudioSend,
+ (uint32_t ssrc,
+ bool enable,
+ const AudioOptions* options,
+ AudioSource* source),
+ (override));
+ MOCK_METHOD(bool,
+ SetOutputVolume,
+ (uint32_t ssrc, double volume),
+ (override));
+ MOCK_METHOD(bool, SetDefaultOutputVolume, (double volume), (override));
+ MOCK_METHOD(bool, CanInsertDtmf, (), (override));
+ MOCK_METHOD(bool,
+ InsertDtmf,
+ (uint32_t ssrc, int event, int duration),
+ (override));
+ MOCK_METHOD(bool, GetSendStats, (VoiceMediaSendInfo * info), (override));
+ MOCK_METHOD(bool,
+ GetReceiveStats,
+ (VoiceMediaReceiveInfo * info, bool get_and_clear_legacy_stats),
+ (override));
+ MOCK_METHOD(void,
+ SetRawAudioSink,
+ (uint32_t ssrc, std::unique_ptr<webrtc::AudioSinkInterface> sink),
+ (override));
+ MOCK_METHOD(void,
+ SetDefaultRawAudioSink,
+ (std::unique_ptr<webrtc::AudioSinkInterface> sink),
+ (override));
+ MOCK_METHOD(std::vector<webrtc::RtpSource>,
+ GetSources,
+ (uint32_t ssrc),
+ (const, override));
+
+ MOCK_METHOD(bool,
+ SetBaseMinimumPlayoutDelayMs,
+ (uint32_t ssrc, int delay_ms),
+ (override));
+ MOCK_METHOD(absl::optional<int>,
+ GetBaseMinimumPlayoutDelayMs,
+ (uint32_t ssrc),
+ (const, override));
+};
+} // namespace cricket
+
+#endif // PC_TEST_MOCK_VOICE_MEDIA_CHANNEL_H_
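
The constructor takes the network thread as a webrtc::TaskQueueBase*; rtc::Thread derives from TaskQueueBase, so tests commonly pass rtc::Thread::Current(). A minimal sketch (not part of the patch; function name illustrative), assuming the calling thread is wrapped by an rtc::Thread as in the WebRTC test binaries:

#include "pc/test/mock_voice_media_channel.h"
#include "rtc_base/thread.h"
#include "test/gmock.h"

void MockVoiceMediaChannelSketch() {
  // rtc::Thread::Current() is valid here only if the current thread is an
  // rtc::Thread; otherwise the test would need to create one explicitly.
  cricket::MockVoiceMediaChannel channel(rtc::Thread::Current());
  EXPECT_CALL(channel, SetPlayout(true)).Times(1);
  channel.SetPlayout(true);
}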
diff --git a/third_party/libwebrtc/pc/test/peer_connection_test_wrapper.cc b/third_party/libwebrtc/pc/test/peer_connection_test_wrapper.cc
new file mode 100644
index 0000000000..8325e59510
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/peer_connection_test_wrapper.cc
@@ -0,0 +1,356 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "pc/test/peer_connection_test_wrapper.h"
+
+#include <stddef.h>
+
+#include <memory>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/audio/audio_mixer.h"
+#include "api/create_peerconnection_factory.h"
+#include "api/sequence_checker.h"
+#include "api/video_codecs/builtin_video_decoder_factory.h"
+#include "api/video_codecs/builtin_video_encoder_factory.h"
+#include "api/video_codecs/video_decoder_factory.h"
+#include "api/video_codecs/video_encoder_factory.h"
+#include "modules/audio_device/include/audio_device.h"
+#include "modules/audio_processing/include/audio_processing.h"
+#include "p2p/base/fake_port_allocator.h"
+#include "p2p/base/port_allocator.h"
+#include "pc/test/fake_periodic_video_source.h"
+#include "pc/test/fake_periodic_video_track_source.h"
+#include "pc/test/fake_rtc_certificate_generator.h"
+#include "pc/test/mock_peer_connection_observers.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/rtc_certificate_generator.h"
+#include "rtc_base/string_encode.h"
+#include "rtc_base/time_utils.h"
+#include "test/gtest.h"
+
+using webrtc::FakeVideoTrackRenderer;
+using webrtc::IceCandidateInterface;
+using webrtc::MediaStreamInterface;
+using webrtc::MediaStreamTrackInterface;
+using webrtc::MockSetSessionDescriptionObserver;
+using webrtc::PeerConnectionInterface;
+using webrtc::RtpReceiverInterface;
+using webrtc::SdpType;
+using webrtc::SessionDescriptionInterface;
+using webrtc::VideoTrackInterface;
+
+namespace {
+const char kStreamIdBase[] = "stream_id";
+const char kVideoTrackLabelBase[] = "video_track";
+const char kAudioTrackLabelBase[] = "audio_track";
+constexpr int kMaxWait = 10000;
+constexpr int kTestAudioFrameCount = 3;
+constexpr int kTestVideoFrameCount = 3;
+} // namespace
+
+void PeerConnectionTestWrapper::Connect(PeerConnectionTestWrapper* caller,
+ PeerConnectionTestWrapper* callee) {
+ caller->SignalOnIceCandidateReady.connect(
+ callee, &PeerConnectionTestWrapper::AddIceCandidate);
+ callee->SignalOnIceCandidateReady.connect(
+ caller, &PeerConnectionTestWrapper::AddIceCandidate);
+
+ caller->SignalOnSdpReady.connect(callee,
+ &PeerConnectionTestWrapper::ReceiveOfferSdp);
+ callee->SignalOnSdpReady.connect(
+ caller, &PeerConnectionTestWrapper::ReceiveAnswerSdp);
+}
+
+PeerConnectionTestWrapper::PeerConnectionTestWrapper(
+ const std::string& name,
+ rtc::SocketServer* socket_server,
+ rtc::Thread* network_thread,
+ rtc::Thread* worker_thread)
+ : name_(name),
+ socket_server_(socket_server),
+ network_thread_(network_thread),
+ worker_thread_(worker_thread),
+ pending_negotiation_(false) {
+ pc_thread_checker_.Detach();
+}
+
+PeerConnectionTestWrapper::~PeerConnectionTestWrapper() {
+ RTC_DCHECK_RUN_ON(&pc_thread_checker_);
+ // Either network_thread or worker_thread might be active at this point.
+ // Relying on ~PeerConnection to properly wait for them doesn't work,
+ // as a vptr race might occur (before we enter the destruction body).
+ // See: bugs.webrtc.org/9847
+ if (pc()) {
+ pc()->Close();
+ }
+}
+
+bool PeerConnectionTestWrapper::CreatePc(
+ const webrtc::PeerConnectionInterface::RTCConfiguration& config,
+ rtc::scoped_refptr<webrtc::AudioEncoderFactory> audio_encoder_factory,
+ rtc::scoped_refptr<webrtc::AudioDecoderFactory> audio_decoder_factory) {
+ std::unique_ptr<cricket::PortAllocator> port_allocator(
+ new cricket::FakePortAllocator(
+ network_thread_,
+ std::make_unique<rtc::BasicPacketSocketFactory>(socket_server_),
+ &field_trials_));
+
+ RTC_DCHECK_RUN_ON(&pc_thread_checker_);
+
+ fake_audio_capture_module_ = FakeAudioCaptureModule::Create();
+ if (fake_audio_capture_module_ == nullptr) {
+ return false;
+ }
+
+ peer_connection_factory_ = webrtc::CreatePeerConnectionFactory(
+ network_thread_, worker_thread_, rtc::Thread::Current(),
+ rtc::scoped_refptr<webrtc::AudioDeviceModule>(fake_audio_capture_module_),
+ audio_encoder_factory, audio_decoder_factory,
+ webrtc::CreateBuiltinVideoEncoderFactory(),
+ webrtc::CreateBuiltinVideoDecoderFactory(), nullptr /* audio_mixer */,
+ nullptr /* audio_processing */);
+ if (!peer_connection_factory_) {
+ return false;
+ }
+
+ std::unique_ptr<rtc::RTCCertificateGeneratorInterface> cert_generator(
+ new FakeRTCCertificateGenerator());
+ webrtc::PeerConnectionDependencies deps(this);
+ deps.allocator = std::move(port_allocator);
+ deps.cert_generator = std::move(cert_generator);
+ auto result = peer_connection_factory_->CreatePeerConnectionOrError(
+ config, std::move(deps));
+ if (result.ok()) {
+ peer_connection_ = result.MoveValue();
+ return true;
+ } else {
+ return false;
+ }
+}
+
+rtc::scoped_refptr<webrtc::DataChannelInterface>
+PeerConnectionTestWrapper::CreateDataChannel(
+ const std::string& label,
+ const webrtc::DataChannelInit& init) {
+ auto result = peer_connection_->CreateDataChannelOrError(label, &init);
+ if (!result.ok()) {
+ RTC_LOG(LS_ERROR) << "CreateDataChannel failed: "
+ << ToString(result.error().type()) << " "
+ << result.error().message();
+ return nullptr;
+ }
+ return result.MoveValue();
+}
+
+void PeerConnectionTestWrapper::WaitForNegotiation() {
+ EXPECT_TRUE_WAIT(!pending_negotiation_, kMaxWait);
+}
+
+void PeerConnectionTestWrapper::OnSignalingChange(
+ webrtc::PeerConnectionInterface::SignalingState new_state) {
+ if (new_state == webrtc::PeerConnectionInterface::SignalingState::kStable) {
+ pending_negotiation_ = false;
+ }
+}
+
+void PeerConnectionTestWrapper::OnAddTrack(
+ rtc::scoped_refptr<RtpReceiverInterface> receiver,
+ const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& streams) {
+ RTC_LOG(LS_INFO) << "PeerConnectionTestWrapper " << name_ << ": OnAddTrack";
+ if (receiver->track()->kind() == MediaStreamTrackInterface::kVideoKind) {
+ auto* video_track =
+ static_cast<VideoTrackInterface*>(receiver->track().get());
+ renderer_ = std::make_unique<FakeVideoTrackRenderer>(video_track);
+ }
+}
+
+void PeerConnectionTestWrapper::OnIceCandidate(
+ const IceCandidateInterface* candidate) {
+ std::string sdp;
+ EXPECT_TRUE(candidate->ToString(&sdp));
+ SignalOnIceCandidateReady(candidate->sdp_mid(), candidate->sdp_mline_index(),
+ sdp);
+}
+
+void PeerConnectionTestWrapper::OnDataChannel(
+ rtc::scoped_refptr<webrtc::DataChannelInterface> data_channel) {
+ SignalOnDataChannel(data_channel.get());
+}
+
+void PeerConnectionTestWrapper::OnSuccess(SessionDescriptionInterface* desc) {
+  // This callback should take ownership of `desc`.
+ std::unique_ptr<SessionDescriptionInterface> owned_desc(desc);
+ std::string sdp;
+ EXPECT_TRUE(desc->ToString(&sdp));
+
+ RTC_LOG(LS_INFO) << "PeerConnectionTestWrapper " << name_ << ": "
+ << webrtc::SdpTypeToString(desc->GetType())
+ << " sdp created: " << sdp;
+
+ SetLocalDescription(desc->GetType(), sdp);
+
+ SignalOnSdpReady(sdp);
+}
+
+void PeerConnectionTestWrapper::CreateOffer(
+ const webrtc::PeerConnectionInterface::RTCOfferAnswerOptions& options) {
+ RTC_LOG(LS_INFO) << "PeerConnectionTestWrapper " << name_ << ": CreateOffer.";
+ pending_negotiation_ = true;
+ peer_connection_->CreateOffer(this, options);
+}
+
+void PeerConnectionTestWrapper::CreateAnswer(
+ const webrtc::PeerConnectionInterface::RTCOfferAnswerOptions& options) {
+ RTC_LOG(LS_INFO) << "PeerConnectionTestWrapper " << name_
+ << ": CreateAnswer.";
+ pending_negotiation_ = true;
+ peer_connection_->CreateAnswer(this, options);
+}
+
+void PeerConnectionTestWrapper::ReceiveOfferSdp(const std::string& sdp) {
+ SetRemoteDescription(SdpType::kOffer, sdp);
+ CreateAnswer(webrtc::PeerConnectionInterface::RTCOfferAnswerOptions());
+}
+
+void PeerConnectionTestWrapper::ReceiveAnswerSdp(const std::string& sdp) {
+ SetRemoteDescription(SdpType::kAnswer, sdp);
+}
+
+void PeerConnectionTestWrapper::SetLocalDescription(SdpType type,
+ const std::string& sdp) {
+ RTC_LOG(LS_INFO) << "PeerConnectionTestWrapper " << name_
+ << ": SetLocalDescription " << webrtc::SdpTypeToString(type)
+ << " " << sdp;
+
+ auto observer = rtc::make_ref_counted<MockSetSessionDescriptionObserver>();
+ peer_connection_->SetLocalDescription(
+ observer.get(), webrtc::CreateSessionDescription(type, sdp).release());
+}
+
+void PeerConnectionTestWrapper::SetRemoteDescription(SdpType type,
+ const std::string& sdp) {
+ RTC_LOG(LS_INFO) << "PeerConnectionTestWrapper " << name_
+ << ": SetRemoteDescription " << webrtc::SdpTypeToString(type)
+ << " " << sdp;
+
+ auto observer = rtc::make_ref_counted<MockSetSessionDescriptionObserver>();
+ peer_connection_->SetRemoteDescription(
+ observer.get(), webrtc::CreateSessionDescription(type, sdp).release());
+}
+
+void PeerConnectionTestWrapper::AddIceCandidate(const std::string& sdp_mid,
+ int sdp_mline_index,
+ const std::string& candidate) {
+ std::unique_ptr<webrtc::IceCandidateInterface> owned_candidate(
+ webrtc::CreateIceCandidate(sdp_mid, sdp_mline_index, candidate, NULL));
+ EXPECT_TRUE(peer_connection_->AddIceCandidate(owned_candidate.get()));
+}
+
+void PeerConnectionTestWrapper::WaitForCallEstablished() {
+ WaitForConnection();
+ WaitForAudio();
+ WaitForVideo();
+}
+
+void PeerConnectionTestWrapper::WaitForConnection() {
+ EXPECT_TRUE_WAIT(CheckForConnection(), kMaxWait);
+ RTC_LOG(LS_INFO) << "PeerConnectionTestWrapper " << name_ << ": Connected.";
+}
+
+bool PeerConnectionTestWrapper::CheckForConnection() {
+ return (peer_connection_->ice_connection_state() ==
+ PeerConnectionInterface::kIceConnectionConnected) ||
+ (peer_connection_->ice_connection_state() ==
+ PeerConnectionInterface::kIceConnectionCompleted);
+}
+
+void PeerConnectionTestWrapper::WaitForAudio() {
+ EXPECT_TRUE_WAIT(CheckForAudio(), kMaxWait);
+ RTC_LOG(LS_INFO) << "PeerConnectionTestWrapper " << name_
+ << ": Got enough audio frames.";
+}
+
+bool PeerConnectionTestWrapper::CheckForAudio() {
+ return (fake_audio_capture_module_->frames_received() >=
+ kTestAudioFrameCount);
+}
+
+void PeerConnectionTestWrapper::WaitForVideo() {
+ EXPECT_TRUE_WAIT(CheckForVideo(), kMaxWait);
+ RTC_LOG(LS_INFO) << "PeerConnectionTestWrapper " << name_
+ << ": Got enough video frames.";
+}
+
+bool PeerConnectionTestWrapper::CheckForVideo() {
+ if (!renderer_) {
+ return false;
+ }
+ return (renderer_->num_rendered_frames() >= kTestVideoFrameCount);
+}
+
+void PeerConnectionTestWrapper::GetAndAddUserMedia(
+ bool audio,
+ const cricket::AudioOptions& audio_options,
+ bool video) {
+ rtc::scoped_refptr<webrtc::MediaStreamInterface> stream =
+ GetUserMedia(audio, audio_options, video);
+ for (const auto& audio_track : stream->GetAudioTracks()) {
+ EXPECT_TRUE(peer_connection_->AddTrack(audio_track, {stream->id()}).ok());
+ }
+ for (const auto& video_track : stream->GetVideoTracks()) {
+ EXPECT_TRUE(peer_connection_->AddTrack(video_track, {stream->id()}).ok());
+ }
+}
+
+rtc::scoped_refptr<webrtc::MediaStreamInterface>
+PeerConnectionTestWrapper::GetUserMedia(
+ bool audio,
+ const cricket::AudioOptions& audio_options,
+ bool video) {
+ std::string stream_id =
+ kStreamIdBase + rtc::ToString(num_get_user_media_calls_++);
+ rtc::scoped_refptr<webrtc::MediaStreamInterface> stream =
+ peer_connection_factory_->CreateLocalMediaStream(stream_id);
+
+ if (audio) {
+ cricket::AudioOptions options = audio_options;
+ // Disable highpass filter so that we can get all the test audio frames.
+ options.highpass_filter = false;
+ rtc::scoped_refptr<webrtc::AudioSourceInterface> source =
+ peer_connection_factory_->CreateAudioSource(options);
+ rtc::scoped_refptr<webrtc::AudioTrackInterface> audio_track(
+ peer_connection_factory_->CreateAudioTrack(kAudioTrackLabelBase,
+ source.get()));
+ stream->AddTrack(audio_track);
+ }
+
+ if (video) {
+    // Set max frame rate to 10fps to reduce the risk of the tests being flaky.
+ webrtc::FakePeriodicVideoSource::Config config;
+ config.frame_interval_ms = 100;
+ config.timestamp_offset_ms = rtc::TimeMillis();
+
+ auto source = rtc::make_ref_counted<webrtc::FakePeriodicVideoTrackSource>(
+ config, /* remote */ false);
+
+ std::string videotrack_label = stream_id + kVideoTrackLabelBase;
+ rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track(
+ peer_connection_factory_->CreateVideoTrack(videotrack_label,
+ source.get()));
+
+ stream->AddTrack(video_track);
+ }
+ return stream;
+}
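
A hedged sketch of the call flow this wrapper is designed for (not part of the patch): two wrappers are connected so SDP and ICE candidates flow between them over sigslot, media is added on the caller, and the wait helpers block until the call is up. Socket-server and thread setup are elided; `ss`, `network_thread` and `worker_thread` are assumed to come from the test fixture, and the built-in audio codec factories are from api/audio_codecs.

// Assumed fixture members: rtc::SocketServer* ss; rtc::Thread* network_thread;
// rtc::Thread* worker_thread (both threads already started).
auto caller = rtc::make_ref_counted<PeerConnectionTestWrapper>(
    "caller", ss, network_thread, worker_thread);
auto callee = rtc::make_ref_counted<PeerConnectionTestWrapper>(
    "callee", ss, network_thread, worker_thread);
webrtc::PeerConnectionInterface::RTCConfiguration config;
caller->CreatePc(config, webrtc::CreateBuiltinAudioEncoderFactory(),
                 webrtc::CreateBuiltinAudioDecoderFactory());
callee->CreatePc(config, webrtc::CreateBuiltinAudioEncoderFactory(),
                 webrtc::CreateBuiltinAudioDecoderFactory());
// Wire up SDP and ICE candidate exchange between the two wrappers.
PeerConnectionTestWrapper::Connect(caller.get(), callee.get());
caller->GetAndAddUserMedia(/*audio=*/true, cricket::AudioOptions(),
                           /*video=*/true);
caller->CreateOffer(webrtc::PeerConnectionInterface::RTCOfferAnswerOptions());
// Blocks until ICE connects and enough audio/video frames have arrived.
caller->WaitForCallEstablished();
callee->WaitForCallEstablished();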
diff --git a/third_party/libwebrtc/pc/test/peer_connection_test_wrapper.h b/third_party/libwebrtc/pc/test/peer_connection_test_wrapper.h
new file mode 100644
index 0000000000..cda3ecb73b
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/peer_connection_test_wrapper.h
@@ -0,0 +1,135 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_TEST_PEER_CONNECTION_TEST_WRAPPER_H_
+#define PC_TEST_PEER_CONNECTION_TEST_WRAPPER_H_
+
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "api/audio_codecs/audio_decoder_factory.h"
+#include "api/audio_codecs/audio_encoder_factory.h"
+#include "api/audio_options.h"
+#include "api/data_channel_interface.h"
+#include "api/jsep.h"
+#include "api/media_stream_interface.h"
+#include "api/peer_connection_interface.h"
+#include "api/rtc_error.h"
+#include "api/rtp_receiver_interface.h"
+#include "api/scoped_refptr.h"
+#include "api/sequence_checker.h"
+#include "pc/test/fake_audio_capture_module.h"
+#include "pc/test/fake_video_track_renderer.h"
+#include "rtc_base/third_party/sigslot/sigslot.h"
+#include "rtc_base/thread.h"
+#include "test/scoped_key_value_config.h"
+
+class PeerConnectionTestWrapper
+ : public webrtc::PeerConnectionObserver,
+ public webrtc::CreateSessionDescriptionObserver,
+ public sigslot::has_slots<> {
+ public:
+ static void Connect(PeerConnectionTestWrapper* caller,
+ PeerConnectionTestWrapper* callee);
+
+ PeerConnectionTestWrapper(const std::string& name,
+ rtc::SocketServer* socket_server,
+ rtc::Thread* network_thread,
+ rtc::Thread* worker_thread);
+ virtual ~PeerConnectionTestWrapper();
+
+ bool CreatePc(
+ const webrtc::PeerConnectionInterface::RTCConfiguration& config,
+ rtc::scoped_refptr<webrtc::AudioEncoderFactory> audio_encoder_factory,
+ rtc::scoped_refptr<webrtc::AudioDecoderFactory> audio_decoder_factory);
+
+ rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> pc_factory()
+ const {
+ return peer_connection_factory_;
+ }
+ webrtc::PeerConnectionInterface* pc() { return peer_connection_.get(); }
+
+ rtc::scoped_refptr<webrtc::DataChannelInterface> CreateDataChannel(
+ const std::string& label,
+ const webrtc::DataChannelInit& init);
+
+ void WaitForNegotiation();
+
+ // Implements PeerConnectionObserver.
+ void OnSignalingChange(
+ webrtc::PeerConnectionInterface::SignalingState new_state) override;
+ void OnAddTrack(
+ rtc::scoped_refptr<webrtc::RtpReceiverInterface> receiver,
+ const std::vector<rtc::scoped_refptr<webrtc::MediaStreamInterface>>&
+ streams) override;
+ void OnDataChannel(
+ rtc::scoped_refptr<webrtc::DataChannelInterface> data_channel) override;
+ void OnRenegotiationNeeded() override {}
+ void OnIceConnectionChange(
+ webrtc::PeerConnectionInterface::IceConnectionState new_state) override {}
+ void OnIceGatheringChange(
+ webrtc::PeerConnectionInterface::IceGatheringState new_state) override {}
+ void OnIceCandidate(const webrtc::IceCandidateInterface* candidate) override;
+
+ // Implements CreateSessionDescriptionObserver.
+ void OnSuccess(webrtc::SessionDescriptionInterface* desc) override;
+ void OnFailure(webrtc::RTCError) override {}
+
+ void CreateOffer(
+ const webrtc::PeerConnectionInterface::RTCOfferAnswerOptions& options);
+ void CreateAnswer(
+ const webrtc::PeerConnectionInterface::RTCOfferAnswerOptions& options);
+ void ReceiveOfferSdp(const std::string& sdp);
+ void ReceiveAnswerSdp(const std::string& sdp);
+ void AddIceCandidate(const std::string& sdp_mid,
+ int sdp_mline_index,
+ const std::string& candidate);
+ void WaitForCallEstablished();
+ void WaitForConnection();
+ void WaitForAudio();
+ void WaitForVideo();
+ void GetAndAddUserMedia(bool audio,
+ const cricket::AudioOptions& audio_options,
+ bool video);
+
+ // sigslots
+ sigslot::signal3<const std::string&, int, const std::string&>
+ SignalOnIceCandidateReady;
+ sigslot::signal1<const std::string&> SignalOnSdpReady;
+ sigslot::signal1<webrtc::DataChannelInterface*> SignalOnDataChannel;
+
+ private:
+ void SetLocalDescription(webrtc::SdpType type, const std::string& sdp);
+ void SetRemoteDescription(webrtc::SdpType type, const std::string& sdp);
+ bool CheckForConnection();
+ bool CheckForAudio();
+ bool CheckForVideo();
+ rtc::scoped_refptr<webrtc::MediaStreamInterface> GetUserMedia(
+ bool audio,
+ const cricket::AudioOptions& audio_options,
+ bool video);
+
+ webrtc::test::ScopedKeyValueConfig field_trials_;
+ std::string name_;
+ rtc::SocketServer* const socket_server_;
+ rtc::Thread* const network_thread_;
+ rtc::Thread* const worker_thread_;
+ webrtc::SequenceChecker pc_thread_checker_;
+ rtc::scoped_refptr<webrtc::PeerConnectionInterface> peer_connection_;
+ rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface>
+ peer_connection_factory_;
+ rtc::scoped_refptr<FakeAudioCaptureModule> fake_audio_capture_module_;
+ std::unique_ptr<webrtc::FakeVideoTrackRenderer> renderer_;
+ int num_get_user_media_calls_ = 0;
+ bool pending_negotiation_;
+};
+
+#endif // PC_TEST_PEER_CONNECTION_TEST_WRAPPER_H_
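A minimal usage sketch of the wrapper above (not part of the patch); it assumes a test that owns a rtc::VirtualSocketServer named ss, a network and a worker thread, and the built-in audio codec factories from api/audio_codecs. All names other than the wrapper's own API are illustrative:

  auto caller = rtc::make_ref_counted<PeerConnectionTestWrapper>(
      "caller", &ss, network_thread, worker_thread);
  auto callee = rtc::make_ref_counted<PeerConnectionTestWrapper>(
      "callee", &ss, network_thread, worker_thread);
  webrtc::PeerConnectionInterface::RTCConfiguration config;
  ASSERT_TRUE(caller->CreatePc(config,
                               webrtc::CreateBuiltinAudioEncoderFactory(),
                               webrtc::CreateBuiltinAudioDecoderFactory()));
  ASSERT_TRUE(callee->CreatePc(config,
                               webrtc::CreateBuiltinAudioEncoderFactory(),
                               webrtc::CreateBuiltinAudioDecoderFactory()));
  // Presumably wires the two wrappers' SDP/ICE sigslots to each other.
  PeerConnectionTestWrapper::Connect(caller.get(), callee.get());
  caller->GetAndAddUserMedia(/*audio=*/true, cricket::AudioOptions(),
                             /*video=*/true);
  callee->GetAndAddUserMedia(/*audio=*/true, cricket::AudioOptions(),
                             /*video=*/true);
  caller->CreateOffer(
      webrtc::PeerConnectionInterface::RTCOfferAnswerOptions());
  caller->WaitForCallEstablished();
  callee->WaitForCallEstablished();
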
diff --git a/third_party/libwebrtc/pc/test/rtc_stats_obtainer.h b/third_party/libwebrtc/pc/test/rtc_stats_obtainer.h
new file mode 100644
index 0000000000..b1cc701a06
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/rtc_stats_obtainer.h
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_TEST_RTC_STATS_OBTAINER_H_
+#define PC_TEST_RTC_STATS_OBTAINER_H_
+
+#include "api/make_ref_counted.h"
+#include "api/sequence_checker.h"
+#include "api/stats/rtc_stats_collector_callback.h"
+#include "api/stats/rtc_stats_report.h"
+#include "rtc_base/gunit.h"
+
+namespace webrtc {
+
+class RTCStatsObtainer : public RTCStatsCollectorCallback {
+ public:
+ static rtc::scoped_refptr<RTCStatsObtainer> Create(
+ rtc::scoped_refptr<const RTCStatsReport>* report_ptr = nullptr) {
+ return rtc::make_ref_counted<RTCStatsObtainer>(report_ptr);
+ }
+
+ void OnStatsDelivered(
+ const rtc::scoped_refptr<const RTCStatsReport>& report) override {
+ EXPECT_TRUE(thread_checker_.IsCurrent());
+ report_ = report;
+ if (report_ptr_)
+ *report_ptr_ = report_;
+ }
+
+ rtc::scoped_refptr<const RTCStatsReport> report() const {
+ EXPECT_TRUE(thread_checker_.IsCurrent());
+ return report_;
+ }
+
+ protected:
+ explicit RTCStatsObtainer(
+ rtc::scoped_refptr<const RTCStatsReport>* report_ptr)
+ : report_ptr_(report_ptr) {}
+
+ private:
+ SequenceChecker thread_checker_;
+ rtc::scoped_refptr<const RTCStatsReport> report_;
+ rtc::scoped_refptr<const RTCStatsReport>* report_ptr_;
+};
+
+} // namespace webrtc
+
+#endif // PC_TEST_RTC_STATS_OBTAINER_H_
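A hedged sketch of how the obtainer is typically used (not part of the patch), assuming pc is a connected rtc::scoped_refptr<webrtc::PeerConnectionInterface> and kDefaultTimeout is a timeout constant defined by the test:

  rtc::scoped_refptr<webrtc::RTCStatsObtainer> stats_obtainer =
      webrtc::RTCStatsObtainer::Create();
  pc->GetStats(stats_obtainer.get());
  // Waits (via rtc_base/gunit.h) until the asynchronous callback has
  // delivered a report on the signaling thread.
  EXPECT_TRUE_WAIT(stats_obtainer->report() != nullptr, kDefaultTimeout);
  auto inbound_stats = stats_obtainer->report()
                           ->GetStatsOfType<webrtc::RTCInboundRTPStreamStats>();
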
diff --git a/third_party/libwebrtc/pc/test/rtp_transport_test_util.h b/third_party/libwebrtc/pc/test/rtp_transport_test_util.h
new file mode 100644
index 0000000000..0353b74754
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/rtp_transport_test_util.h
@@ -0,0 +1,78 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_TEST_RTP_TRANSPORT_TEST_UTIL_H_
+#define PC_TEST_RTP_TRANSPORT_TEST_UTIL_H_
+
+#include "call/rtp_packet_sink_interface.h"
+#include "modules/rtp_rtcp/source/rtp_packet_received.h"
+#include "pc/rtp_transport_internal.h"
+#include "rtc_base/third_party/sigslot/sigslot.h"
+
+namespace webrtc {
+
+// Handles the signals fired when the RtpTransport receives an RTP/RTCP
+// packet. Used in the Rtp/Srtp/DtlsTransport unit tests.
+class TransportObserver : public RtpPacketSinkInterface,
+ public sigslot::has_slots<> {
+ public:
+ TransportObserver() {}
+
+ explicit TransportObserver(RtpTransportInternal* rtp_transport) {
+ rtp_transport->SignalRtcpPacketReceived.connect(
+ this, &TransportObserver::OnRtcpPacketReceived);
+ rtp_transport->SignalReadyToSend.connect(this,
+ &TransportObserver::OnReadyToSend);
+ }
+
+ // RtpPacketSinkInterface override.
+ void OnRtpPacket(const RtpPacketReceived& packet) override {
+ rtp_count_++;
+ last_recv_rtp_packet_ = packet.Buffer();
+ }
+
+ void OnRtcpPacketReceived(rtc::CopyOnWriteBuffer* packet,
+ int64_t packet_time_us) {
+ rtcp_count_++;
+ last_recv_rtcp_packet_ = *packet;
+ }
+
+ int rtp_count() const { return rtp_count_; }
+ int rtcp_count() const { return rtcp_count_; }
+
+ rtc::CopyOnWriteBuffer last_recv_rtp_packet() {
+ return last_recv_rtp_packet_;
+ }
+
+ rtc::CopyOnWriteBuffer last_recv_rtcp_packet() {
+ return last_recv_rtcp_packet_;
+ }
+
+ void OnReadyToSend(bool ready) {
+ ready_to_send_signal_count_++;
+ ready_to_send_ = ready;
+ }
+
+ bool ready_to_send() { return ready_to_send_; }
+
+ int ready_to_send_signal_count() { return ready_to_send_signal_count_; }
+
+ private:
+ bool ready_to_send_ = false;
+ int rtp_count_ = 0;
+ int rtcp_count_ = 0;
+ int ready_to_send_signal_count_ = 0;
+ rtc::CopyOnWriteBuffer last_recv_rtp_packet_;
+ rtc::CopyOnWriteBuffer last_recv_rtcp_packet_;
+};
+
+} // namespace webrtc
+
+#endif // PC_TEST_RTP_TRANSPORT_TEST_UTIL_H_
diff --git a/third_party/libwebrtc/pc/test/srtp_test_util.h b/third_party/libwebrtc/pc/test/srtp_test_util.h
new file mode 100644
index 0000000000..ae02310eba
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/srtp_test_util.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_TEST_SRTP_TEST_UTIL_H_
+#define PC_TEST_SRTP_TEST_UTIL_H_
+
+#include <string>
+
+namespace rtc {
+
+extern const char kCsAesCm128HmacSha1_32[];
+extern const char kCsAeadAes128Gcm[];
+extern const char kCsAeadAes256Gcm[];
+
+static const uint8_t kTestKey1[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZ1234";
+static const uint8_t kTestKey2[] = "4321ZYXWVUTSRQPONMLKJIHGFEDCBA";
+static const int kTestKeyLen = 30;
+
+static int rtp_auth_tag_len(const std::string& cs) {
+ if (cs == kCsAesCm128HmacSha1_32) {
+ return 4;
+ } else if (cs == kCsAeadAes128Gcm || cs == kCsAeadAes256Gcm) {
+ return 16;
+ } else {
+ return 10;
+ }
+}
+static int rtcp_auth_tag_len(const std::string& cs) {
+ if (cs == kCsAeadAes128Gcm || cs == kCsAeadAes256Gcm) {
+ return 16;
+ } else {
+ return 10;
+ }
+}
+
+} // namespace rtc
+
+#endif // PC_TEST_SRTP_TEST_UTIL_H_
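The helpers above simply map a cipher-suite name onto its authentication tag size, which the SRTP tests add to the plaintext packet length (plus, for RTCP, the 4-byte SRTCP index) when predicting protected packet sizes; a small, hedged worked example of the mapping:

  // AES-GCM suites use a 16-byte tag; AES_CM_128_HMAC_SHA1_32 truncates the
  // RTP tag to 4 bytes; everything else (e.g. AES_CM_128_HMAC_SHA1_80) uses
  // a 10-byte tag.
  EXPECT_EQ(16, rtc::rtp_auth_tag_len(rtc::kCsAeadAes128Gcm));
  EXPECT_EQ(4, rtc::rtp_auth_tag_len(rtc::kCsAesCm128HmacSha1_32));
  EXPECT_EQ(10, rtc::rtcp_auth_tag_len(rtc::kCsAesCm128HmacSha1_32));
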
diff --git a/third_party/libwebrtc/pc/test/svc_e2e_tests.cc b/third_party/libwebrtc/pc/test/svc_e2e_tests.cc
new file mode 100644
index 0000000000..dea0763758
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/svc_e2e_tests.cc
@@ -0,0 +1,507 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include <memory>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "api/media_stream_interface.h"
+#include "api/stats/rtcstats_objects.h"
+#include "api/test/create_network_emulation_manager.h"
+#include "api/test/create_peer_connection_quality_test_frame_generator.h"
+#include "api/test/create_peerconnection_quality_test_fixture.h"
+#include "api/test/frame_generator_interface.h"
+#include "api/test/metrics/global_metrics_logger_and_exporter.h"
+#include "api/test/network_emulation_manager.h"
+#include "api/test/pclf/media_configuration.h"
+#include "api/test/pclf/media_quality_test_params.h"
+#include "api/test/pclf/peer_configurer.h"
+#include "api/test/peerconnection_quality_test_fixture.h"
+#include "api/test/simulated_network.h"
+#include "api/test/time_controller.h"
+#include "api/video_codecs/vp9_profile.h"
+#include "call/simulated_network.h"
+#include "modules/video_coding/codecs/vp9/include/vp9.h"
+#include "modules/video_coding/svc/scalability_mode_util.h"
+#include "rtc_base/containers/flat_map.h"
+#include "system_wrappers/include/field_trial.h"
+#include "test/field_trial.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer.h"
+#include "test/pc/e2e/network_quality_metrics_reporter.h"
+#include "test/testsupport/file_utils.h"
+
+namespace webrtc {
+namespace {
+
+using ::cricket::kAv1CodecName;
+using ::cricket::kH264CodecName;
+using ::cricket::kVp8CodecName;
+using ::cricket::kVp9CodecName;
+using ::testing::Combine;
+using ::testing::Optional;
+using ::testing::UnitTest;
+using ::testing::Values;
+using ::testing::ValuesIn;
+using ::webrtc::webrtc_pc_e2e::EmulatedSFUConfig;
+using ::webrtc::webrtc_pc_e2e::PeerConfigurer;
+using ::webrtc::webrtc_pc_e2e::RunParams;
+using ::webrtc::webrtc_pc_e2e::ScreenShareConfig;
+using ::webrtc::webrtc_pc_e2e::VideoCodecConfig;
+using ::webrtc::webrtc_pc_e2e::VideoConfig;
+
+std::unique_ptr<webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture>
+CreateTestFixture(absl::string_view test_case_name,
+ TimeController& time_controller,
+ std::pair<EmulatedNetworkManagerInterface*,
+ EmulatedNetworkManagerInterface*> network_links,
+ rtc::FunctionView<void(PeerConfigurer*)> alice_configurer,
+ rtc::FunctionView<void(PeerConfigurer*)> bob_configurer,
+ std::unique_ptr<VideoQualityAnalyzerInterface>
+ video_quality_analyzer = nullptr) {
+ auto fixture = webrtc_pc_e2e::CreatePeerConnectionE2EQualityTestFixture(
+ std::string(test_case_name), time_controller, nullptr,
+ std::move(video_quality_analyzer));
+ auto alice = std::make_unique<PeerConfigurer>(
+ network_links.first->network_dependencies());
+ auto bob = std::make_unique<PeerConfigurer>(
+ network_links.second->network_dependencies());
+ alice_configurer(alice.get());
+ bob_configurer(bob.get());
+ fixture->AddPeer(std::move(alice));
+ fixture->AddPeer(std::move(bob));
+ return fixture;
+}
+
+// Takes the currently active field trial set and appends some new trials.
+std::string AppendFieldTrials(std::string new_trial_string) {
+ return std::string(field_trial::GetFieldTrialString()) + new_trial_string;
+}
+
+enum class UseDependencyDescriptor {
+ Enabled,
+ Disabled,
+};
+
+struct SvcTestParameters {
+ static SvcTestParameters Create(const std::string& codec_name,
+ const std::string& scalability_mode_str) {
+ absl::optional<ScalabilityMode> scalability_mode =
+ ScalabilityModeFromString(scalability_mode_str);
+ RTC_CHECK(scalability_mode.has_value())
+ << "Unsupported scalability mode: " << scalability_mode_str;
+
+ int num_spatial_layers =
+ ScalabilityModeToNumSpatialLayers(*scalability_mode);
+ int num_temporal_layers =
+ ScalabilityModeToNumTemporalLayers(*scalability_mode);
+
+ return SvcTestParameters{codec_name, scalability_mode_str,
+ num_spatial_layers, num_temporal_layers};
+ }
+
+ std::string codec_name;
+ std::string scalability_mode;
+ int expected_spatial_layers;
+ int expected_temporal_layers;
+};
+
+class SvcTest : public testing::TestWithParam<
+ std::tuple<SvcTestParameters, UseDependencyDescriptor>> {
+ public:
+ SvcTest()
+ : video_codec_config(ToVideoCodecConfig(SvcTestParameters().codec_name)) {
+ }
+
+ static VideoCodecConfig ToVideoCodecConfig(absl::string_view codec) {
+ if (codec == cricket::kVp9CodecName) {
+ return VideoCodecConfig(
+ cricket::kVp9CodecName,
+ {{kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile0)}});
+ }
+
+ return VideoCodecConfig(std::string(codec));
+ }
+
+ const SvcTestParameters& SvcTestParameters() const {
+ return std::get<0>(GetParam());
+ }
+
+ bool UseDependencyDescriptor() const {
+ return std::get<1>(GetParam()) == UseDependencyDescriptor::Enabled;
+ }
+
+ bool IsSMode() const {
+ return SvcTestParameters().scalability_mode[0] == 'S';
+ }
+
+ protected:
+ VideoCodecConfig video_codec_config;
+};
+
+std::string SvcTestNameGenerator(
+ const testing::TestParamInfo<SvcTest::ParamType>& info) {
+ return std::get<0>(info.param).scalability_mode +
+ (std::get<1>(info.param) == UseDependencyDescriptor::Enabled ? "_DD"
+ : "");
+}
+
+} // namespace
+
+// Records how many frames are seen for each spatial and temporal index at the
+// encoder and decoder level.
+class SvcVideoQualityAnalyzer : public DefaultVideoQualityAnalyzer {
+ public:
+ using SpatialTemporalLayerCounts =
+ webrtc::flat_map<int, webrtc::flat_map<int, int>>;
+
+ explicit SvcVideoQualityAnalyzer(webrtc::Clock* clock)
+ : DefaultVideoQualityAnalyzer(clock,
+ test::GetGlobalMetricsLogger(),
+ DefaultVideoQualityAnalyzerOptions{
+ .compute_psnr = false,
+ .compute_ssim = false,
+ }) {}
+ ~SvcVideoQualityAnalyzer() override = default;
+
+ void OnFrameEncoded(absl::string_view peer_name,
+ uint16_t frame_id,
+ const EncodedImage& encoded_image,
+ const EncoderStats& stats,
+ bool discarded) override {
+ absl::optional<int> spatial_id = encoded_image.SpatialIndex();
+ absl::optional<int> temporal_id = encoded_image.TemporalIndex();
+ encoder_layers_seen_[spatial_id.value_or(0)][temporal_id.value_or(0)]++;
+ DefaultVideoQualityAnalyzer::OnFrameEncoded(
+ peer_name, frame_id, encoded_image, stats, discarded);
+ }
+
+ void OnFramePreDecode(absl::string_view peer_name,
+ uint16_t frame_id,
+ const EncodedImage& input_image) override {
+ absl::optional<int> spatial_id = input_image.SpatialIndex();
+ absl::optional<int> temporal_id = input_image.TemporalIndex();
+ if (!spatial_id) {
+ decoder_layers_seen_[0][temporal_id.value_or(0)]++;
+ } else {
+ for (int i = 0; i <= *spatial_id; ++i) {
+ // If there are no spatial layers (for example VP8), we still want to
+ // record the temporal index for pseudo-layer "0" frames.
+ if (*spatial_id == 0 ||
+ input_image.SpatialLayerFrameSize(i).value_or(0) > 0) {
+ decoder_layers_seen_[i][temporal_id.value_or(0)]++;
+ }
+ }
+ }
+ DefaultVideoQualityAnalyzer::OnFramePreDecode(peer_name, frame_id,
+ input_image);
+ }
+
+ void OnStatsReports(
+ absl::string_view pc_label,
+ const rtc::scoped_refptr<const RTCStatsReport>& report) override {
+ // Extract the scalability mode reported in the stats.
+ auto outbound_stats = report->GetStatsOfType<RTCOutboundRTPStreamStats>();
+ for (const auto& stat : outbound_stats) {
+ if (stat->scalability_mode.is_defined()) {
+ reported_scalability_mode_ = *stat->scalability_mode;
+ }
+ }
+ }
+
+ const SpatialTemporalLayerCounts& encoder_layers_seen() const {
+ return encoder_layers_seen_;
+ }
+ const SpatialTemporalLayerCounts& decoder_layers_seen() const {
+ return decoder_layers_seen_;
+ }
+ const absl::optional<std::string> reported_scalability_mode() const {
+ return reported_scalability_mode_;
+ }
+
+ private:
+ SpatialTemporalLayerCounts encoder_layers_seen_;
+ SpatialTemporalLayerCounts decoder_layers_seen_;
+ absl::optional<std::string> reported_scalability_mode_;
+};
+
+MATCHER_P2(HasSpatialAndTemporalLayers,
+ expected_spatial_layers,
+ expected_temporal_layers,
+ "") {
+ if (arg.size() != static_cast<size_t>(expected_spatial_layers)) {
+ *result_listener << "spatial layer count mismatch expected "
+ << expected_spatial_layers << " but got " << arg.size();
+ return false;
+ }
+ for (const auto& [spatial_layer_index, temporal_layers] : arg) {
+ if (spatial_layer_index < 0 ||
+ spatial_layer_index >= expected_spatial_layers) {
+ *result_listener << "spatial layer index is not in range [0,"
+ << expected_spatial_layers << "[.";
+ return false;
+ }
+
+ if (temporal_layers.size() !=
+ static_cast<size_t>(expected_temporal_layers)) {
+ *result_listener << "temporal layer count mismatch on spatial layer "
+ << spatial_layer_index << ", expected "
+ << expected_temporal_layers << " but got "
+ << temporal_layers.size();
+ return false;
+ }
+ for (const auto& [temporal_layer_index, temporal_layer_frame_count] :
+ temporal_layers) {
+ if (temporal_layer_index < 0 ||
+ temporal_layer_index >= expected_temporal_layers) {
+ *result_listener << "temporal layer index on spatial layer "
+ << spatial_layer_index << " is not in range [0,"
+ << expected_temporal_layers << "[.";
+ return false;
+ }
+ }
+ }
+ return true;
+}
+
+MATCHER_P2(HasSpatialAndTemporalLayersSMode,
+ expected_spatial_layers,
+ expected_temporal_layers,
+ "") {
+ if (arg.size() != 1) {
+ *result_listener << "spatial layer count mismatch expected 1 but got "
+ << arg.size();
+ return false;
+ }
+ for (const auto& [spatial_layer_index, temporal_layers] : arg) {
+ if (spatial_layer_index != expected_spatial_layers - 1) {
+ *result_listener << "spatial layer index is not equal to "
+ << expected_spatial_layers - 1 << ".";
+ return false;
+ }
+
+ if (temporal_layers.size() !=
+ static_cast<size_t>(expected_temporal_layers)) {
+ *result_listener << "temporal layer count mismatch on spatial layer "
+ << spatial_layer_index << ", expected "
+ << expected_temporal_layers << " but got "
+ << temporal_layers.size();
+ return false;
+ }
+ for (const auto& [temporal_layer_index, temporal_layer_frame_count] :
+ temporal_layers) {
+ if (temporal_layer_index < 0 ||
+ temporal_layer_index >= expected_temporal_layers) {
+ *result_listener << "temporal layer index on spatial layer "
+ << spatial_layer_index << " is not in range [0,"
+ << expected_temporal_layers << "[.";
+ return false;
+ }
+ }
+ }
+ return true;
+}
+
+TEST_P(SvcTest, ScalabilityModeSupported) {
+ std::string trials;
+ if (UseDependencyDescriptor()) {
+ trials += "WebRTC-DependencyDescriptorAdvertised/Enabled/";
+ }
+ webrtc::test::ScopedFieldTrials override_trials(AppendFieldTrials(trials));
+ std::unique_ptr<NetworkEmulationManager> network_emulation_manager =
+ CreateNetworkEmulationManager(webrtc::TimeMode::kSimulated);
+ auto analyzer = std::make_unique<SvcVideoQualityAnalyzer>(
+ network_emulation_manager->time_controller()->GetClock());
+ SvcVideoQualityAnalyzer* analyzer_ptr = analyzer.get();
+ auto fixture = CreateTestFixture(
+ UnitTest::GetInstance()->current_test_info()->name(),
+ *network_emulation_manager->time_controller(),
+ network_emulation_manager->CreateEndpointPairWithTwoWayRoutes(
+ BuiltInNetworkBehaviorConfig()),
+ [this](PeerConfigurer* alice) {
+ VideoConfig video(/*stream_label=*/"alice-video", /*width=*/1850,
+ /*height=*/1110, /*fps=*/30);
+ if (IsSMode()) {
+ video.emulated_sfu_config = EmulatedSFUConfig(
+ SvcTestParameters().expected_spatial_layers - 1,
+ SvcTestParameters().expected_temporal_layers - 1);
+ }
+ RtpEncodingParameters parameters;
+ parameters.scalability_mode = SvcTestParameters().scalability_mode;
+ video.encoding_params.push_back(parameters);
+ alice->AddVideoConfig(
+ std::move(video),
+ CreateScreenShareFrameGenerator(
+ video, ScreenShareConfig(TimeDelta::Seconds(5))));
+ alice->SetVideoCodecs({video_codec_config});
+ },
+ [](PeerConfigurer* bob) {}, std::move(analyzer));
+ fixture->Run(RunParams(TimeDelta::Seconds(5)));
+ EXPECT_THAT(analyzer_ptr->encoder_layers_seen(),
+ HasSpatialAndTemporalLayers(
+ SvcTestParameters().expected_spatial_layers,
+ SvcTestParameters().expected_temporal_layers));
+ if (IsSMode()) {
+ EXPECT_THAT(analyzer_ptr->decoder_layers_seen(),
+ HasSpatialAndTemporalLayersSMode(
+ SvcTestParameters().expected_spatial_layers,
+ SvcTestParameters().expected_temporal_layers));
+ } else {
+ EXPECT_THAT(analyzer_ptr->decoder_layers_seen(),
+ HasSpatialAndTemporalLayers(
+ SvcTestParameters().expected_spatial_layers,
+ SvcTestParameters().expected_temporal_layers));
+ }
+ EXPECT_THAT(analyzer_ptr->reported_scalability_mode(),
+ Optional(SvcTestParameters().scalability_mode));
+
+ RTC_LOG(LS_INFO) << "Encoder layers seen: "
+ << analyzer_ptr->encoder_layers_seen().size();
+ for (auto& [spatial_index, temporal_layers] :
+ analyzer_ptr->encoder_layers_seen()) {
+ for (auto& [temporal_index, frame_count] : temporal_layers) {
+ RTC_LOG(LS_INFO) << " Layer: " << spatial_index << "," << temporal_index
+ << " frames: " << frame_count;
+ }
+ }
+ RTC_LOG(LS_INFO) << "Decoder layers seen: "
+ << analyzer_ptr->decoder_layers_seen().size();
+ for (auto& [spatial_index, temporal_layers] :
+ analyzer_ptr->decoder_layers_seen()) {
+ for (auto& [temporal_index, frame_count] : temporal_layers) {
+ RTC_LOG(LS_INFO) << " Layer: " << spatial_index << "," << temporal_index
+ << " frames: " << frame_count;
+ }
+ }
+}
+
+INSTANTIATE_TEST_SUITE_P(
+ SvcTestVP8,
+ SvcTest,
+ Combine(Values(SvcTestParameters::Create(kVp8CodecName, "L1T1"),
+ SvcTestParameters::Create(kVp8CodecName, "L1T2"),
+ SvcTestParameters::Create(kVp8CodecName, "L1T3")),
+ Values(UseDependencyDescriptor::Disabled,
+ UseDependencyDescriptor::Enabled)),
+ SvcTestNameGenerator);
+
+#if defined(WEBRTC_USE_H264)
+INSTANTIATE_TEST_SUITE_P(
+ SvcTestH264,
+ SvcTest,
+ Combine(ValuesIn({
+ SvcTestParameters::Create(kH264CodecName, "L1T1"),
+ SvcTestParameters::Create(kH264CodecName, "L1T2"),
+ SvcTestParameters::Create(kH264CodecName, "L1T3"),
+ }),
+ // Like AV1, the H.264 RTP format does not include SVC-related
+ // information, so the Dependency Descriptor is always used.
+ Values(UseDependencyDescriptor::Enabled)),
+ SvcTestNameGenerator);
+#endif
+
+#if defined(RTC_ENABLE_VP9)
+INSTANTIATE_TEST_SUITE_P(
+ SvcTestVP9,
+ SvcTest,
+ Combine(
+ // TODO(bugs.webrtc.org/13960): Fix and enable remaining VP9 modes
+ ValuesIn({
+ SvcTestParameters::Create(kVp9CodecName, "L1T1"),
+ SvcTestParameters::Create(kVp9CodecName, "L1T2"),
+ SvcTestParameters::Create(kVp9CodecName, "L1T3"),
+ SvcTestParameters::Create(kVp9CodecName, "L2T1"),
+ SvcTestParameters::Create(kVp9CodecName, "L2T1h"),
+ SvcTestParameters::Create(kVp9CodecName, "L2T1_KEY"),
+ SvcTestParameters::Create(kVp9CodecName, "L2T2"),
+ SvcTestParameters::Create(kVp9CodecName, "L2T2h"),
+ SvcTestParameters::Create(kVp9CodecName, "L2T2_KEY"),
+ SvcTestParameters::Create(kVp9CodecName, "L2T2_KEY_SHIFT"),
+ SvcTestParameters::Create(kVp9CodecName, "L2T3"),
+ SvcTestParameters::Create(kVp9CodecName, "L2T3h"),
+ SvcTestParameters::Create(kVp9CodecName, "L2T3_KEY"),
+ // SvcTestParameters::Create(kVp9CodecName, "L2T3_KEY_SHIFT"),
+ SvcTestParameters::Create(kVp9CodecName, "L3T1"),
+ SvcTestParameters::Create(kVp9CodecName, "L3T1h"),
+ SvcTestParameters::Create(kVp9CodecName, "L3T1_KEY"),
+ SvcTestParameters::Create(kVp9CodecName, "L3T2"),
+ SvcTestParameters::Create(kVp9CodecName, "L3T2h"),
+ SvcTestParameters::Create(kVp9CodecName, "L3T2_KEY"),
+ // SvcTestParameters::Create(kVp9CodecName, "L3T2_KEY_SHIFT"),
+ SvcTestParameters::Create(kVp9CodecName, "L3T3"),
+ SvcTestParameters::Create(kVp9CodecName, "L3T3h"),
+ SvcTestParameters::Create(kVp9CodecName, "L3T3_KEY"),
+ // SvcTestParameters::Create(kVp9CodecName, "L3T3_KEY_SHIFT"),
+ SvcTestParameters::Create(kVp9CodecName, "S2T1"),
+ SvcTestParameters::Create(kVp9CodecName, "S2T1h"),
+ SvcTestParameters::Create(kVp9CodecName, "S2T2"),
+ SvcTestParameters::Create(kVp9CodecName, "S2T2h"),
+ SvcTestParameters::Create(kVp9CodecName, "S2T3"),
+ SvcTestParameters::Create(kVp9CodecName, "S2T3h"),
+ SvcTestParameters::Create(kVp9CodecName, "S3T1"),
+ SvcTestParameters::Create(kVp9CodecName, "S3T1h"),
+ SvcTestParameters::Create(kVp9CodecName, "S3T2"),
+ SvcTestParameters::Create(kVp9CodecName, "S3T2h"),
+ SvcTestParameters::Create(kVp9CodecName, "S3T3"),
+ SvcTestParameters::Create(kVp9CodecName, "S3T3h"),
+ }),
+ Values(UseDependencyDescriptor::Disabled,
+ UseDependencyDescriptor::Enabled)),
+ SvcTestNameGenerator);
+
+INSTANTIATE_TEST_SUITE_P(
+ SvcTestAV1,
+ SvcTest,
+ Combine(ValuesIn({
+ SvcTestParameters::Create(kAv1CodecName, "L1T1"),
+ SvcTestParameters::Create(kAv1CodecName, "L1T2"),
+ SvcTestParameters::Create(kAv1CodecName, "L1T3"),
+ SvcTestParameters::Create(kAv1CodecName, "L2T1"),
+ SvcTestParameters::Create(kAv1CodecName, "L2T1h"),
+ SvcTestParameters::Create(kAv1CodecName, "L2T1_KEY"),
+ SvcTestParameters::Create(kAv1CodecName, "L2T2"),
+ SvcTestParameters::Create(kAv1CodecName, "L2T2h"),
+ SvcTestParameters::Create(kAv1CodecName, "L2T2_KEY"),
+ SvcTestParameters::Create(kAv1CodecName, "L2T2_KEY_SHIFT"),
+ SvcTestParameters::Create(kAv1CodecName, "L2T3"),
+ SvcTestParameters::Create(kAv1CodecName, "L2T3h"),
+ SvcTestParameters::Create(kAv1CodecName, "L2T3_KEY"),
+ // SvcTestParameters::Create(kAv1CodecName, "L2T3_KEY_SHIFT"),
+ SvcTestParameters::Create(kAv1CodecName, "L3T1"),
+ SvcTestParameters::Create(kAv1CodecName, "L3T1h"),
+ SvcTestParameters::Create(kAv1CodecName, "L3T1_KEY"),
+ SvcTestParameters::Create(kAv1CodecName, "L3T2"),
+ SvcTestParameters::Create(kAv1CodecName, "L3T2h"),
+ SvcTestParameters::Create(kAv1CodecName, "L3T2_KEY"),
+ // SvcTestParameters::Create(kAv1CodecName, "L3T2_KEY_SHIFT"),
+ SvcTestParameters::Create(kAv1CodecName, "L3T3"),
+ SvcTestParameters::Create(kAv1CodecName, "L3T3h"),
+ SvcTestParameters::Create(kAv1CodecName, "L3T3_KEY"),
+ // SvcTestParameters::Create(kAv1CodecName, "L3T3_KEY_SHIFT"),
+ SvcTestParameters::Create(kAv1CodecName, "S2T1"),
+ SvcTestParameters::Create(kAv1CodecName, "S2T1h"),
+ SvcTestParameters::Create(kAv1CodecName, "S2T2"),
+ SvcTestParameters::Create(kAv1CodecName, "S2T2h"),
+ SvcTestParameters::Create(kAv1CodecName, "S2T3"),
+ SvcTestParameters::Create(kAv1CodecName, "S2T3h"),
+ SvcTestParameters::Create(kAv1CodecName, "S3T1"),
+ SvcTestParameters::Create(kAv1CodecName, "S3T1h"),
+ SvcTestParameters::Create(kAv1CodecName, "S3T2"),
+ SvcTestParameters::Create(kAv1CodecName, "S3T2h"),
+ SvcTestParameters::Create(kAv1CodecName, "S3T3"),
+ SvcTestParameters::Create(kAv1CodecName, "S3T3h"),
+ }),
+ Values(UseDependencyDescriptor::Enabled)),
+ SvcTestNameGenerator);
+
+#endif
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/pc/test/test_sdp_strings.h b/third_party/libwebrtc/pc/test/test_sdp_strings.h
new file mode 100644
index 0000000000..e4ad325d31
--- /dev/null
+++ b/third_party/libwebrtc/pc/test/test_sdp_strings.h
@@ -0,0 +1,184 @@
+/*
+ * Copyright 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file contains SDP strings used for testing.
+
+#ifndef PC_TEST_TEST_SDP_STRINGS_H_
+#define PC_TEST_TEST_SDP_STRINGS_H_
+
+namespace webrtc {
+
+// SDP offer string from a Firefox Nightly build.
+static const char kFireFoxSdpOffer[] =
+ "v=0\r\n"
+ "o=Mozilla-SIPUA 23551 0 IN IP4 0.0.0.0\r\n"
+ "s=SIP Call\r\n"
+ "t=0 0\r\n"
+ "a=ice-ufrag:e5785931\r\n"
+ "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n"
+ "a=fingerprint:sha-256 A7:24:72:CA:6E:02:55:39:BA:66:DF:6E:CC:4C:D8:B0:1A:"
+ "BF:1A:56:65:7D:F4:03:AD:7E:77:43:2A:29:EC:93\r\n"
+ "m=audio 36993 RTP/SAVPF 109 0 8 101\r\n"
+ "c=IN IP4 74.95.2.170\r\n"
+ "a=rtpmap:109 opus/48000/2\r\n"
+ "a=ptime:20\r\n"
+ "a=rtcp-mux\r\n"
+ "a=rtpmap:0 PCMU/8000\r\n"
+ "a=rtpmap:8 PCMA/8000\r\n"
+ "a=rtpmap:101 telephone-event/8000\r\n"
+ "a=fmtp:101 0-15\r\n"
+ "a=sendrecv\r\n"
+ "a=candidate:0 1 UDP 2112946431 172.16.191.1 61725 typ host\r\n"
+ "a=candidate:2 1 UDP 2112487679 172.16.131.1 58798 typ host\r\n"
+ "a=candidate:4 1 UDP 2113667327 10.0.254.2 58122 typ host\r\n"
+ "a=candidate:5 1 UDP 1694302207 74.95.2.170 36993 typ srflx raddr "
+ "10.0.254.2 rport 58122\r\n"
+ "a=candidate:0 2 UDP 2112946430 172.16.191.1 55025 typ host\r\n"
+ "a=candidate:2 2 UDP 2112487678 172.16.131.1 63576 typ host\r\n"
+ "a=candidate:4 2 UDP 2113667326 10.0.254.2 50962 typ host\r\n"
+ "a=candidate:5 2 UDP 1694302206 74.95.2.170 41028 typ srflx raddr"
+ " 10.0.254.2 rport 50962\r\n"
+ "m=video 38826 RTP/SAVPF 120\r\n"
+ "c=IN IP4 74.95.2.170\r\n"
+ "a=rtcp-mux\r\n"
+ "a=rtpmap:120 VP8/90000\r\n"
+ "a=sendrecv\r\n"
+ "a=candidate:0 1 UDP 2112946431 172.16.191.1 62017 typ host\r\n"
+ "a=candidate:2 1 UDP 2112487679 172.16.131.1 59741 typ host\r\n"
+ "a=candidate:4 1 UDP 2113667327 10.0.254.2 62652 typ host\r\n"
+ "a=candidate:5 1 UDP 1694302207 74.95.2.170 38826 typ srflx raddr"
+ " 10.0.254.2 rport 62652\r\n"
+ "a=candidate:0 2 UDP 2112946430 172.16.191.1 63440 typ host\r\n"
+ "a=candidate:2 2 UDP 2112487678 172.16.131.1 51847 typ host\r\n"
+ "a=candidate:4 2 UDP 2113667326 10.0.254.2 58890 typ host\r\n"
+ "a=candidate:5 2 UDP 1694302206 74.95.2.170 33611 typ srflx raddr"
+ " 10.0.254.2 rport 58890\r\n"
+#ifdef WEBRTC_HAVE_SCTP
+ "m=application 45536 DTLS/SCTP 5000\r\n"
+ "c=IN IP4 74.95.2.170\r\n"
+ "a=fmtp:5000 protocol=webrtc-datachannel;streams=16\r\n"
+ "a=sendrecv\r\n"
+ "a=candidate:0 1 UDP 2112946431 172.16.191.1 60248 typ host\r\n"
+ "a=candidate:2 1 UDP 2112487679 172.16.131.1 55925 typ host\r\n"
+ "a=candidate:4 1 UDP 2113667327 10.0.254.2 65268 typ host\r\n"
+ "a=candidate:5 1 UDP 1694302207 74.95.2.170 45536 typ srflx raddr"
+ " 10.0.254.2 rport 65268\r\n"
+ "a=candidate:0 2 UDP 2112946430 172.16.191.1 49162 typ host\r\n"
+ "a=candidate:2 2 UDP 2112487678 172.16.131.1 59635 typ host\r\n"
+ "a=candidate:4 2 UDP 2113667326 10.0.254.2 61232 typ host\r\n"
+ "a=candidate:5 2 UDP 1694302206 74.95.2.170 45468 typ srflx raddr"
+ " 10.0.254.2 rport 61232\r\n"
+#endif
+ ; // NOLINT(whitespace/semicolon)
+
+// Audio SDP with a limited set of audio codecs.
+static const char kAudioSdpPlanB[] =
+ "v=0\r\n"
+ "o=- 7859371131 2 IN IP4 192.168.30.208\r\n"
+ "s=-\r\n"
+ "c=IN IP4 192.168.30.208\r\n"
+ "t=0 0\r\n"
+ "m=audio 16000 RTP/SAVPF 0 8 126\r\n"
+ "a=rtpmap:0 PCMU/8000\r\n"
+ "a=rtpmap:8 PCMA/8000\r\n"
+ "a=rtpmap:126 telephone-event/8000\r\n"
+ "a=sendrecv\r\n"
+ "a=rtcp:16000 IN IP4 192.168.30.208\r\n"
+ "a=rtcp-mux\r\n"
+ "a=crypto:0 AES_CM_128_HMAC_SHA1_80 "
+ "inline:tvKIFjbMQ7W0/C2RzhwN0oQglj/7GJg+frdsNRxt\r\n"
+ "a=ice-ufrag:AI2sRT3r\r\n"
+ "a=ice-pwd:lByS9z2RSQlSE9XurlvjYmEm\r\n"
+ "a=ssrc:4227871655 cname:GeAAgb6XCPNLVMX5\r\n"
+ "a=ssrc:4227871655 msid:1NFAV3iD08ioO2339rQS9pfOI9mDf6GeG9F4 a0\r\n"
+ "a=ssrc:4227871655 mslabel:1NFAV3iD08ioO2339rQS9pfOI9mDf6GeG9F4\r\n"
+ "a=ssrc:4227871655 label:1NFAV3iD08ioO2339rQS9pfOI9mDf6GeG9F4a0\r\n"
+ "a=mid:audio\r\n";
+// Same string as above but with the MID changed to the Unified Plan default.
+// This is needed so that this SDP can be used as an answer for a Unified Plan
+// offer.
+static const char kAudioSdpUnifiedPlan[] =
+ "v=0\r\n"
+ "o=- 7859371131 2 IN IP4 192.168.30.208\r\n"
+ "s=-\r\n"
+ "c=IN IP4 192.168.30.208\r\n"
+ "t=0 0\r\n"
+ "m=audio 16000 RTP/SAVPF 0 8 126\r\n"
+ "a=rtpmap:0 PCMU/8000\r\n"
+ "a=rtpmap:8 PCMA/8000\r\n"
+ "a=rtpmap:126 telephone-event/8000\r\n"
+ "a=sendrecv\r\n"
+ "a=rtcp:16000 IN IP4 192.168.30.208\r\n"
+ "a=rtcp-mux\r\n"
+ "a=crypto:0 AES_CM_128_HMAC_SHA1_80 "
+ "inline:tvKIFjbMQ7W0/C2RzhwN0oQglj/7GJg+frdsNRxt\r\n"
+ "a=ice-ufrag:AI2sRT3r\r\n"
+ "a=ice-pwd:lByS9z2RSQlSE9XurlvjYmEm\r\n"
+ "a=ssrc:4227871655 cname:GeAAgb6XCPNLVMX5\r\n"
+ "a=ssrc:4227871655 msid:1NFAV3iD08ioO2339rQS9pfOI9mDf6GeG9F4 a0\r\n"
+ "a=ssrc:4227871655 mslabel:1NFAV3iD08ioO2339rQS9pfOI9mDf6GeG9F4\r\n"
+ "a=ssrc:4227871655 label:1NFAV3iD08ioO2339rQS9pfOI9mDf6GeG9F4a0\r\n"
+ "a=mid:0\r\n";
+
+static const char kAudioSdpWithUnsupportedCodecsPlanB[] =
+ "v=0\r\n"
+ "o=- 6858750541 2 IN IP4 192.168.30.208\r\n"
+ "s=-\r\n"
+ "c=IN IP4 192.168.30.208\r\n"
+ "t=0 0\r\n"
+ "m=audio 16000 RTP/SAVPF 0 8 109 110 126\r\n"
+ "a=rtpmap:0 PCMU/8000\r\n"
+ "a=rtpmap:8 PCMA/8000\r\n"
+ "a=rtpmap:109 WeirdCodec1/8000\r\n"
+ "a=rtpmap:110 WeirdCodec2/8000\r\n"
+ "a=rtpmap:126 telephone-event/8000\r\n"
+ "a=sendonly\r\n"
+ "a=rtcp:16000 IN IP4 192.168.30.208\r\n"
+ "a=rtcp-mux\r\n"
+ "a=crypto:0 AES_CM_128_HMAC_SHA1_80 "
+ "inline:tvKIFjbMQ7W0/C2RzhwN0oQglj/7GJg+frdsNRxt\r\n"
+ "a=ice-ufrag:AI2sRT3r\r\n"
+ "a=ice-pwd:lByS9z2RSQlSE9XurlvjYmEm\r\n"
+ "a=ssrc:4227871655 cname:TsmD02HRfhkJBm4m\r\n"
+ "a=ssrc:4227871655 msid:7nU0TApbB-n4dfPlCplWT9QTEsbBDS1IlpW3 a0\r\n"
+ "a=ssrc:4227871655 mslabel:7nU0TApbB-n4dfPlCplWT9QTEsbBDS1IlpW3\r\n"
+ "a=ssrc:4227871655 label:7nU0TApbB-n4dfPlCplWT9QTEsbBDS1IlpW3a0\r\n"
+ "a=mid:audio\r\n";
+// Same string as above but with the MID changed to the Unified Plan default.
+// This is needed so that this SDP can be used as an answer for a Unified Plan
+// offer.
+static const char kAudioSdpWithUnsupportedCodecsUnifiedPlan[] =
+ "v=0\r\n"
+ "o=- 6858750541 2 IN IP4 192.168.30.208\r\n"
+ "s=-\r\n"
+ "c=IN IP4 192.168.30.208\r\n"
+ "t=0 0\r\n"
+ "m=audio 16000 RTP/SAVPF 0 8 109 110 126\r\n"
+ "a=rtpmap:0 PCMU/8000\r\n"
+ "a=rtpmap:8 PCMA/8000\r\n"
+ "a=rtpmap:109 WeirdCodec1/8000\r\n"
+ "a=rtpmap:110 WeirdCodec2/8000\r\n"
+ "a=rtpmap:126 telephone-event/8000\r\n"
+ "a=sendonly\r\n"
+ "a=rtcp:16000 IN IP4 192.168.30.208\r\n"
+ "a=rtcp-mux\r\n"
+ "a=crypto:0 AES_CM_128_HMAC_SHA1_80 "
+ "inline:tvKIFjbMQ7W0/C2RzhwN0oQglj/7GJg+frdsNRxt\r\n"
+ "a=ice-ufrag:AI2sRT3r\r\n"
+ "a=ice-pwd:lByS9z2RSQlSE9XurlvjYmEm\r\n"
+ "a=ssrc:4227871655 cname:TsmD02HRfhkJBm4m\r\n"
+ "a=ssrc:4227871655 msid:7nU0TApbB-n4dfPlCplWT9QTEsbBDS1IlpW3 a0\r\n"
+ "a=ssrc:4227871655 mslabel:7nU0TApbB-n4dfPlCplWT9QTEsbBDS1IlpW3\r\n"
+ "a=ssrc:4227871655 label:7nU0TApbB-n4dfPlCplWT9QTEsbBDS1IlpW3a0\r\n"
+ "a=mid:0\r\n";
+
+} // namespace webrtc
+
+#endif // PC_TEST_TEST_SDP_STRINGS_H_
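A hedged sketch (not part of the patch) of how one of these canned strings is typically turned into a session description, assuming the test can use api/jsep.h; pc and observer are illustrative:

  webrtc::SdpParseError error;
  std::unique_ptr<webrtc::SessionDescriptionInterface> offer =
      webrtc::CreateSessionDescription(webrtc::SdpType::kOffer,
                                       webrtc::kFireFoxSdpOffer, &error);
  ASSERT_NE(nullptr, offer) << error.line << ": " << error.description;
  pc->SetRemoteDescription(std::move(offer), observer);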