summaryrefslogtreecommitdiffstats
path: root/third_party/libwebrtc/test/scenario
diff options
context:
space:
mode:
Diffstat (limited to 'third_party/libwebrtc/test/scenario')
-rw-r--r--third_party/libwebrtc/test/scenario/BUILD.gn200
-rw-r--r--third_party/libwebrtc/test/scenario/OWNERS2
-rw-r--r--third_party/libwebrtc/test/scenario/audio_stream.cc241
-rw-r--r--third_party/libwebrtc/test/scenario/audio_stream.h110
-rw-r--r--third_party/libwebrtc/test/scenario/call_client.cc386
-rw-r--r--third_party/libwebrtc/test/scenario/call_client.h204
-rw-r--r--third_party/libwebrtc/test/scenario/column_printer.cc73
-rw-r--r--third_party/libwebrtc/test/scenario/column_printer.h66
-rw-r--r--third_party/libwebrtc/test/scenario/hardware_codecs.cc52
-rw-r--r--third_party/libwebrtc/test/scenario/hardware_codecs.h24
-rw-r--r--third_party/libwebrtc/test/scenario/network_node.cc144
-rw-r--r--third_party/libwebrtc/test/scenario/network_node.h83
-rw-r--r--third_party/libwebrtc/test/scenario/performance_stats.cc47
-rw-r--r--third_party/libwebrtc/test/scenario/performance_stats.h108
-rw-r--r--third_party/libwebrtc/test/scenario/performance_stats_unittest.cc27
-rw-r--r--third_party/libwebrtc/test/scenario/probing_test.cc135
-rw-r--r--third_party/libwebrtc/test/scenario/scenario.cc355
-rw-r--r--third_party/libwebrtc/test/scenario/scenario.h189
-rw-r--r--third_party/libwebrtc/test/scenario/scenario_config.cc47
-rw-r--r--third_party/libwebrtc/test/scenario/scenario_config.h231
-rw-r--r--third_party/libwebrtc/test/scenario/scenario_unittest.cc196
-rw-r--r--third_party/libwebrtc/test/scenario/stats_collection.cc190
-rw-r--r--third_party/libwebrtc/test/scenario/stats_collection.h110
-rw-r--r--third_party/libwebrtc/test/scenario/stats_collection_unittest.cc114
-rw-r--r--third_party/libwebrtc/test/scenario/video_frame_matcher.cc188
-rw-r--r--third_party/libwebrtc/test/scenario/video_frame_matcher.h134
-rw-r--r--third_party/libwebrtc/test/scenario/video_stream.cc636
-rw-r--r--third_party/libwebrtc/test/scenario/video_stream.h138
-rw-r--r--third_party/libwebrtc/test/scenario/video_stream_unittest.cc322
29 files changed, 4752 insertions, 0 deletions
diff --git a/third_party/libwebrtc/test/scenario/BUILD.gn b/third_party/libwebrtc/test/scenario/BUILD.gn
new file mode 100644
index 0000000000..5da6dce87d
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/BUILD.gn
@@ -0,0 +1,200 @@
+# Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import("../../webrtc.gni")
+
+rtc_library("column_printer") {
+ testonly = true
+ sources = [
+ "column_printer.cc",
+ "column_printer.h",
+ ]
+ deps = [
+ "../../rtc_base:macromagic",
+ "../../rtc_base:stringutils",
+ "../logging:log_writer",
+ ]
+}
+
+if (rtc_include_tests && !build_with_chromium) {
+ scenario_resources = [
+ "../../resources/difficult_photo_1850_1110.yuv",
+ "../../resources/photo_1850_1110.yuv",
+ "../../resources/presentation_1850_1110.yuv",
+ "../../resources/web_screenshot_1850_1110.yuv",
+ ]
+ scenario_unittest_resources = [ "../../resources/foreman_cif.yuv" ]
+
+ if (is_ios) {
+ bundle_data("scenario_resources_bundle_data") {
+ testonly = true
+ sources = scenario_resources
+ outputs = [ "{{bundle_resources_dir}}/{{source_file_part}}" ]
+ }
+ bundle_data("scenario_unittest_resources_bundle_data") {
+ testonly = true
+ sources = scenario_unittest_resources
+ outputs = [ "{{bundle_resources_dir}}/{{source_file_part}}" ]
+ }
+ }
+
+ rtc_library("scenario") {
+ testonly = true
+ sources = [
+ "audio_stream.cc",
+ "audio_stream.h",
+ "call_client.cc",
+ "call_client.h",
+ "hardware_codecs.cc",
+ "hardware_codecs.h",
+ "network_node.cc",
+ "network_node.h",
+ "performance_stats.cc",
+ "performance_stats.h",
+ "scenario.cc",
+ "scenario.h",
+ "scenario_config.cc",
+ "scenario_config.h",
+ "stats_collection.cc",
+ "stats_collection.h",
+ "video_frame_matcher.cc",
+ "video_frame_matcher.h",
+ "video_stream.cc",
+ "video_stream.h",
+ ]
+ deps = [
+ ":column_printer",
+ "../:fake_video_codecs",
+ "../:fileutils",
+ "../:test_common",
+ "../:test_support",
+ "../:video_test_common",
+ "../../api:array_view",
+ "../../api:create_frame_generator",
+ "../../api:fec_controller_api",
+ "../../api:frame_generator_api",
+ "../../api:libjingle_peerconnection_api",
+ "../../api:rtc_event_log_output_file",
+ "../../api:rtp_parameters",
+ "../../api:sequence_checker",
+ "../../api:time_controller",
+ "../../api:time_controller",
+ "../../api:transport_api",
+ "../../api/audio_codecs:builtin_audio_decoder_factory",
+ "../../api/audio_codecs:builtin_audio_encoder_factory",
+ "../../api/rtc_event_log",
+ "../../api/rtc_event_log:rtc_event_log_factory",
+ "../../api/task_queue",
+ "../../api/test/video:function_video_factory",
+ "../../api/transport:network_control",
+ "../../api/units:data_rate",
+ "../../api/units:data_size",
+ "../../api/units:time_delta",
+ "../../api/units:timestamp",
+ "../../api/video:builtin_video_bitrate_allocator_factory",
+ "../../api/video:video_frame",
+ "../../api/video:video_rtp_headers",
+ "../../api/video_codecs:scalability_mode",
+ "../../api/video_codecs:video_codecs_api",
+ "../../audio",
+ "../../call",
+ "../../call:call_interfaces",
+ "../../call:rtp_sender",
+ "../../call:simulated_network",
+ "../../call:video_stream_api",
+ "../../common_video",
+ "../../media:media_constants",
+ "../../media:rtc_audio_video",
+ "../../media:rtc_internal_video_codecs",
+ "../../media:rtc_media_base",
+ "../../modules/audio_device",
+ "../../modules/audio_device:audio_device_impl",
+ "../../modules/audio_device:mock_audio_device",
+ "../../modules/audio_mixer:audio_mixer_impl",
+ "../../modules/audio_processing",
+ "../../modules/congestion_controller/goog_cc:test_goog_cc_printer",
+ "../../modules/rtp_rtcp",
+ "../../modules/rtp_rtcp:mock_rtp_rtcp",
+ "../../modules/rtp_rtcp:rtp_rtcp_format",
+ "../../modules/video_coding:video_codec_interface",
+ "../../modules/video_coding:video_coding_utility",
+ "../../modules/video_coding:webrtc_h264",
+ "../../modules/video_coding:webrtc_multiplex",
+ "../../modules/video_coding:webrtc_vp8",
+ "../../modules/video_coding:webrtc_vp9",
+ "../../modules/video_coding/svc:scalability_mode_util",
+ "../../rtc_base:checks",
+ "../../rtc_base:copy_on_write_buffer",
+ "../../rtc_base:net_helper",
+ "../../rtc_base:refcount",
+ "../../rtc_base:rtc_base_tests_utils",
+ "../../rtc_base:rtc_event",
+ "../../rtc_base:rtc_numerics",
+ "../../rtc_base:rtc_stats_counters",
+ "../../rtc_base:safe_minmax",
+ "../../rtc_base:socket_address",
+ "../../rtc_base:task_queue_for_test",
+ "../../rtc_base:threading",
+ "../../rtc_base/synchronization:mutex",
+ "../../rtc_base/task_utils:repeating_task",
+ "../../system_wrappers",
+ "../../system_wrappers:field_trial",
+ "../../video/config:streams_config",
+ "../logging:log_writer",
+ "../network:emulated_network",
+ "../time_controller",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/flags:flag",
+ "//third_party/abseil-cpp/absl/flags:parse",
+ "//third_party/abseil-cpp/absl/functional:any_invocable",
+ "//third_party/abseil-cpp/absl/memory",
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+ if (is_android) {
+ deps += [ "../../modules/video_coding:android_codec_factory_helper" ]
+ } else if (is_ios || is_mac) {
+ deps += [ "../../modules/video_coding:objc_codec_factory_helper" ]
+ }
+ if (rtc_enable_protobuf) {
+ deps += [ "../../modules/audio_coding:ana_config_proto" ]
+ }
+ data = scenario_resources
+ if (is_ios) {
+ deps += [ ":scenario_resources_bundle_data" ]
+ }
+ }
+ rtc_library("scenario_unittests") {
+ testonly = true
+ sources = [
+ "performance_stats_unittest.cc",
+ "probing_test.cc",
+ "scenario_unittest.cc",
+ "stats_collection_unittest.cc",
+ "video_stream_unittest.cc",
+ ]
+ deps = [
+ ":scenario",
+ "../../api/test/network_emulation",
+ "../../api/test/network_emulation:create_cross_traffic",
+ "../../logging:mocks",
+ "../../rtc_base:checks",
+ "../../system_wrappers",
+ "../../system_wrappers:field_trial",
+ "../../test:field_trial",
+ "../../test:test_support",
+ "../logging:log_writer",
+ "//testing/gmock",
+ ]
+ data = scenario_unittest_resources
+ if (is_ios) {
+ deps += [ ":scenario_unittest_resources_bundle_data" ]
+ }
+ }
+}
diff --git a/third_party/libwebrtc/test/scenario/OWNERS b/third_party/libwebrtc/test/scenario/OWNERS
new file mode 100644
index 0000000000..6698afbf02
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/OWNERS
@@ -0,0 +1,2 @@
+srte@webrtc.org
+perkj@webrtc.org
diff --git a/third_party/libwebrtc/test/scenario/audio_stream.cc b/third_party/libwebrtc/test/scenario/audio_stream.cc
new file mode 100644
index 0000000000..5f2eff12ff
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/audio_stream.cc
@@ -0,0 +1,241 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/scenario/audio_stream.h"
+
+#include "absl/memory/memory.h"
+#include "test/call_test.h"
+
+#if WEBRTC_ENABLE_PROTOBUF
+RTC_PUSH_IGNORING_WUNDEF()
+#ifdef WEBRTC_ANDROID_PLATFORM_BUILD
+#include "external/webrtc/webrtc/modules/audio_coding/audio_network_adaptor/config.pb.h"
+#else
+#include "modules/audio_coding/audio_network_adaptor/config.pb.h"
+#endif
+RTC_POP_IGNORING_WUNDEF()
+#endif
+
+namespace webrtc {
+namespace test {
+namespace {
+enum : int { // The first valid value is 1.
+ kTransportSequenceNumberExtensionId = 1,
+ kAbsSendTimeExtensionId
+};
+
+absl::optional<std::string> CreateAdaptationString(
+ AudioStreamConfig::NetworkAdaptation config) {
+#if WEBRTC_ENABLE_PROTOBUF
+
+ audio_network_adaptor::config::ControllerManager cont_conf;
+ if (config.frame.max_rate_for_60_ms.IsFinite()) {
+ auto controller =
+ cont_conf.add_controllers()->mutable_frame_length_controller();
+ controller->set_fl_decreasing_packet_loss_fraction(
+ config.frame.min_packet_loss_for_decrease);
+ controller->set_fl_increasing_packet_loss_fraction(
+ config.frame.max_packet_loss_for_increase);
+
+ controller->set_fl_20ms_to_60ms_bandwidth_bps(
+ config.frame.min_rate_for_20_ms.bps<int32_t>());
+ controller->set_fl_60ms_to_20ms_bandwidth_bps(
+ config.frame.max_rate_for_60_ms.bps<int32_t>());
+
+ if (config.frame.max_rate_for_120_ms.IsFinite()) {
+ controller->set_fl_60ms_to_120ms_bandwidth_bps(
+ config.frame.min_rate_for_60_ms.bps<int32_t>());
+ controller->set_fl_120ms_to_60ms_bandwidth_bps(
+ config.frame.max_rate_for_120_ms.bps<int32_t>());
+ }
+ }
+ cont_conf.add_controllers()->mutable_bitrate_controller();
+ std::string config_string = cont_conf.SerializeAsString();
+ return config_string;
+#else
+ RTC_LOG(LS_ERROR) << "audio_network_adaptation is enabled"
+ " but WEBRTC_ENABLE_PROTOBUF is false.\n"
+ "Ignoring settings.";
+ return absl::nullopt;
+#endif // WEBRTC_ENABLE_PROTOBUF
+}
+} // namespace
+
+std::vector<RtpExtension> GetAudioRtpExtensions(
+ const AudioStreamConfig& config) {
+ std::vector<RtpExtension> extensions;
+ if (config.stream.in_bandwidth_estimation) {
+ extensions.push_back({RtpExtension::kTransportSequenceNumberUri,
+ kTransportSequenceNumberExtensionId});
+ }
+ if (config.stream.abs_send_time) {
+ extensions.push_back(
+ {RtpExtension::kAbsSendTimeUri, kAbsSendTimeExtensionId});
+ }
+ return extensions;
+}
+
+SendAudioStream::SendAudioStream(
+ CallClient* sender,
+ AudioStreamConfig config,
+ rtc::scoped_refptr<AudioEncoderFactory> encoder_factory,
+ Transport* send_transport)
+ : sender_(sender), config_(config) {
+ AudioSendStream::Config send_config(send_transport);
+ ssrc_ = sender->GetNextAudioSsrc();
+ send_config.rtp.ssrc = ssrc_;
+ SdpAudioFormat::Parameters sdp_params;
+ if (config.source.channels == 2)
+ sdp_params["stereo"] = "1";
+ if (config.encoder.initial_frame_length != TimeDelta::Millis(20))
+ sdp_params["ptime"] =
+ std::to_string(config.encoder.initial_frame_length.ms());
+ if (config.encoder.enable_dtx)
+ sdp_params["usedtx"] = "1";
+
+ // SdpAudioFormat::num_channels indicates that the encoder is capable of
+ // stereo, but the actual channel count used is based on the "stereo"
+ // parameter.
+ send_config.send_codec_spec = AudioSendStream::Config::SendCodecSpec(
+ CallTest::kAudioSendPayloadType, {"opus", 48000, 2, sdp_params});
+ RTC_DCHECK_LE(config.source.channels, 2);
+ send_config.encoder_factory = encoder_factory;
+
+ bool use_fixed_rate = !config.encoder.min_rate && !config.encoder.max_rate;
+ if (use_fixed_rate)
+ send_config.send_codec_spec->target_bitrate_bps =
+ config.encoder.fixed_rate.bps();
+ if (!config.adapt.binary_proto.empty()) {
+ send_config.audio_network_adaptor_config = config.adapt.binary_proto;
+ } else if (config.network_adaptation) {
+ send_config.audio_network_adaptor_config =
+ CreateAdaptationString(config.adapt);
+ }
+ if (config.encoder.allocate_bitrate ||
+ config.stream.in_bandwidth_estimation) {
+ DataRate min_rate = DataRate::Infinity();
+ DataRate max_rate = DataRate::Infinity();
+ if (use_fixed_rate) {
+ min_rate = config.encoder.fixed_rate;
+ max_rate = config.encoder.fixed_rate;
+ } else {
+ min_rate = *config.encoder.min_rate;
+ max_rate = *config.encoder.max_rate;
+ }
+ send_config.min_bitrate_bps = min_rate.bps();
+ send_config.max_bitrate_bps = max_rate.bps();
+ }
+
+ if (config.stream.in_bandwidth_estimation) {
+ send_config.send_codec_spec->transport_cc_enabled = true;
+ }
+ send_config.rtp.extensions = GetAudioRtpExtensions(config);
+
+ sender_->SendTask([&] {
+ send_stream_ = sender_->call_->CreateAudioSendStream(send_config);
+ sender->call_->OnAudioTransportOverheadChanged(
+ sender_->transport_->packet_overhead().bytes());
+ });
+}
+
+SendAudioStream::~SendAudioStream() {
+ sender_->SendTask(
+ [this] { sender_->call_->DestroyAudioSendStream(send_stream_); });
+}
+
+void SendAudioStream::Start() {
+ sender_->SendTask([this] {
+ send_stream_->Start();
+ sender_->call_->SignalChannelNetworkState(MediaType::AUDIO, kNetworkUp);
+ });
+}
+
+void SendAudioStream::Stop() {
+ sender_->SendTask([this] { send_stream_->Stop(); });
+}
+
+void SendAudioStream::SetMuted(bool mute) {
+ sender_->SendTask([this, mute] { send_stream_->SetMuted(mute); });
+}
+
+ColumnPrinter SendAudioStream::StatsPrinter() {
+ return ColumnPrinter::Lambda(
+ "audio_target_rate",
+ [this](rtc::SimpleStringBuilder& sb) {
+ sender_->SendTask([this, &sb] {
+ AudioSendStream::Stats stats = send_stream_->GetStats();
+ sb.AppendFormat("%.0lf", stats.target_bitrate_bps / 8.0);
+ });
+ },
+ 64);
+}
+
+ReceiveAudioStream::ReceiveAudioStream(
+ CallClient* receiver,
+ AudioStreamConfig config,
+ SendAudioStream* send_stream,
+ rtc::scoped_refptr<AudioDecoderFactory> decoder_factory,
+ Transport* feedback_transport)
+ : receiver_(receiver), config_(config) {
+ AudioReceiveStreamInterface::Config recv_config;
+ recv_config.rtp.local_ssrc = receiver_->GetNextAudioLocalSsrc();
+ recv_config.rtcp_send_transport = feedback_transport;
+ recv_config.rtp.remote_ssrc = send_stream->ssrc_;
+ receiver->ssrc_media_types_[recv_config.rtp.remote_ssrc] = MediaType::AUDIO;
+ recv_config.rtp.extensions = GetAudioRtpExtensions(config);
+ recv_config.decoder_factory = decoder_factory;
+ recv_config.decoder_map = {
+ {CallTest::kAudioSendPayloadType, {"opus", 48000, 2}}};
+ recv_config.sync_group = config.render.sync_group;
+ receiver_->SendTask([&] {
+ receive_stream_ = receiver_->call_->CreateAudioReceiveStream(recv_config);
+ });
+}
+ReceiveAudioStream::~ReceiveAudioStream() {
+ receiver_->SendTask(
+ [&] { receiver_->call_->DestroyAudioReceiveStream(receive_stream_); });
+}
+
+void ReceiveAudioStream::Start() {
+ receiver_->SendTask([&] {
+ receive_stream_->Start();
+ receiver_->call_->SignalChannelNetworkState(MediaType::AUDIO, kNetworkUp);
+ });
+}
+
+void ReceiveAudioStream::Stop() {
+ receiver_->SendTask([&] { receive_stream_->Stop(); });
+}
+
+AudioReceiveStreamInterface::Stats ReceiveAudioStream::GetStats() const {
+ AudioReceiveStreamInterface::Stats result;
+ receiver_->SendTask([&] {
+ result = receive_stream_->GetStats(/*get_and_clear_legacy_stats=*/true);
+ });
+ return result;
+}
+
+AudioStreamPair::~AudioStreamPair() = default;
+
+AudioStreamPair::AudioStreamPair(
+ CallClient* sender,
+ rtc::scoped_refptr<AudioEncoderFactory> encoder_factory,
+ CallClient* receiver,
+ rtc::scoped_refptr<AudioDecoderFactory> decoder_factory,
+ AudioStreamConfig config)
+ : config_(config),
+ send_stream_(sender, config, encoder_factory, sender->transport_.get()),
+ receive_stream_(receiver,
+ config,
+ &send_stream_,
+ decoder_factory,
+ receiver->transport_.get()) {}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/scenario/audio_stream.h b/third_party/libwebrtc/test/scenario/audio_stream.h
new file mode 100644
index 0000000000..cbaf9d29eb
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/audio_stream.h
@@ -0,0 +1,110 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_SCENARIO_AUDIO_STREAM_H_
+#define TEST_SCENARIO_AUDIO_STREAM_H_
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "test/scenario/call_client.h"
+#include "test/scenario/column_printer.h"
+#include "test/scenario/network_node.h"
+#include "test/scenario/scenario_config.h"
+
+namespace webrtc {
+namespace test {
+
+// SendAudioStream represents sending of audio. It can be used for starting the
+// stream if necessary.
+class SendAudioStream {
+ public:
+ ~SendAudioStream();
+
+ SendAudioStream(const SendAudioStream&) = delete;
+ SendAudioStream& operator=(const SendAudioStream&) = delete;
+
+ void Start();
+ void Stop();
+ void SetMuted(bool mute);
+ ColumnPrinter StatsPrinter();
+
+ private:
+ friend class Scenario;
+ friend class AudioStreamPair;
+ friend class ReceiveAudioStream;
+ SendAudioStream(CallClient* sender,
+ AudioStreamConfig config,
+ rtc::scoped_refptr<AudioEncoderFactory> encoder_factory,
+ Transport* send_transport);
+ AudioSendStream* send_stream_ = nullptr;
+ CallClient* const sender_;
+ const AudioStreamConfig config_;
+ uint32_t ssrc_;
+};
+
+// ReceiveAudioStream represents an audio receiver. It can't be used directly.
+class ReceiveAudioStream {
+ public:
+ ~ReceiveAudioStream();
+
+ ReceiveAudioStream(const ReceiveAudioStream&) = delete;
+ ReceiveAudioStream& operator=(const ReceiveAudioStream&) = delete;
+
+ void Start();
+ void Stop();
+ AudioReceiveStreamInterface::Stats GetStats() const;
+
+ private:
+ friend class Scenario;
+ friend class AudioStreamPair;
+ ReceiveAudioStream(CallClient* receiver,
+ AudioStreamConfig config,
+ SendAudioStream* send_stream,
+ rtc::scoped_refptr<AudioDecoderFactory> decoder_factory,
+ Transport* feedback_transport);
+ AudioReceiveStreamInterface* receive_stream_ = nullptr;
+ CallClient* const receiver_;
+ const AudioStreamConfig config_;
+};
+
+// AudioStreamPair represents an audio streaming session. It can be used to
+// access underlying send and receive classes. It can also be used in calls to
+// the Scenario class.
+class AudioStreamPair {
+ public:
+ ~AudioStreamPair();
+
+ AudioStreamPair(const AudioStreamPair&) = delete;
+ AudioStreamPair& operator=(const AudioStreamPair&) = delete;
+
+ SendAudioStream* send() { return &send_stream_; }
+ ReceiveAudioStream* receive() { return &receive_stream_; }
+
+ private:
+ friend class Scenario;
+ AudioStreamPair(CallClient* sender,
+ rtc::scoped_refptr<AudioEncoderFactory> encoder_factory,
+ CallClient* receiver,
+ rtc::scoped_refptr<AudioDecoderFactory> decoder_factory,
+ AudioStreamConfig config);
+
+ private:
+ const AudioStreamConfig config_;
+ SendAudioStream send_stream_;
+ ReceiveAudioStream receive_stream_;
+};
+
+std::vector<RtpExtension> GetAudioRtpExtensions(
+ const AudioStreamConfig& config);
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_SCENARIO_AUDIO_STREAM_H_
diff --git a/third_party/libwebrtc/test/scenario/call_client.cc b/third_party/libwebrtc/test/scenario/call_client.cc
new file mode 100644
index 0000000000..c80f58eeeb
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/call_client.cc
@@ -0,0 +1,386 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/scenario/call_client.h"
+
+#include <iostream>
+#include <memory>
+#include <utility>
+
+#include "api/media_types.h"
+#include "api/rtc_event_log/rtc_event_log.h"
+#include "api/rtc_event_log/rtc_event_log_factory.h"
+#include "api/transport/network_types.h"
+#include "call/call.h"
+#include "call/rtp_transport_controller_send_factory.h"
+#include "modules/audio_mixer/audio_mixer_impl.h"
+#include "modules/rtp_rtcp/include/rtp_header_extension_map.h"
+#include "modules/rtp_rtcp/source/rtp_packet_received.h"
+#include "modules/rtp_rtcp/source/rtp_util.h"
+
+namespace webrtc {
+namespace test {
+namespace {
+static constexpr size_t kNumSsrcs = 6;
+const uint32_t kSendRtxSsrcs[kNumSsrcs] = {0xBADCAFD, 0xBADCAFE, 0xBADCAFF,
+ 0xBADCB00, 0xBADCB01, 0xBADCB02};
+const uint32_t kVideoSendSsrcs[kNumSsrcs] = {0xC0FFED, 0xC0FFEE, 0xC0FFEF,
+ 0xC0FFF0, 0xC0FFF1, 0xC0FFF2};
+const uint32_t kVideoRecvLocalSsrcs[kNumSsrcs] = {0xDAB001, 0xDAB002, 0xDAB003,
+ 0xDAB004, 0xDAB005, 0xDAB006};
+const uint32_t kAudioSendSsrc = 0xDEADBEEF;
+const uint32_t kReceiverLocalAudioSsrc = 0x1234567;
+
+constexpr int kEventLogOutputIntervalMs = 5000;
+
+CallClientFakeAudio InitAudio(TimeController* time_controller) {
+ CallClientFakeAudio setup;
+ auto capturer = TestAudioDeviceModule::CreatePulsedNoiseCapturer(256, 48000);
+ auto renderer = TestAudioDeviceModule::CreateDiscardRenderer(48000);
+ setup.fake_audio_device = TestAudioDeviceModule::Create(
+ time_controller->GetTaskQueueFactory(), std::move(capturer),
+ std::move(renderer), 1.f);
+ setup.apm = AudioProcessingBuilder().Create();
+ setup.fake_audio_device->Init();
+ AudioState::Config audio_state_config;
+ audio_state_config.audio_mixer = AudioMixerImpl::Create();
+ audio_state_config.audio_processing = setup.apm;
+ audio_state_config.audio_device_module = setup.fake_audio_device;
+ setup.audio_state = AudioState::Create(audio_state_config);
+ setup.fake_audio_device->RegisterAudioCallback(
+ setup.audio_state->audio_transport());
+ return setup;
+}
+
+Call* CreateCall(TimeController* time_controller,
+ RtcEventLog* event_log,
+ CallClientConfig config,
+ LoggingNetworkControllerFactory* network_controller_factory,
+ rtc::scoped_refptr<AudioState> audio_state) {
+ CallConfig call_config(event_log);
+ call_config.bitrate_config.max_bitrate_bps =
+ config.transport.rates.max_rate.bps_or(-1);
+ call_config.bitrate_config.min_bitrate_bps =
+ config.transport.rates.min_rate.bps();
+ call_config.bitrate_config.start_bitrate_bps =
+ config.transport.rates.start_rate.bps();
+ call_config.task_queue_factory = time_controller->GetTaskQueueFactory();
+ call_config.network_controller_factory = network_controller_factory;
+ call_config.audio_state = audio_state;
+ call_config.pacer_burst_interval = config.pacer_burst_interval;
+ call_config.trials = config.field_trials;
+ Clock* clock = time_controller->GetClock();
+ return Call::Create(call_config, clock,
+ RtpTransportControllerSendFactory().Create(
+ call_config.ExtractTransportConfig(), clock));
+}
+
+std::unique_ptr<RtcEventLog> CreateEventLog(
+ TaskQueueFactory* task_queue_factory,
+ LogWriterFactoryInterface* log_writer_factory) {
+ if (!log_writer_factory) {
+ return std::make_unique<RtcEventLogNull>();
+ }
+ auto event_log = RtcEventLogFactory(task_queue_factory)
+ .CreateRtcEventLog(RtcEventLog::EncodingType::NewFormat);
+ bool success = event_log->StartLogging(log_writer_factory->Create(".rtc.dat"),
+ kEventLogOutputIntervalMs);
+ RTC_CHECK(success);
+ return event_log;
+}
+} // namespace
+NetworkControleUpdateCache::NetworkControleUpdateCache(
+ std::unique_ptr<NetworkControllerInterface> controller)
+ : controller_(std::move(controller)) {}
+NetworkControlUpdate NetworkControleUpdateCache::OnNetworkAvailability(
+ NetworkAvailability msg) {
+ return Update(controller_->OnNetworkAvailability(msg));
+}
+NetworkControlUpdate NetworkControleUpdateCache::OnNetworkRouteChange(
+ NetworkRouteChange msg) {
+ return Update(controller_->OnNetworkRouteChange(msg));
+}
+NetworkControlUpdate NetworkControleUpdateCache::OnProcessInterval(
+ ProcessInterval msg) {
+ return Update(controller_->OnProcessInterval(msg));
+}
+NetworkControlUpdate NetworkControleUpdateCache::OnRemoteBitrateReport(
+ RemoteBitrateReport msg) {
+ return Update(controller_->OnRemoteBitrateReport(msg));
+}
+NetworkControlUpdate NetworkControleUpdateCache::OnRoundTripTimeUpdate(
+ RoundTripTimeUpdate msg) {
+ return Update(controller_->OnRoundTripTimeUpdate(msg));
+}
+NetworkControlUpdate NetworkControleUpdateCache::OnSentPacket(SentPacket msg) {
+ return Update(controller_->OnSentPacket(msg));
+}
+NetworkControlUpdate NetworkControleUpdateCache::OnReceivedPacket(
+ ReceivedPacket msg) {
+ return Update(controller_->OnReceivedPacket(msg));
+}
+NetworkControlUpdate NetworkControleUpdateCache::OnStreamsConfig(
+ StreamsConfig msg) {
+ return Update(controller_->OnStreamsConfig(msg));
+}
+NetworkControlUpdate NetworkControleUpdateCache::OnTargetRateConstraints(
+ TargetRateConstraints msg) {
+ return Update(controller_->OnTargetRateConstraints(msg));
+}
+NetworkControlUpdate NetworkControleUpdateCache::OnTransportLossReport(
+ TransportLossReport msg) {
+ return Update(controller_->OnTransportLossReport(msg));
+}
+NetworkControlUpdate NetworkControleUpdateCache::OnTransportPacketsFeedback(
+ TransportPacketsFeedback msg) {
+ return Update(controller_->OnTransportPacketsFeedback(msg));
+}
+NetworkControlUpdate NetworkControleUpdateCache::OnNetworkStateEstimate(
+ NetworkStateEstimate msg) {
+ return Update(controller_->OnNetworkStateEstimate(msg));
+}
+
+NetworkControlUpdate NetworkControleUpdateCache::update_state() const {
+ return update_state_;
+}
+NetworkControlUpdate NetworkControleUpdateCache::Update(
+ NetworkControlUpdate update) {
+ if (update.target_rate)
+ update_state_.target_rate = update.target_rate;
+ if (update.pacer_config)
+ update_state_.pacer_config = update.pacer_config;
+ if (update.congestion_window)
+ update_state_.congestion_window = update.congestion_window;
+ if (!update.probe_cluster_configs.empty())
+ update_state_.probe_cluster_configs = update.probe_cluster_configs;
+ return update;
+}
+
+LoggingNetworkControllerFactory::LoggingNetworkControllerFactory(
+ LogWriterFactoryInterface* log_writer_factory,
+ TransportControllerConfig config) {
+ if (config.cc_factory) {
+ cc_factory_ = config.cc_factory;
+ if (log_writer_factory)
+ RTC_LOG(LS_WARNING)
+ << "Can't log controller state for injected network controllers";
+ } else {
+ if (log_writer_factory) {
+ goog_cc_factory_.AttachWriter(
+ log_writer_factory->Create(".cc_state.txt"));
+ print_cc_state_ = true;
+ }
+ cc_factory_ = &goog_cc_factory_;
+ }
+}
+
+LoggingNetworkControllerFactory::~LoggingNetworkControllerFactory() {}
+
+void LoggingNetworkControllerFactory::LogCongestionControllerStats(
+ Timestamp at_time) {
+ if (print_cc_state_)
+ goog_cc_factory_.PrintState(at_time);
+}
+
+NetworkControlUpdate LoggingNetworkControllerFactory::GetUpdate() const {
+ if (last_controller_)
+ return last_controller_->update_state();
+ return NetworkControlUpdate();
+}
+
+std::unique_ptr<NetworkControllerInterface>
+LoggingNetworkControllerFactory::Create(NetworkControllerConfig config) {
+ auto controller =
+ std::make_unique<NetworkControleUpdateCache>(cc_factory_->Create(config));
+ last_controller_ = controller.get();
+ return controller;
+}
+
+TimeDelta LoggingNetworkControllerFactory::GetProcessInterval() const {
+ return cc_factory_->GetProcessInterval();
+}
+
+void LoggingNetworkControllerFactory::SetRemoteBitrateEstimate(
+ RemoteBitrateReport msg) {
+ if (last_controller_)
+ last_controller_->OnRemoteBitrateReport(msg);
+}
+
+CallClient::CallClient(
+ TimeController* time_controller,
+ std::unique_ptr<LogWriterFactoryInterface> log_writer_factory,
+ CallClientConfig config)
+ : time_controller_(time_controller),
+ clock_(time_controller->GetClock()),
+ log_writer_factory_(std::move(log_writer_factory)),
+ network_controller_factory_(log_writer_factory_.get(), config.transport),
+ task_queue_(time_controller->GetTaskQueueFactory()->CreateTaskQueue(
+ "CallClient",
+ TaskQueueFactory::Priority::NORMAL)) {
+ config.field_trials = &field_trials_;
+ SendTask([this, config] {
+ event_log_ = CreateEventLog(time_controller_->GetTaskQueueFactory(),
+ log_writer_factory_.get());
+ fake_audio_setup_ = InitAudio(time_controller_);
+
+ call_.reset(CreateCall(time_controller_, event_log_.get(), config,
+ &network_controller_factory_,
+ fake_audio_setup_.audio_state));
+ transport_ = std::make_unique<NetworkNodeTransport>(clock_, call_.get());
+ });
+}
+
+CallClient::~CallClient() {
+ SendTask([&] {
+ call_.reset();
+ fake_audio_setup_ = {};
+ rtc::Event done;
+ event_log_->StopLogging([&done] { done.Set(); });
+ done.Wait(rtc::Event::kForever);
+ event_log_.reset();
+ });
+}
+
+ColumnPrinter CallClient::StatsPrinter() {
+ return ColumnPrinter::Lambda(
+ "pacer_delay call_send_bw",
+ [this](rtc::SimpleStringBuilder& sb) {
+ Call::Stats call_stats = call_->GetStats();
+ sb.AppendFormat("%.3lf %.0lf", call_stats.pacer_delay_ms / 1000.0,
+ call_stats.send_bandwidth_bps / 8.0);
+ },
+ 64);
+}
+
+Call::Stats CallClient::GetStats() {
+ // This call needs to be made on the thread that `call_` was constructed on.
+ Call::Stats stats;
+ SendTask([this, &stats] { stats = call_->GetStats(); });
+ return stats;
+}
+
+DataRate CallClient::target_rate() const {
+ return network_controller_factory_.GetUpdate().target_rate->target_rate;
+}
+
+DataRate CallClient::stable_target_rate() const {
+ return network_controller_factory_.GetUpdate()
+ .target_rate->stable_target_rate;
+}
+
+DataRate CallClient::padding_rate() const {
+ return network_controller_factory_.GetUpdate().pacer_config->pad_rate();
+}
+
+void CallClient::SetRemoteBitrate(DataRate bitrate) {
+ RemoteBitrateReport msg;
+ msg.bandwidth = bitrate;
+ msg.receive_time = clock_->CurrentTime();
+ network_controller_factory_.SetRemoteBitrateEstimate(msg);
+}
+
+void CallClient::UpdateBitrateConstraints(
+ const BitrateConstraints& constraints) {
+ SendTask([this, &constraints]() {
+ call_->GetTransportControllerSend()->SetSdpBitrateParameters(constraints);
+ });
+}
+
+void CallClient::SetAudioReceiveRtpHeaderExtensions(
+ rtc::ArrayView<RtpExtension> extensions) {
+ SendTask([this, &extensions]() {
+ audio_extensions_ = RtpHeaderExtensionMap(extensions);
+ });
+}
+
+void CallClient::SetVideoReceiveRtpHeaderExtensions(
+ rtc::ArrayView<RtpExtension> extensions) {
+ SendTask([this, &extensions]() {
+ video_extensions_ = RtpHeaderExtensionMap(extensions);
+ });
+}
+
+void CallClient::OnPacketReceived(EmulatedIpPacket packet) {
+ MediaType media_type = MediaType::ANY;
+ if (IsRtpPacket(packet.data)) {
+ media_type = ssrc_media_types_[ParseRtpSsrc(packet.data)];
+ task_queue_.PostTask([this, media_type,
+ packet = std::move(packet)]() mutable {
+ RtpHeaderExtensionMap& extension_map = media_type == MediaType::AUDIO
+ ? audio_extensions_
+ : video_extensions_;
+ RtpPacketReceived received_packet(&extension_map, packet.arrival_time);
+ RTC_CHECK(received_packet.Parse(packet.data));
+ call_->Receiver()->DeliverRtpPacket(media_type, received_packet,
+ /*undemuxable_packet_handler=*/
+ [](const RtpPacketReceived& packet) {
+ RTC_CHECK_NOTREACHED();
+ return false;
+ });
+ });
+ } else {
+ task_queue_.PostTask(
+ [call = call_.get(), packet = std::move(packet)]() mutable {
+ call->Receiver()->DeliverRtcpPacket(packet.data);
+ });
+ }
+}
+
+std::unique_ptr<RtcEventLogOutput> CallClient::GetLogWriter(std::string name) {
+ if (!log_writer_factory_ || name.empty())
+ return nullptr;
+ return log_writer_factory_->Create(name);
+}
+
+uint32_t CallClient::GetNextVideoSsrc() {
+ RTC_CHECK_LT(next_video_ssrc_index_, kNumSsrcs);
+ return kVideoSendSsrcs[next_video_ssrc_index_++];
+}
+
+uint32_t CallClient::GetNextVideoLocalSsrc() {
+ RTC_CHECK_LT(next_video_local_ssrc_index_, kNumSsrcs);
+ return kVideoRecvLocalSsrcs[next_video_local_ssrc_index_++];
+}
+
+uint32_t CallClient::GetNextAudioSsrc() {
+ RTC_CHECK_LT(next_audio_ssrc_index_, 1);
+ next_audio_ssrc_index_++;
+ return kAudioSendSsrc;
+}
+
+uint32_t CallClient::GetNextAudioLocalSsrc() {
+ RTC_CHECK_LT(next_audio_local_ssrc_index_, 1);
+ next_audio_local_ssrc_index_++;
+ return kReceiverLocalAudioSsrc;
+}
+
+uint32_t CallClient::GetNextRtxSsrc() {
+ RTC_CHECK_LT(next_rtx_ssrc_index_, kNumSsrcs);
+ return kSendRtxSsrcs[next_rtx_ssrc_index_++];
+}
+
+void CallClient::SendTask(std::function<void()> task) {
+ task_queue_.SendTask(std::move(task));
+}
+
+int16_t CallClient::Bind(EmulatedEndpoint* endpoint) {
+ uint16_t port = endpoint->BindReceiver(0, this).value();
+ endpoints_.push_back({endpoint, port});
+ return port;
+}
+
+void CallClient::UnBind() {
+ for (auto ep_port : endpoints_)
+ ep_port.first->UnbindReceiver(ep_port.second);
+}
+
+CallClientPair::~CallClientPair() = default;
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/scenario/call_client.h b/third_party/libwebrtc/test/scenario/call_client.h
new file mode 100644
index 0000000000..5d62fc75e7
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/call_client.h
@@ -0,0 +1,204 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_SCENARIO_CALL_CLIENT_H_
+#define TEST_SCENARIO_CALL_CLIENT_H_
+
+#include <map>
+#include <memory>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "api/array_view.h"
+#include "api/rtc_event_log/rtc_event_log.h"
+#include "api/rtp_parameters.h"
+#include "api/test/time_controller.h"
+#include "api/units/data_rate.h"
+#include "call/call.h"
+#include "modules/audio_device/include/test_audio_device.h"
+#include "modules/congestion_controller/goog_cc/test/goog_cc_printer.h"
+#include "modules/rtp_rtcp/include/rtp_header_extension_map.h"
+#include "rtc_base/task_queue_for_test.h"
+#include "test/logging/log_writer.h"
+#include "test/network/network_emulation.h"
+#include "test/scenario/column_printer.h"
+#include "test/scenario/network_node.h"
+#include "test/scenario/scenario_config.h"
+
+namespace webrtc {
+
+namespace test {
+// Helper class to capture network controller state.
+class NetworkControleUpdateCache : public NetworkControllerInterface {  // Note: "Controle" (sic) is the established public name; renaming would break users.
+ public:
+  explicit NetworkControleUpdateCache(
+      std::unique_ptr<NetworkControllerInterface> controller);
+
+  NetworkControlUpdate OnNetworkAvailability(NetworkAvailability msg) override;
+  NetworkControlUpdate OnNetworkRouteChange(NetworkRouteChange msg) override;
+  NetworkControlUpdate OnProcessInterval(ProcessInterval msg) override;
+  NetworkControlUpdate OnRemoteBitrateReport(RemoteBitrateReport msg) override;
+  NetworkControlUpdate OnRoundTripTimeUpdate(RoundTripTimeUpdate msg) override;
+  NetworkControlUpdate OnSentPacket(SentPacket msg) override;
+  NetworkControlUpdate OnReceivedPacket(ReceivedPacket msg) override;
+  NetworkControlUpdate OnStreamsConfig(StreamsConfig msg) override;
+  NetworkControlUpdate OnTargetRateConstraints(
+      TargetRateConstraints msg) override;
+  NetworkControlUpdate OnTransportLossReport(TransportLossReport msg) override;
+  NetworkControlUpdate OnTransportPacketsFeedback(
+      TransportPacketsFeedback msg) override;
+  NetworkControlUpdate OnNetworkStateEstimate(
+      NetworkStateEstimate msg) override;
+
+  NetworkControlUpdate update_state() const;  // The most recently cached update.
+
+ private:
+  NetworkControlUpdate Update(NetworkControlUpdate update);
+  const std::unique_ptr<NetworkControllerInterface> controller_;  // Wrapped controller — presumably all callbacks forward to it; see the .cc.
+  NetworkControlUpdate update_state_;
+};
+
+class LoggingNetworkControllerFactory
+    : public NetworkControllerFactoryInterface {  // Factory that wraps created controllers for logging/inspection.
+ public:
+  LoggingNetworkControllerFactory(LogWriterFactoryInterface* log_writer_factory,
+                                  TransportControllerConfig config);
+
+  ~LoggingNetworkControllerFactory();
+
+  LoggingNetworkControllerFactory(const LoggingNetworkControllerFactory&) =
+      delete;
+  LoggingNetworkControllerFactory& operator=(
+      const LoggingNetworkControllerFactory&) = delete;
+
+  std::unique_ptr<NetworkControllerInterface> Create(
+      NetworkControllerConfig config) override;
+  TimeDelta GetProcessInterval() const override;
+  // TODO(srte): Consider using the ColumnPrinter interface for this.
+  void LogCongestionControllerStats(Timestamp at_time);
+  void SetRemoteBitrateEstimate(RemoteBitrateReport msg);
+
+  NetworkControlUpdate GetUpdate() const;  // State captured from the last created controller.
+
+ private:
+  GoogCcDebugFactory goog_cc_factory_;
+  NetworkControllerFactoryInterface* cc_factory_ = nullptr;  // Non-owning; the factory actually used for Create().
+  bool print_cc_state_ = false;
+  NetworkControleUpdateCache* last_controller_ = nullptr;  // Non-owning view of the most recently created controller.
+};
+
+struct CallClientFakeAudio {  // Fake audio pipeline (processing, device, state) used by the client's Call.
+  rtc::scoped_refptr<AudioProcessing> apm;
+  rtc::scoped_refptr<TestAudioDeviceModule> fake_audio_device;
+  rtc::scoped_refptr<AudioState> audio_state;
+};
+// CallClient represents a participant in a call scenario. It is created by the
+// Scenario class and is used as sender and receiver when setting up a media
+// stream session.
+class CallClient : public EmulatedNetworkReceiverInterface {
+ public:
+  CallClient(TimeController* time_controller,
+             std::unique_ptr<LogWriterFactoryInterface> log_writer_factory,
+             CallClientConfig config);
+
+  ~CallClient();
+
+  CallClient(const CallClient&) = delete;
+  CallClient& operator=(const CallClient&) = delete;
+
+  ColumnPrinter StatsPrinter();
+  Call::Stats GetStats();
+  DataRate send_bandwidth() {  // Convenience wrapper converting the raw bps stat to a DataRate.
+    return DataRate::BitsPerSec(GetStats().send_bandwidth_bps);
+  }
+  DataRate target_rate() const;
+  DataRate stable_target_rate() const;
+  DataRate padding_rate() const;
+  void UpdateBitrateConstraints(const BitrateConstraints& constraints);
+  void SetRemoteBitrate(DataRate bitrate);
+
+  void SetAudioReceiveRtpHeaderExtensions(
+      rtc::ArrayView<RtpExtension> extensions);
+  void SetVideoReceiveRtpHeaderExtensions(
+      rtc::ArrayView<RtpExtension> extensions);
+
+  void OnPacketReceived(EmulatedIpPacket packet) override;  // Entry point for packets from the emulated network.
+  std::unique_ptr<RtcEventLogOutput> GetLogWriter(std::string name);
+
+  // Exposed publicly so that tests can execute tasks such as querying stats
+  // for media streams in the expected runtime environment (essentially what
+  // CallClient does internally for GetStats()).
+  void SendTask(std::function<void()> task);
+
+ private:
+  friend class Scenario;
+  friend class CallClientPair;
+  friend class SendVideoStream;
+  friend class VideoStreamPair;
+  friend class ReceiveVideoStream;
+  friend class SendAudioStream;
+  friend class ReceiveAudioStream;
+  friend class AudioStreamPair;
+  friend class NetworkNodeTransport;
+  uint32_t GetNextVideoSsrc();
+  uint32_t GetNextVideoLocalSsrc();
+  uint32_t GetNextAudioSsrc();
+  uint32_t GetNextAudioLocalSsrc();
+  uint32_t GetNextRtxSsrc();
+  int16_t Bind(EmulatedEndpoint* endpoint);
+  void UnBind();
+
+  TimeController* const time_controller_;  // Non-owning; drives simulated/real time for the whole client.
+  Clock* clock_;
+  const std::unique_ptr<LogWriterFactoryInterface> log_writer_factory_;
+  std::unique_ptr<RtcEventLog> event_log_;
+  LoggingNetworkControllerFactory network_controller_factory_;
+  CallClientFakeAudio fake_audio_setup_;
+  std::unique_ptr<Call> call_;
+  std::unique_ptr<NetworkNodeTransport> transport_;
+  std::vector<std::pair<EmulatedEndpoint*, uint16_t>> endpoints_;  // Bound (endpoint, port) pairs; released in UnBind().
+  RtpHeaderExtensionMap audio_extensions_;
+  RtpHeaderExtensionMap video_extensions_;
+
+  int next_video_ssrc_index_ = 0;
+  int next_video_local_ssrc_index_ = 0;
+  int next_rtx_ssrc_index_ = 0;
+  int next_audio_ssrc_index_ = 0;
+  int next_audio_local_ssrc_index_ = 0;
+  std::map<uint32_t, MediaType> ssrc_media_types_;  // SSRC -> media type, used when demuxing incoming packets.
+  // Defined last so it's destroyed first.
+  TaskQueueForTest task_queue_;
+
+  const FieldTrialBasedConfig field_trials_;
+};
+
+class CallClientPair {  // Non-owning pairing of two CallClients, created by Scenario.
+ public:
+  ~CallClientPair();
+
+  CallClientPair(const CallClientPair&) = delete;
+  CallClientPair& operator=(const CallClientPair&) = delete;
+
+  CallClient* first() { return first_; }
+  CallClient* second() { return second_; }
+  std::pair<CallClient*, CallClient*> forward() { return {first(), second()}; }  // first -> second direction.
+  std::pair<CallClient*, CallClient*> reverse() { return {second(), first()}; }  // second -> first direction.
+
+ private:
+  friend class Scenario;
+  CallClientPair(CallClient* first, CallClient* second)
+      : first_(first), second_(second) {}
+  CallClient* const first_;
+  CallClient* const second_;
+};
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_SCENARIO_CALL_CLIENT_H_
diff --git a/third_party/libwebrtc/test/scenario/column_printer.cc b/third_party/libwebrtc/test/scenario/column_printer.cc
new file mode 100644
index 0000000000..661c83bd0d
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/column_printer.cc
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/scenario/column_printer.h"
+
+namespace webrtc {
+namespace test {
+
+ColumnPrinter::ColumnPrinter(const ColumnPrinter&) = default;
+ColumnPrinter::~ColumnPrinter() = default;
+
+ColumnPrinter::ColumnPrinter(
+    const char* headers,
+    std::function<void(rtc::SimpleStringBuilder&)> printer,
+    size_t max_length)
+    : headers_(headers), printer_(printer), max_length_(max_length) {}
+
+ColumnPrinter ColumnPrinter::Fixed(const char* headers, std::string fields) {  // Column that always prints the same `fields` string.
+  return ColumnPrinter(
+      headers, [fields](rtc::SimpleStringBuilder& sb) { sb << fields; },  // `fields` captured by value; the lambda owns its copy.
+      fields.size());
+}
+
+ColumnPrinter ColumnPrinter::Lambda(
+    const char* headers,
+    std::function<void(rtc::SimpleStringBuilder&)> printer,
+    size_t max_length) {  // Column whose value is produced by `printer` on each row; `max_length` bounds its output.
+  return ColumnPrinter(headers, printer, max_length);
+}
+
+StatesPrinter::StatesPrinter(std::unique_ptr<RtcEventLogOutput> writer,
+                             std::vector<ColumnPrinter> printers)
+    : writer_(std::move(writer)), printers_(printers) {
+  RTC_CHECK(!printers_.empty());
+  for (auto& printer : printers_)
+    buffer_size_ += printer.max_length_ + 1;  // +1 for the separating space (or trailing newline).
+  buffer_.resize(buffer_size_);  // Sized once; reused by every PrintRow() call.
+}
+
+StatesPrinter::~StatesPrinter() = default;
+
+void StatesPrinter::PrintHeaders() {  // Writes all column headers, space-separated, as one line.
+  if (!writer_)
+    return;
+  writer_->Write(printers_[0].headers_);
+  for (size_t i = 1; i < printers_.size(); ++i) {
+    writer_->Write(" ");
+    writer_->Write(printers_[i].headers_);
+  }
+  writer_->Write("\n");
+}
+
+void StatesPrinter::PrintRow() {  // Evaluates every column printer and writes one space-separated row.
+  // This runs even when `writer_` is null so that the printer lambdas' side
+  // effects are preserved and breakpoints inside them still trigger.
+  rtc::SimpleStringBuilder sb(buffer_);
+  printers_[0].printer_(sb);
+  for (size_t i = 1; i < printers_.size(); ++i) {
+    sb << ' ';
+    printers_[i].printer_(sb);
+  }
+  sb << "\n";
+  if (writer_)
+    writer_->Write(std::string(sb.str(), sb.size()));
+}
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/scenario/column_printer.h b/third_party/libwebrtc/test/scenario/column_printer.h
new file mode 100644
index 0000000000..529f4597ec
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/column_printer.h
@@ -0,0 +1,66 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_SCENARIO_COLUMN_PRINTER_H_
+#define TEST_SCENARIO_COLUMN_PRINTER_H_
+#include <functional>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "rtc_base/strings/string_builder.h"
+#include "test/logging/log_writer.h"
+
+namespace webrtc {
+namespace test {
+class ColumnPrinter {  // Describes one column of a stats log: a header plus a row-value printer.
+ public:
+  ColumnPrinter(const ColumnPrinter&);
+  ~ColumnPrinter();
+  static ColumnPrinter Fixed(const char* headers, std::string fields);  // Constant-valued column.
+  static ColumnPrinter Lambda(
+      const char* headers,
+      std::function<void(rtc::SimpleStringBuilder&)> printer,
+      size_t max_length = 256);  // Callback-valued column; `max_length` bounds one row's output.
+
+ protected:
+  friend class StatesPrinter;
+  const char* headers_;
+  std::function<void(rtc::SimpleStringBuilder&)> printer_;
+  size_t max_length_;
+
+ private:
+  ColumnPrinter(const char* headers,
+                std::function<void(rtc::SimpleStringBuilder&)> printer,
+                size_t max_length);  // Only reachable via Fixed()/Lambda().
+};
+
+class StatesPrinter {  // Writes a set of ColumnPrinters as header + rows to an optional log output.
+ public:
+  StatesPrinter(std::unique_ptr<RtcEventLogOutput> writer,
+                std::vector<ColumnPrinter> printers);
+
+  ~StatesPrinter();
+
+  StatesPrinter(const StatesPrinter&) = delete;
+  StatesPrinter& operator=(const StatesPrinter&) = delete;
+
+  void PrintHeaders();
+  void PrintRow();
+
+ private:
+  const std::unique_ptr<RtcEventLogOutput> writer_;  // May be null; rows are still evaluated for side effects.
+  const std::vector<ColumnPrinter> printers_;
+  size_t buffer_size_ = 0;  // Sum of column max lengths plus separators.
+  std::vector<char> buffer_;  // Reused scratch buffer for building one row.
+};
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_SCENARIO_COLUMN_PRINTER_H_
diff --git a/third_party/libwebrtc/test/scenario/hardware_codecs.cc b/third_party/libwebrtc/test/scenario/hardware_codecs.cc
new file mode 100644
index 0000000000..cac0f10dc9
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/hardware_codecs.cc
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/scenario/hardware_codecs.h"
+
+#include "rtc_base/checks.h"
+
+#ifdef WEBRTC_ANDROID
+#include "modules/video_coding/codecs/test/android_codec_factory_helper.h"
+#endif
+#ifdef WEBRTC_MAC
+#include "modules/video_coding/codecs/test/objc_codec_factory_helper.h"
+#endif
+
+namespace webrtc {
+namespace test {
+std::unique_ptr<VideoEncoderFactory> CreateHardwareEncoderFactory() {  // Platform hardware encoder factory; Android and Mac only.
+#ifdef WEBRTC_ANDROID
+  InitializeAndroidObjects();  // JVM/JNI setup required before creating Android codecs.
+  return CreateAndroidEncoderFactory();
+#else
+#ifdef WEBRTC_MAC
+  return CreateObjCEncoderFactory();
+#else
+  RTC_DCHECK_NOTREACHED()  // Debug-only check; release builds fall through and return nullptr.
+      << "Hardware encoder not implemented on this platform.";
+  return nullptr;
+#endif
+#endif
+}
+std::unique_ptr<VideoDecoderFactory> CreateHardwareDecoderFactory() {  // Platform hardware decoder factory; Android and Mac only.
+#ifdef WEBRTC_ANDROID
+  InitializeAndroidObjects();
+  return CreateAndroidDecoderFactory();
+#else
+#ifdef WEBRTC_MAC
+  return CreateObjCDecoderFactory();
+#else
+  RTC_DCHECK_NOTREACHED()
+      << "Hardware decoder not implemented on this platform.";
+  return nullptr;
+#endif
+#endif
+}
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/scenario/hardware_codecs.h b/third_party/libwebrtc/test/scenario/hardware_codecs.h
new file mode 100644
index 0000000000..ae14a27d9e
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/hardware_codecs.h
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_SCENARIO_HARDWARE_CODECS_H_
+#define TEST_SCENARIO_HARDWARE_CODECS_H_
+
+#include <memory>
+
+#include "api/video_codecs/video_decoder_factory.h"
+#include "api/video_codecs/video_encoder_factory.h"
+
+namespace webrtc {
+namespace test {
+std::unique_ptr<VideoEncoderFactory> CreateHardwareEncoderFactory();  // Platform HW encoder factory (Android/Mac); nullptr elsewhere in release builds.
+std::unique_ptr<VideoDecoderFactory> CreateHardwareDecoderFactory();  // Platform HW decoder factory (Android/Mac); nullptr elsewhere in release builds.
+} // namespace test
+} // namespace webrtc
+#endif // TEST_SCENARIO_HARDWARE_CODECS_H_
diff --git a/third_party/libwebrtc/test/scenario/network_node.cc b/third_party/libwebrtc/test/scenario/network_node.cc
new file mode 100644
index 0000000000..e149bb11e0
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/network_node.cc
@@ -0,0 +1,144 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/scenario/network_node.h"
+
+#include <algorithm>
+#include <vector>
+
+#include <memory>
+#include "rtc_base/net_helper.h"
+#include "rtc_base/numerics/safe_minmax.h"
+
+namespace webrtc {
+namespace test {
+namespace {
+constexpr char kDummyTransportName[] = "dummy";
+SimulatedNetwork::Config CreateSimulationConfig(
+    NetworkSimulationConfig config) {  // Translates the scenario-level config into SimulatedNetwork's config.
+  SimulatedNetwork::Config sim_config;
+  sim_config.link_capacity_kbps = config.bandwidth.kbps_or(0);  // 0 when unset — presumably "unconstrained"; confirm against SimulatedNetwork.
+  sim_config.loss_percent = config.loss_rate * 100;  // Fraction -> percent.
+  sim_config.queue_delay_ms = config.delay.ms();
+  sim_config.delay_standard_deviation_ms = config.delay_std_dev.ms();
+  sim_config.packet_overhead = config.packet_overhead.bytes<int>();
+  sim_config.queue_length_packets =
+      config.packet_queue_length_limit.value_or(0);  // 0 when no limit was configured.
+  return sim_config;
+}
+}  // namespace
+
+SimulationNode::SimulationNode(NetworkSimulationConfig config,
+                               SimulatedNetwork* behavior,
+                               EmulatedNetworkNode* network_node)
+    : config_(config), simulation_(behavior), network_node_(network_node) {}
+
+std::unique_ptr<SimulatedNetwork> SimulationNode::CreateBehavior(
+    NetworkSimulationConfig config) {  // Builds the SimulatedNetwork behavior for a new node.
+  SimulatedNetwork::Config sim_config = CreateSimulationConfig(config);
+  return std::make_unique<SimulatedNetwork>(sim_config);
+}
+
+void SimulationNode::UpdateConfig(
+    std::function<void(NetworkSimulationConfig*)> modifier) {  // Lets the caller mutate the config in place, then applies it live.
+  modifier(&config_);
+  SimulatedNetwork::Config sim_config = CreateSimulationConfig(config_);
+  simulation_->SetConfig(sim_config);
+}
+
+void SimulationNode::PauseTransmissionUntil(Timestamp until) {
+  simulation_->PauseTransmissionUntil(until.us());
+}
+
+ColumnPrinter SimulationNode::ConfigPrinter() const {  // Stats column showing this link's current configuration.
+  return ColumnPrinter::Lambda(
+      "propagation_delay capacity loss_rate",
+      [this](rtc::SimpleStringBuilder& sb) {
+        sb.AppendFormat("%.3lf %.0lf %.2lf", config_.delay.seconds<double>(),
+                        config_.bandwidth.bps() / 8.0, config_.loss_rate);  // NOTE(review): bps()/8.0 is bytes/s under a "capacity" header — confirm intended unit.
+      });
+}
+
+NetworkNodeTransport::NetworkNodeTransport(Clock* sender_clock,
+                                           Call* sender_call)
+    : sender_clock_(sender_clock), sender_call_(sender_call) {}
+
+NetworkNodeTransport::~NetworkNodeTransport() = default;
+
+bool NetworkNodeTransport::SendRtp(const uint8_t* packet,
+                                   size_t length,
+                                   const PacketOptions& options) {  // Sends one RTP packet into the emulated network; false if disconnected.
+  int64_t send_time_ms = sender_clock_->TimeInMilliseconds();
+  rtc::SentPacket sent_packet;
+  sent_packet.packet_id = options.packet_id;
+  sent_packet.info.included_in_feedback = options.included_in_feedback;
+  sent_packet.info.included_in_allocation = options.included_in_allocation;
+  sent_packet.send_time_ms = send_time_ms;
+  sent_packet.info.packet_size_bytes = length;
+  sent_packet.info.packet_type = rtc::PacketType::kData;
+  sender_call_->OnSentPacket(sent_packet);  // Reported to the call even when the endpoint check below fails.
+
+  MutexLock lock(&mutex_);
+  if (!endpoint_)  // Not connected: the packet is dropped.
+    return false;
+  rtc::CopyOnWriteBuffer buffer(packet, length);
+  endpoint_->SendPacket(local_address_, remote_address_, buffer,
+                        packet_overhead_.bytes());
+  return true;
+}
+
+bool NetworkNodeTransport::SendRtcp(const uint8_t* packet, size_t length) {  // Sends one RTCP packet; false if disconnected.
+  rtc::CopyOnWriteBuffer buffer(packet, length);
+  MutexLock lock(&mutex_);
+  if (!endpoint_)
+    return false;
+  endpoint_->SendPacket(local_address_, remote_address_, buffer,
+                        packet_overhead_.bytes());
+  return true;
+}
+
+void NetworkNodeTransport::Connect(EmulatedEndpoint* endpoint,
+                                   const rtc::SocketAddress& receiver_address,
+                                   DataSize packet_overhead) {  // Attaches the transport to an endpoint and announces the new route.
+  rtc::NetworkRoute route;
+  route.connected = true;
+  // We assume that the address will be unique in the lower bytes.
+  route.local = rtc::RouteEndpoint::CreateWithNetworkId(static_cast<uint16_t>(
+      receiver_address.ipaddr().v4AddressAsHostOrderInteger()));  // NOTE(review): local id is also derived from the *receiver* address — TODO confirm intended.
+  route.remote = rtc::RouteEndpoint::CreateWithNetworkId(static_cast<uint16_t>(
+      receiver_address.ipaddr().v4AddressAsHostOrderInteger()));
+  route.packet_overhead = packet_overhead.bytes() +
+                          receiver_address.ipaddr().overhead() +
+                          cricket::kUdpHeaderSize;  // Total per-packet overhead: caller's + IP header + UDP header.
+  {
+    // Only IPv4 address is supported.
+    RTC_CHECK_EQ(receiver_address.family(), AF_INET);
+    MutexLock lock(&mutex_);  // Scoped so the lock is released before notifying the call below.
+    endpoint_ = endpoint;
+    local_address_ = rtc::SocketAddress(endpoint_->GetPeerLocalAddress(), 0);
+    remote_address_ = receiver_address;
+    packet_overhead_ = packet_overhead;
+    current_network_route_ = route;
+  }
+
+  sender_call_->GetTransportControllerSend()->OnNetworkRouteChanged(
+      kDummyTransportName, route);
+}
+
+void NetworkNodeTransport::Disconnect() {  // Marks the route disconnected, notifies the call, then detaches.
+  MutexLock lock(&mutex_);
+  current_network_route_.connected = false;
+  sender_call_->GetTransportControllerSend()->OnNetworkRouteChanged(
+      kDummyTransportName, current_network_route_);
+  current_network_route_ = {};
+  endpoint_ = nullptr;  // Subsequent SendRtp/SendRtcp calls will return false.
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/scenario/network_node.h b/third_party/libwebrtc/test/scenario/network_node.h
new file mode 100644
index 0000000000..fe87cefa26
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/network_node.h
@@ -0,0 +1,83 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_SCENARIO_NETWORK_NODE_H_
+#define TEST_SCENARIO_NETWORK_NODE_H_
+
+#include <deque>
+#include <map>
+#include <memory>
+#include <utility>
+#include <vector>
+
+#include "api/call/transport.h"
+#include "api/units/timestamp.h"
+#include "call/call.h"
+#include "call/simulated_network.h"
+#include "rtc_base/copy_on_write_buffer.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "test/network/network_emulation.h"
+#include "test/scenario/column_printer.h"
+#include "test/scenario/scenario_config.h"
+
+namespace webrtc {
+namespace test {
+
+class SimulationNode {  // Pairs a scenario-level config with the SimulatedNetwork/EmulatedNetworkNode implementing it.
+ public:
+  SimulationNode(NetworkSimulationConfig config,
+                 SimulatedNetwork* behavior,
+                 EmulatedNetworkNode* network_node);
+  static std::unique_ptr<SimulatedNetwork> CreateBehavior(
+      NetworkSimulationConfig config);
+
+  void UpdateConfig(std::function<void(NetworkSimulationConfig*)> modifier);  // Mutates the config and applies it live.
+  void PauseTransmissionUntil(Timestamp until);
+  ColumnPrinter ConfigPrinter() const;
+  EmulatedNetworkNode* node() { return network_node_; }
+
+ private:
+  NetworkSimulationConfig config_;
+  SimulatedNetwork* const simulation_;  // Non-owning.
+  EmulatedNetworkNode* const network_node_;  // Non-owning.
+};
+
+class NetworkNodeTransport : public Transport {  // webrtc::Transport that sends a Call's packets through an emulated endpoint.
+ public:
+  NetworkNodeTransport(Clock* sender_clock, Call* sender_call);
+  ~NetworkNodeTransport() override;
+
+  bool SendRtp(const uint8_t* packet,
+               size_t length,
+               const PacketOptions& options) override;
+  bool SendRtcp(const uint8_t* packet, size_t length) override;
+
+  void Connect(EmulatedEndpoint* endpoint,
+               const rtc::SocketAddress& receiver_address,
+               DataSize packet_overhead);
+  void Disconnect();
+
+  DataSize packet_overhead() {
+    MutexLock lock(&mutex_);
+    return packet_overhead_;
+  }
+
+ private:
+  Mutex mutex_;  // Guards the connection state below; Send* and Connect/Disconnect may race.
+  Clock* const sender_clock_;
+  Call* const sender_call_;
+  EmulatedEndpoint* endpoint_ RTC_GUARDED_BY(mutex_) = nullptr;  // Null while disconnected.
+  rtc::SocketAddress local_address_ RTC_GUARDED_BY(mutex_);
+  rtc::SocketAddress remote_address_ RTC_GUARDED_BY(mutex_);
+  DataSize packet_overhead_ RTC_GUARDED_BY(mutex_) = DataSize::Zero();
+  rtc::NetworkRoute current_network_route_ RTC_GUARDED_BY(mutex_);
+};
+} // namespace test
+} // namespace webrtc
+#endif // TEST_SCENARIO_NETWORK_NODE_H_
diff --git a/third_party/libwebrtc/test/scenario/performance_stats.cc b/third_party/libwebrtc/test/scenario/performance_stats.cc
new file mode 100644
index 0000000000..e12be8a003
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/performance_stats.cc
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/scenario/performance_stats.h"
+
+#include <algorithm>
+
+namespace webrtc {
+namespace test {
+void VideoFramesStats::AddFrameInfo(const VideoFrameBuffer& frame,
+                                    Timestamp at_time) {  // Records size and rate statistics for one frame.
+  ++count;
+  RTC_DCHECK(at_time.IsFinite());
+  pixels.AddSample(frame.width() * frame.height());
+  resolution.AddSample(std::max(frame.width(), frame.height()));  // Larger of the two dimensions.
+  frames.AddEvent(at_time);
+}
+
+void VideoFramesStats::AddStats(const VideoFramesStats& other) {  // Merges another accumulator into this one.
+  count += other.count;
+  pixels.AddSamples(other.pixels);
+  resolution.AddSamples(other.resolution);
+  frames.AddEvents(other.frames);
+}
+
+void VideoQualityStats::AddStats(const VideoQualityStats& other) {  // Merges another quality accumulator into this one, field by field.
+  capture.AddStats(other.capture);
+  render.AddStats(other.render);
+  lost_count += other.lost_count;
+  freeze_count += other.freeze_count;
+  capture_to_decoded_delay.AddSamples(other.capture_to_decoded_delay);
+  end_to_end_delay.AddSamples(other.end_to_end_delay);
+  psnr.AddSamples(other.psnr);
+  psnr_with_freeze.AddSamples(other.psnr_with_freeze);
+  skipped_between_rendered.AddSamples(other.skipped_between_rendered);
+  freeze_duration.AddSamples(other.freeze_duration);
+  time_between_freezes.AddSamples(other.time_between_freezes);
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/scenario/performance_stats.h b/third_party/libwebrtc/test/scenario/performance_stats.h
new file mode 100644
index 0000000000..6974ab6d22
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/performance_stats.h
@@ -0,0 +1,108 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_SCENARIO_PERFORMANCE_STATS_H_
+#define TEST_SCENARIO_PERFORMANCE_STATS_H_
+
+#include "api/units/data_rate.h"
+#include "api/units/time_delta.h"
+#include "api/units/timestamp.h"
+#include "api/video/video_frame_buffer.h"
+#include "rtc_base/numerics/event_rate_counter.h"
+#include "rtc_base/numerics/sample_stats.h"
+
+namespace webrtc {
+namespace test {
+
+struct VideoFramePair {  // A captured frame matched with its decoded counterpart, plus timing metadata.
+  rtc::scoped_refptr<VideoFrameBuffer> captured;
+  rtc::scoped_refptr<VideoFrameBuffer> decoded;
+  Timestamp capture_time = Timestamp::MinusInfinity();
+  Timestamp decoded_time = Timestamp::PlusInfinity();
+  Timestamp render_time = Timestamp::PlusInfinity();
+  // A unique identifier for the spatial/temporal layer the decoded frame
+  // belongs to. Note that this does not reflect the id as defined by the
+  // underlying layer setup.
+  int layer_id = 0;
+  int capture_id = 0;
+  int decode_id = 0;
+  // Indicates the repeat count for the decoded frame. Meaning that the same
+  // decoded frame has been matched to different captured frames.
+  int repeated = 0;
+};
+
+
+struct VideoFramesStats {  // Aggregated per-frame statistics (count, sizes, rate) for one frame stream.
+  int count = 0;
+  SampleStats<double> pixels;  // width * height per frame.
+  SampleStats<double> resolution;  // max(width, height) per frame.
+  EventRateCounter frames;
+  void AddFrameInfo(const VideoFrameBuffer& frame, Timestamp at_time);
+  void AddStats(const VideoFramesStats& other);  // Merges another accumulator into this one.
+};
+
+struct VideoQualityStats {  // End-to-end video quality metrics comparing captured and rendered frames.
+  int lost_count = 0;
+  int freeze_count = 0;
+  VideoFramesStats capture;
+  VideoFramesStats render;
+  // Time from frame was captured on device to time frame was delivered from
+  // decoder.
+  SampleStats<TimeDelta> capture_to_decoded_delay;
+  // Time from frame was captured on device to time frame was displayed on
+  // device.
+  SampleStats<TimeDelta> end_to_end_delay;
+  // PSNR for delivered frames. Note that this might go up for a worse
+  // connection due to frame dropping.
+  SampleStats<double> psnr;
+  // PSNR for all frames, dropped or lost frames are compared to the last
+  // successfully delivered frame
+  SampleStats<double> psnr_with_freeze;
+  // Frames skipped between two nearest.
+  SampleStats<double> skipped_between_rendered;
+  // In the next 2 metrics freeze is a pause that is longer, than maximum:
+  // 1. 150ms
+  // 2. 3 * average time between two sequential frames.
+  // Item 1 will cover high fps video and is a duration, that is noticeable by
+  // human eye. Item 2 will cover low fps video like screen sharing.
+  SampleStats<TimeDelta> freeze_duration;
+  // Mean time between one freeze end and next freeze start.
+  SampleStats<TimeDelta> time_between_freezes;
+  void AddStats(const VideoQualityStats& other);  // Merges another quality accumulator into this one.
+};
+
+struct CollectedCallStats {  // Call-level samples collected over a scenario run.
+  SampleStats<DataRate> target_rate;
+  SampleStats<TimeDelta> pacer_delay;
+  SampleStats<TimeDelta> round_trip_time;
+  SampleStats<double> memory_usage;
+};
+
+struct CollectedAudioReceiveStats {  // Audio receive-side samples (NetEq behavior, jitter buffer).
+  SampleStats<double> expand_rate;
+  SampleStats<double> accelerate_rate;
+  SampleStats<TimeDelta> jitter_buffer;
+};
+struct CollectedVideoSendStats {  // Video send-side samples (encoder performance and bitrates).
+  SampleStats<double> encode_frame_rate;
+  SampleStats<TimeDelta> encode_time;
+  SampleStats<double> encode_usage;
+  SampleStats<DataRate> media_bitrate;
+  SampleStats<DataRate> fec_bitrate;
+};
+struct CollectedVideoReceiveStats {  // Video receive-side samples (decoder performance and resolution).
+  SampleStats<TimeDelta> decode_time;
+  SampleStats<TimeDelta> decode_time_max;
+  SampleStats<double> decode_pixels;
+  SampleStats<double> resolution;
+};
+
+} // namespace test
+} // namespace webrtc
+#endif // TEST_SCENARIO_PERFORMANCE_STATS_H_
diff --git a/third_party/libwebrtc/test/scenario/performance_stats_unittest.cc b/third_party/libwebrtc/test/scenario/performance_stats_unittest.cc
new file mode 100644
index 0000000000..8d87c87745
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/performance_stats_unittest.cc
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/scenario/performance_stats.h"
+
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace test {
+
+TEST(EventRateCounter, ReturnsCorrectTotalDuration) {  // TotalDuration() should span from first to last recorded event.
+  EventRateCounter event_rate_counter;
+  EXPECT_EQ(event_rate_counter.TotalDuration(), TimeDelta::Zero());  // No events yet.
+  event_rate_counter.AddEvent(Timestamp::Seconds(1));
+  EXPECT_EQ(event_rate_counter.TotalDuration(), TimeDelta::Zero());  // A single event spans no time.
+  event_rate_counter.AddEvent(Timestamp::Seconds(2));
+  EXPECT_EQ(event_rate_counter.TotalDuration(), TimeDelta::Seconds(1));  // last - first.
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/scenario/probing_test.cc b/third_party/libwebrtc/test/scenario/probing_test.cc
new file mode 100644
index 0000000000..86653ced9b
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/probing_test.cc
@@ -0,0 +1,135 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/gtest.h"
+#include "test/scenario/scenario.h"
+
+namespace webrtc {
+namespace test {
+
+TEST(ProbingTest, InitialProbingRampsUpTargetRateWhenNetworkIsGood) {
+ Scenario s;
+ NetworkSimulationConfig good_network;
+ good_network.bandwidth = DataRate::KilobitsPerSec(2000);
+
+ VideoStreamConfig video_config;
+ video_config.encoder.codec =
+ VideoStreamConfig::Encoder::Codec::kVideoCodecVP8;
+ CallClientConfig send_config;
+ auto* caller = s.CreateClient("caller", send_config);
+ auto* callee = s.CreateClient("callee", CallClientConfig());
+ auto route =
+ s.CreateRoutes(caller, {s.CreateSimulationNode(good_network)}, callee,
+ {s.CreateSimulationNode(NetworkSimulationConfig())});
+ s.CreateVideoStream(route->forward(), video_config);
+
+ s.RunFor(TimeDelta::Seconds(1));
+ EXPECT_GE(DataRate::BitsPerSec(caller->GetStats().send_bandwidth_bps),
+ 3 * send_config.transport.rates.start_rate);
+}
+
+TEST(ProbingTest, MidCallProbingRampupTriggeredByUpdatedBitrateConstraints) {
+ Scenario s;
+
+ const DataRate kStartRate = DataRate::KilobitsPerSec(300);
+ const DataRate kConstrainedRate = DataRate::KilobitsPerSec(100);
+ const DataRate kHighRate = DataRate::KilobitsPerSec(1500);
+
+ VideoStreamConfig video_config;
+ video_config.encoder.codec =
+ VideoStreamConfig::Encoder::Codec::kVideoCodecVP8;
+ CallClientConfig send_call_config;
+ send_call_config.transport.rates.start_rate = kStartRate;
+ send_call_config.transport.rates.max_rate = kHighRate * 2;
+ auto* caller = s.CreateClient("caller", send_call_config);
+ auto* callee = s.CreateClient("callee", CallClientConfig());
+ auto route = s.CreateRoutes(
+ caller, {s.CreateSimulationNode(NetworkSimulationConfig())}, callee,
+ {s.CreateSimulationNode(NetworkSimulationConfig())});
+ s.CreateVideoStream(route->forward(), video_config);
+
+ // Wait until initial probing rampup is done and then set a low max bitrate.
+ s.RunFor(TimeDelta::Seconds(1));
+ EXPECT_GE(DataRate::BitsPerSec(caller->GetStats().send_bandwidth_bps),
+ 5 * send_call_config.transport.rates.start_rate);
+ BitrateConstraints bitrate_config;
+ bitrate_config.max_bitrate_bps = kConstrainedRate.bps();
+ caller->UpdateBitrateConstraints(bitrate_config);
+
+ // Wait until the low send bitrate has taken effect, and then set a much
+ // higher max bitrate.
+ s.RunFor(TimeDelta::Seconds(2));
+ EXPECT_LT(DataRate::BitsPerSec(caller->GetStats().send_bandwidth_bps),
+ kConstrainedRate * 1.1);
+ bitrate_config.max_bitrate_bps = 2 * kHighRate.bps();
+ caller->UpdateBitrateConstraints(bitrate_config);
+
+ // Check that the max send bitrate is reached quicker than would be possible
+ // with simple AIMD rate control.
+ s.RunFor(TimeDelta::Seconds(1));
+ EXPECT_GE(DataRate::BitsPerSec(caller->GetStats().send_bandwidth_bps),
+ kHighRate);
+}
+
+TEST(ProbingTest, ProbesRampsUpWhenVideoEncoderConfigChanges) {
+ Scenario s;
+ const DataRate kStartRate = DataRate::KilobitsPerSec(50);
+ const DataRate kHdRate = DataRate::KilobitsPerSec(3250);
+
+ // Set up 3-layer simulcast.
+ VideoStreamConfig video_config;
+ video_config.encoder.codec =
+ VideoStreamConfig::Encoder::Codec::kVideoCodecVP8;
+ video_config.encoder.simulcast_streams = {webrtc::ScalabilityMode::kL1T3,
+ webrtc::ScalabilityMode::kL1T3,
+ webrtc::ScalabilityMode::kL1T3};
+ video_config.source.generator.width = 1280;
+ video_config.source.generator.height = 720;
+
+ CallClientConfig send_call_config;
+ send_call_config.transport.rates.start_rate = kStartRate;
+ send_call_config.transport.rates.max_rate = kHdRate * 2;
+ auto* caller = s.CreateClient("caller", send_call_config);
+ auto* callee = s.CreateClient("callee", CallClientConfig());
+ auto send_net =
+ s.CreateMutableSimulationNode([&](NetworkSimulationConfig* c) {
+ c->bandwidth = DataRate::KilobitsPerSec(200);
+ });
+ auto route =
+ s.CreateRoutes(caller, {send_net->node()}, callee,
+ {s.CreateSimulationNode(NetworkSimulationConfig())});
+ auto* video_stream = s.CreateVideoStream(route->forward(), video_config);
+
+ // Only QVGA enabled initially. Run until initial probing is done and BWE
+ // has settled.
+ video_stream->send()->UpdateActiveLayers({true, false, false});
+ s.RunFor(TimeDelta::Seconds(2));
+
+ // Remove network constraints and run for a while more, BWE should be much
+ // less than required HD rate.
+ send_net->UpdateConfig([&](NetworkSimulationConfig* c) {
+ c->bandwidth = DataRate::PlusInfinity();
+ });
+ s.RunFor(TimeDelta::Seconds(2));
+
+ DataRate bandwidth =
+ DataRate::BitsPerSec(caller->GetStats().send_bandwidth_bps);
+ EXPECT_LT(bandwidth, kHdRate / 4);
+
+ // Enable all layers, triggering a probe.
+ video_stream->send()->UpdateActiveLayers({true, true, true});
+
+ // Run for a short while and verify BWE has ramped up fast.
+ s.RunFor(TimeDelta::Seconds(2));
+ EXPECT_GT(DataRate::BitsPerSec(caller->GetStats().send_bandwidth_bps),
+ kHdRate);
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/scenario/scenario.cc b/third_party/libwebrtc/test/scenario/scenario.cc
new file mode 100644
index 0000000000..93377120a1
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/scenario.cc
@@ -0,0 +1,355 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/scenario/scenario.h"
+
+#include <algorithm>
+#include <memory>
+
+#include "absl/flags/flag.h"
+#include "absl/flags/parse.h"
+#include "absl/strings/string_view.h"
+#include "api/audio_codecs/builtin_audio_decoder_factory.h"
+#include "api/audio_codecs/builtin_audio_encoder_factory.h"
+#include "rtc_base/socket_address.h"
+#include "test/logging/file_log_writer.h"
+#include "test/network/network_emulation.h"
+#include "test/scenario/video_stream.h"
+#include "test/testsupport/file_utils.h"
+
+ABSL_FLAG(bool, scenario_logs, false, "Save logs from scenario framework.");
+ABSL_FLAG(std::string,
+ scenario_logs_root,
+ "",
+ "Output root path, based on project root if unset.");
+
+namespace webrtc {
+namespace test {
+namespace {
+
+std::unique_ptr<FileLogWriterFactory> GetScenarioLogManager(
+ absl::string_view file_name) {
+ if (absl::GetFlag(FLAGS_scenario_logs) && !file_name.empty()) {
+ std::string output_root = absl::GetFlag(FLAGS_scenario_logs_root);
+ if (output_root.empty())
+ output_root = OutputPath() + "output_data/";
+
+ auto base_filename = output_root + std::string(file_name) + ".";
+ RTC_LOG(LS_INFO) << "Saving scenario logs to: " << base_filename;
+ return std::make_unique<FileLogWriterFactory>(base_filename);
+ }
+ return nullptr;
+}
+} // namespace
+
+Scenario::Scenario()
+ : Scenario(std::unique_ptr<LogWriterFactoryInterface>(),
+ /*real_time=*/false) {}
+
+Scenario::Scenario(const testing::TestInfo* test_info)
+ : Scenario(std::string(test_info->test_suite_name()) + "/" +
+ test_info->name()) {}
+
+Scenario::Scenario(absl::string_view file_name)
+ : Scenario(file_name, /*real_time=*/false) {}
+
+Scenario::Scenario(absl::string_view file_name, bool real_time)
+ : Scenario(GetScenarioLogManager(file_name), real_time) {}
+
+Scenario::Scenario(
+ std::unique_ptr<LogWriterFactoryInterface> log_writer_factory,
+ bool real_time)
+ : log_writer_factory_(std::move(log_writer_factory)),
+ network_manager_(real_time ? TimeMode::kRealTime : TimeMode::kSimulated,
+ EmulatedNetworkStatsGatheringMode::kDefault),
+ clock_(network_manager_.time_controller()->GetClock()),
+ audio_decoder_factory_(CreateBuiltinAudioDecoderFactory()),
+ audio_encoder_factory_(CreateBuiltinAudioEncoderFactory()),
+ task_queue_(network_manager_.time_controller()
+ ->GetTaskQueueFactory()
+ ->CreateTaskQueue("Scenario",
+ TaskQueueFactory::Priority::NORMAL)) {}
+
+Scenario::~Scenario() {
+ if (start_time_.IsFinite())
+ Stop();
+ for (auto& call_client : clients_) {
+ call_client->transport_->Disconnect();
+ call_client->UnBind();
+ }
+}
+
+ColumnPrinter Scenario::TimePrinter() {
+ return ColumnPrinter::Lambda(
+ "time",
+ [this](rtc::SimpleStringBuilder& sb) {
+ sb.AppendFormat("%.3lf", Now().seconds<double>());
+ },
+ 32);
+}
+
+StatesPrinter* Scenario::CreatePrinter(absl::string_view name,
+ TimeDelta interval,
+ std::vector<ColumnPrinter> printers) {
+ std::vector<ColumnPrinter> all_printers{TimePrinter()};
+ for (auto& printer : printers)
+ all_printers.push_back(printer);
+ StatesPrinter* printer = new StatesPrinter(GetLogWriter(name), all_printers);
+ printers_.emplace_back(printer);
+ printer->PrintHeaders();
+ if (interval.IsFinite())
+ Every(interval, [printer] { printer->PrintRow(); });
+ return printer;
+}
+
+CallClient* Scenario::CreateClient(absl::string_view name,
+ CallClientConfig config) {
+ CallClient* client = new CallClient(network_manager_.time_controller(),
+ GetLogWriterFactory(name), config);
+ if (config.transport.state_log_interval.IsFinite()) {
+ Every(config.transport.state_log_interval, [this, client]() {
+ client->network_controller_factory_.LogCongestionControllerStats(Now());
+ });
+ }
+ clients_.emplace_back(client);
+ return client;
+}
+
+CallClient* Scenario::CreateClient(
+ absl::string_view name,
+ std::function<void(CallClientConfig*)> config_modifier) {
+ CallClientConfig config;
+ config_modifier(&config);
+ return CreateClient(name, config);
+}
+
+CallClientPair* Scenario::CreateRoutes(
+ CallClient* first,
+ std::vector<EmulatedNetworkNode*> send_link,
+ CallClient* second,
+ std::vector<EmulatedNetworkNode*> return_link) {
+ return CreateRoutes(first, send_link,
+ DataSize::Bytes(PacketOverhead::kDefault), second,
+ return_link, DataSize::Bytes(PacketOverhead::kDefault));
+}
+
+CallClientPair* Scenario::CreateRoutes(
+ CallClient* first,
+ std::vector<EmulatedNetworkNode*> send_link,
+ DataSize first_overhead,
+ CallClient* second,
+ std::vector<EmulatedNetworkNode*> return_link,
+ DataSize second_overhead) {
+ CallClientPair* client_pair = new CallClientPair(first, second);
+ ChangeRoute(client_pair->forward(), send_link, first_overhead);
+ ChangeRoute(client_pair->reverse(), return_link, second_overhead);
+ client_pairs_.emplace_back(client_pair);
+ return client_pair;
+}
+
+void Scenario::ChangeRoute(std::pair<CallClient*, CallClient*> clients,
+ std::vector<EmulatedNetworkNode*> over_nodes) {
+ ChangeRoute(clients, over_nodes, DataSize::Bytes(PacketOverhead::kDefault));
+}
+
+void Scenario::ChangeRoute(std::pair<CallClient*, CallClient*> clients,
+ std::vector<EmulatedNetworkNode*> over_nodes,
+ DataSize overhead) {
+ EmulatedRoute* route = network_manager_.CreateRoute(over_nodes);
+ uint16_t port = clients.second->Bind(route->to);
+ auto addr = rtc::SocketAddress(route->to->GetPeerLocalAddress(), port);
+ clients.first->transport_->Connect(route->from, addr, overhead);
+}
+
+EmulatedNetworkNode* Scenario::CreateSimulationNode(
+ std::function<void(NetworkSimulationConfig*)> config_modifier) {
+ NetworkSimulationConfig config;
+ config_modifier(&config);
+ return CreateSimulationNode(config);
+}
+
+EmulatedNetworkNode* Scenario::CreateSimulationNode(
+ NetworkSimulationConfig config) {
+ return network_manager_.CreateEmulatedNode(
+ SimulationNode::CreateBehavior(config));
+}
+
+SimulationNode* Scenario::CreateMutableSimulationNode(
+ std::function<void(NetworkSimulationConfig*)> config_modifier) {
+ NetworkSimulationConfig config;
+ config_modifier(&config);
+ return CreateMutableSimulationNode(config);
+}
+
+SimulationNode* Scenario::CreateMutableSimulationNode(
+ NetworkSimulationConfig config) {
+ std::unique_ptr<SimulatedNetwork> behavior =
+ SimulationNode::CreateBehavior(config);
+ SimulatedNetwork* behavior_ptr = behavior.get();
+ auto* emulated_node =
+ network_manager_.CreateEmulatedNode(std::move(behavior));
+ simulation_nodes_.emplace_back(
+ new SimulationNode(config, behavior_ptr, emulated_node));
+ return simulation_nodes_.back().get();
+}
+
+void Scenario::TriggerPacketBurst(std::vector<EmulatedNetworkNode*> over_nodes,
+ size_t num_packets,
+ size_t packet_size) {
+ network_manager_.CreateCrossTrafficRoute(over_nodes)
+ ->TriggerPacketBurst(num_packets, packet_size);
+}
+
+void Scenario::NetworkDelayedAction(
+ std::vector<EmulatedNetworkNode*> over_nodes,
+ size_t packet_size,
+ std::function<void()> action) {
+ network_manager_.CreateCrossTrafficRoute(over_nodes)
+ ->NetworkDelayedAction(packet_size, action);
+}
+
+VideoStreamPair* Scenario::CreateVideoStream(
+ std::pair<CallClient*, CallClient*> clients,
+ std::function<void(VideoStreamConfig*)> config_modifier) {
+ VideoStreamConfig config;
+ config_modifier(&config);
+ return CreateVideoStream(clients, config);
+}
+
+VideoStreamPair* Scenario::CreateVideoStream(
+ std::pair<CallClient*, CallClient*> clients,
+ VideoStreamConfig config) {
+ std::vector<RtpExtension> extensions = GetVideoRtpExtensions(config);
+ clients.first->SetVideoReceiveRtpHeaderExtensions(extensions);
+ clients.second->SetVideoReceiveRtpHeaderExtensions(extensions);
+ video_streams_.emplace_back(
+ new VideoStreamPair(clients.first, clients.second, config));
+ return video_streams_.back().get();
+}
+
+AudioStreamPair* Scenario::CreateAudioStream(
+ std::pair<CallClient*, CallClient*> clients,
+ std::function<void(AudioStreamConfig*)> config_modifier) {
+ AudioStreamConfig config;
+ config_modifier(&config);
+ return CreateAudioStream(clients, config);
+}
+
+AudioStreamPair* Scenario::CreateAudioStream(
+ std::pair<CallClient*, CallClient*> clients,
+ AudioStreamConfig config) {
+ std::vector<RtpExtension> extensions = GetAudioRtpExtensions(config);
+ clients.first->SetAudioReceiveRtpHeaderExtensions(extensions);
+ clients.second->SetAudioReceiveRtpHeaderExtensions(extensions);
+ audio_streams_.emplace_back(
+ new AudioStreamPair(clients.first, audio_encoder_factory_, clients.second,
+ audio_decoder_factory_, config));
+ return audio_streams_.back().get();
+}
+
+void Scenario::Every(TimeDelta interval,
+ absl::AnyInvocable<void(TimeDelta)> function) {
+ RepeatingTaskHandle::DelayedStart(
+ task_queue_.get(), interval,
+ [interval, function = std::move(function)]() mutable {
+ function(interval);
+ return interval;
+ });
+}
+
+void Scenario::Every(TimeDelta interval, absl::AnyInvocable<void()> function) {
+ RepeatingTaskHandle::DelayedStart(
+ task_queue_.get(), interval,
+ [interval, function = std::move(function)]() mutable {
+ function();
+ return interval;
+ });
+}
+
+void Scenario::Post(absl::AnyInvocable<void() &&> function) {
+ task_queue_->PostTask(std::move(function));
+}
+
+void Scenario::At(TimeDelta offset, absl::AnyInvocable<void() &&> function) {
+ RTC_DCHECK_GT(offset, TimeSinceStart());
+ task_queue_->PostDelayedTask(std::move(function), TimeUntilTarget(offset));
+}
+
+void Scenario::RunFor(TimeDelta duration) {
+ if (start_time_.IsInfinite())
+ Start();
+ network_manager_.time_controller()->AdvanceTime(duration);
+}
+
+void Scenario::RunUntil(TimeDelta target_time_since_start) {
+ RunFor(TimeUntilTarget(target_time_since_start));
+}
+
+void Scenario::RunUntil(TimeDelta target_time_since_start,
+ TimeDelta check_interval,
+ std::function<bool()> exit_function) {
+ if (start_time_.IsInfinite())
+ Start();
+ while (check_interval >= TimeUntilTarget(target_time_since_start)) {
+ network_manager_.time_controller()->AdvanceTime(check_interval);
+ if (exit_function())
+ return;
+ }
+ network_manager_.time_controller()->AdvanceTime(
+ TimeUntilTarget(target_time_since_start));
+}
+
+void Scenario::Start() {
+ start_time_ = clock_->CurrentTime();
+ for (auto& stream_pair : video_streams_)
+ stream_pair->receive()->Start();
+ for (auto& stream_pair : audio_streams_)
+ stream_pair->receive()->Start();
+ for (auto& stream_pair : video_streams_) {
+ if (stream_pair->config_.autostart) {
+ stream_pair->send()->Start();
+ }
+ }
+ for (auto& stream_pair : audio_streams_) {
+ if (stream_pair->config_.autostart) {
+ stream_pair->send()->Start();
+ }
+ }
+}
+
+void Scenario::Stop() {
+ RTC_DCHECK(start_time_.IsFinite());
+ for (auto& stream_pair : video_streams_) {
+ stream_pair->send()->Stop();
+ }
+ for (auto& stream_pair : audio_streams_)
+ stream_pair->send()->Stop();
+ for (auto& stream_pair : video_streams_)
+ stream_pair->receive()->Stop();
+ for (auto& stream_pair : audio_streams_)
+ stream_pair->receive()->Stop();
+ start_time_ = Timestamp::PlusInfinity();
+}
+
+Timestamp Scenario::Now() {
+ return clock_->CurrentTime();
+}
+
+TimeDelta Scenario::TimeSinceStart() {
+ if (start_time_.IsInfinite())
+ return TimeDelta::Zero();
+ return Now() - start_time_;
+}
+
+TimeDelta Scenario::TimeUntilTarget(TimeDelta target_time_offset) {
+ return target_time_offset - TimeSinceStart();
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/scenario/scenario.h b/third_party/libwebrtc/test/scenario/scenario.h
new file mode 100644
index 0000000000..cad9210002
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/scenario.h
@@ -0,0 +1,189 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_SCENARIO_SCENARIO_H_
+#define TEST_SCENARIO_SCENARIO_H_
+#include <memory>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "absl/functional/any_invocable.h"
+#include "absl/strings/string_view.h"
+#include "api/task_queue/task_queue_base.h"
+#include "api/test/time_controller.h"
+#include "rtc_base/fake_clock.h"
+#include "rtc_base/task_utils/repeating_task.h"
+#include "test/gtest.h"
+#include "test/logging/log_writer.h"
+#include "test/network/network_emulation_manager.h"
+#include "test/scenario/audio_stream.h"
+#include "test/scenario/call_client.h"
+#include "test/scenario/column_printer.h"
+#include "test/scenario/network_node.h"
+#include "test/scenario/scenario_config.h"
+#include "test/scenario/video_stream.h"
+
+namespace webrtc {
+namespace test {
+// Scenario is a class owning everything for a test scenario. It creates and
+// holds network nodes, call clients and media streams. It also provides methods
+// for changing behavior at runtime. Since it always keeps ownership of the
+// created components, it generally returns non-owning pointers. It maintains
+// the life of its objects until it is destroyed.
+// For methods accepting configuration structs, a modifier function interface is
+// generally provided. This allows simple partial overriding of the default
+// configuration.
+class Scenario {
+ public:
+ Scenario();
+ explicit Scenario(const testing::TestInfo* test_info);
+ explicit Scenario(absl::string_view file_name);
+ Scenario(absl::string_view file_name, bool real_time);
+ Scenario(std::unique_ptr<LogWriterFactoryInterface> log_writer_manager,
+ bool real_time);
+
+ ~Scenario();
+
+ Scenario(const Scenario&) = delete;
+ Scenario& operator=(const Scenario&) = delete;
+
+ NetworkEmulationManagerImpl* net() { return &network_manager_; }
+
+ EmulatedNetworkNode* CreateSimulationNode(NetworkSimulationConfig config);
+ EmulatedNetworkNode* CreateSimulationNode(
+ std::function<void(NetworkSimulationConfig*)> config_modifier);
+
+ SimulationNode* CreateMutableSimulationNode(NetworkSimulationConfig config);
+ SimulationNode* CreateMutableSimulationNode(
+ std::function<void(NetworkSimulationConfig*)> config_modifier);
+
+ CallClient* CreateClient(absl::string_view name, CallClientConfig config);
+ CallClient* CreateClient(
+ absl::string_view name,
+ std::function<void(CallClientConfig*)> config_modifier);
+
+ CallClientPair* CreateRoutes(CallClient* first,
+ std::vector<EmulatedNetworkNode*> send_link,
+ CallClient* second,
+ std::vector<EmulatedNetworkNode*> return_link);
+
+ CallClientPair* CreateRoutes(CallClient* first,
+ std::vector<EmulatedNetworkNode*> send_link,
+ DataSize first_overhead,
+ CallClient* second,
+ std::vector<EmulatedNetworkNode*> return_link,
+ DataSize second_overhead);
+
+ void ChangeRoute(std::pair<CallClient*, CallClient*> clients,
+ std::vector<EmulatedNetworkNode*> over_nodes);
+
+ void ChangeRoute(std::pair<CallClient*, CallClient*> clients,
+ std::vector<EmulatedNetworkNode*> over_nodes,
+ DataSize overhead);
+
+ VideoStreamPair* CreateVideoStream(
+ std::pair<CallClient*, CallClient*> clients,
+ std::function<void(VideoStreamConfig*)> config_modifier);
+ VideoStreamPair* CreateVideoStream(
+ std::pair<CallClient*, CallClient*> clients,
+ VideoStreamConfig config);
+
+ AudioStreamPair* CreateAudioStream(
+ std::pair<CallClient*, CallClient*> clients,
+ std::function<void(AudioStreamConfig*)> config_modifier);
+ AudioStreamPair* CreateAudioStream(
+ std::pair<CallClient*, CallClient*> clients,
+ AudioStreamConfig config);
+
+ // Runs the provided function with a fixed interval. For real time tests,
+ // `function` starts being called after `interval` from the call to Every().
+ void Every(TimeDelta interval, absl::AnyInvocable<void(TimeDelta)> function);
+ void Every(TimeDelta interval, absl::AnyInvocable<void()> function);
+
+ // Runs the provided function on the internal task queue. This ensures that
+ // it's run on the main thread for simulated time tests.
+ void Post(absl::AnyInvocable<void() &&> function);
+
+ // Runs the provided function after the given duration has passed. For real
+ // time tests, `function` is called after `offset` has elapsed from the call
+ // to At().
+ void At(TimeDelta offset, absl::AnyInvocable<void() &&> function);
+
+ // Sends a packet over the nodes and runs `action` when it has been delivered.
+ void NetworkDelayedAction(std::vector<EmulatedNetworkNode*> over_nodes,
+ size_t packet_size,
+ std::function<void()> action);
+
+ // Runs the scenario for the given time.
+ void RunFor(TimeDelta duration);
+ // Runs the scenario until `target_time_since_start`.
+ void RunUntil(TimeDelta target_time_since_start);
+ // Runs the scenario until `target_time_since_start` or `exit_function`
+ // returns true. `exit_function` is polled after each `check_interval` has
+ // passed.
+ void RunUntil(TimeDelta target_time_since_start,
+ TimeDelta check_interval,
+ std::function<bool()> exit_function);
+ void Start();
+ void Stop();
+
+ // Triggers sending of dummy packets over the given nodes.
+ void TriggerPacketBurst(std::vector<EmulatedNetworkNode*> over_nodes,
+ size_t num_packets,
+ size_t packet_size);
+
+ ColumnPrinter TimePrinter();
+ StatesPrinter* CreatePrinter(absl::string_view name,
+ TimeDelta interval,
+ std::vector<ColumnPrinter> printers);
+
+ // Returns the current time.
+ Timestamp Now();
+ // Returns the duration of the current session so far.
+ TimeDelta TimeSinceStart();
+
+ std::unique_ptr<RtcEventLogOutput> GetLogWriter(absl::string_view name) {
+ if (!log_writer_factory_ || name.empty())
+ return nullptr;
+ return log_writer_factory_->Create(name);
+ }
+ std::unique_ptr<LogWriterFactoryInterface> GetLogWriterFactory(
+ absl::string_view name) {
+ if (!log_writer_factory_ || name.empty())
+ return nullptr;
+ return std::make_unique<LogWriterFactoryAddPrefix>(
+ log_writer_factory_.get(), name);
+ }
+
+ private:
+ TimeDelta TimeUntilTarget(TimeDelta target_time_offset);
+
+ const std::unique_ptr<LogWriterFactoryInterface> log_writer_factory_;
+ NetworkEmulationManagerImpl network_manager_;
+ Clock* clock_;
+
+ std::vector<std::unique_ptr<CallClient>> clients_;
+ std::vector<std::unique_ptr<CallClientPair>> client_pairs_;
+ std::vector<std::unique_ptr<VideoStreamPair>> video_streams_;
+ std::vector<std::unique_ptr<AudioStreamPair>> audio_streams_;
+ std::vector<std::unique_ptr<SimulationNode>> simulation_nodes_;
+ std::vector<std::unique_ptr<StatesPrinter>> printers_;
+
+ rtc::scoped_refptr<AudioDecoderFactory> audio_decoder_factory_;
+ rtc::scoped_refptr<AudioEncoderFactory> audio_encoder_factory_;
+
+ Timestamp start_time_ = Timestamp::PlusInfinity();
+ // Defined last so it's destroyed first.
+ std::unique_ptr<TaskQueueBase, TaskQueueDeleter> task_queue_;
+};
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_SCENARIO_SCENARIO_H_
diff --git a/third_party/libwebrtc/test/scenario/scenario_config.cc b/third_party/libwebrtc/test/scenario/scenario_config.cc
new file mode 100644
index 0000000000..3f8a70a162
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/scenario_config.cc
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/scenario/scenario_config.h"
+
+namespace webrtc {
+namespace test {
+
+TransportControllerConfig::Rates::Rates() = default;
+TransportControllerConfig::Rates::Rates(
+ const TransportControllerConfig::Rates&) = default;
+TransportControllerConfig::Rates::~Rates() = default;
+
+PacketStreamConfig::PacketStreamConfig() = default;
+PacketStreamConfig::PacketStreamConfig(const PacketStreamConfig&) = default;
+PacketStreamConfig::~PacketStreamConfig() = default;
+
+VideoStreamConfig::Encoder::Encoder() = default;
+VideoStreamConfig::Encoder::Encoder(const VideoStreamConfig::Encoder&) =
+ default;
+VideoStreamConfig::Encoder::~Encoder() = default;
+
+VideoStreamConfig::Stream::Stream() = default;
+VideoStreamConfig::Stream::Stream(const VideoStreamConfig::Stream&) = default;
+VideoStreamConfig::Stream::~Stream() = default;
+
+AudioStreamConfig::AudioStreamConfig() = default;
+AudioStreamConfig::AudioStreamConfig(const AudioStreamConfig&) = default;
+AudioStreamConfig::~AudioStreamConfig() = default;
+
+AudioStreamConfig::Encoder::Encoder() = default;
+AudioStreamConfig::Encoder::Encoder(const AudioStreamConfig::Encoder&) =
+ default;
+AudioStreamConfig::Encoder::~Encoder() = default;
+
+AudioStreamConfig::Stream::Stream() = default;
+AudioStreamConfig::Stream::Stream(const AudioStreamConfig::Stream&) = default;
+AudioStreamConfig::Stream::~Stream() = default;
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/scenario/scenario_config.h b/third_party/libwebrtc/test/scenario/scenario_config.h
new file mode 100644
index 0000000000..9ce99401d7
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/scenario_config.h
@@ -0,0 +1,231 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_SCENARIO_SCENARIO_CONFIG_H_
+#define TEST_SCENARIO_SCENARIO_CONFIG_H_
+
+#include <stddef.h>
+
+#include <string>
+
+#include "absl/types/optional.h"
+#include "api/fec_controller.h"
+#include "api/rtp_parameters.h"
+#include "api/test/frame_generator_interface.h"
+#include "api/transport/network_control.h"
+#include "api/units/data_rate.h"
+#include "api/units/data_size.h"
+#include "api/units/time_delta.h"
+#include "api/video/video_codec_type.h"
+#include "api/video_codecs/scalability_mode.h"
+#include "test/scenario/performance_stats.h"
+
+namespace webrtc {
+namespace test {
+struct PacketOverhead {
+ static constexpr size_t kSrtp = 10;
+ static constexpr size_t kStun = 4;
+ // TURN messages can be sent either with or without an established channel.
+ // In the latter case, a TURN Send/Data Indication is sent which has
+ // significantly more overhead.
+ static constexpr size_t kTurnChannelMessage = 4;
+ static constexpr size_t kTurnIndicationMessage = 36;
+ static constexpr size_t kDefault = kSrtp;
+};
+struct TransportControllerConfig {
+ struct Rates {
+ Rates();
+ Rates(const Rates&);
+ ~Rates();
+ DataRate min_rate = DataRate::KilobitsPerSec(30);
+ DataRate max_rate = DataRate::KilobitsPerSec(3000);
+ DataRate start_rate = DataRate::KilobitsPerSec(300);
+ } rates;
+ NetworkControllerFactoryInterface* cc_factory = nullptr;
+ TimeDelta state_log_interval = TimeDelta::Millis(100);
+};
+
+struct CallClientConfig {
+ TransportControllerConfig transport;
+ // Allows the pacer to send out multiple packets in a burst.
+ // The number of bytes that can be sent in one burst is pacer_burst_interval *
+ // current bwe. 40ms is the default Chrome setting.
+ TimeDelta pacer_burst_interval = TimeDelta::Millis(40);
+ const FieldTrialsView* field_trials = nullptr;
+};
+
+struct PacketStreamConfig {
+ PacketStreamConfig();
+ PacketStreamConfig(const PacketStreamConfig&);
+ ~PacketStreamConfig();
+ int frame_rate = 30;
+ DataRate max_data_rate = DataRate::Infinity();
+ DataSize max_packet_size = DataSize::Bytes(1400);
+ DataSize min_frame_size = DataSize::Bytes(100);
+ double keyframe_multiplier = 1;
+ DataSize packet_overhead = DataSize::Bytes(PacketOverhead::kDefault);
+};
+
+struct VideoStreamConfig {
+ bool autostart = true;
+ struct Source {
+ enum Capture {
+ kGenerator,
+ kVideoFile,
+ kGenerateSlides,
+ kImageSlides,
+ // Support for explicit frame triggers should be added here if needed.
+ } capture = Capture::kGenerator;
+ struct Slides {
+ TimeDelta change_interval = TimeDelta::Seconds(10);
+ struct Generator {
+ int width = 1600;
+ int height = 1200;
+ } generator;
+ struct Images {
+ struct Crop {
+ TimeDelta scroll_duration = TimeDelta::Seconds(0);
+ absl::optional<int> width;
+ absl::optional<int> height;
+ } crop;
+ int width = 1850;
+ int height = 1110;
+ std::vector<std::string> paths = {
+ "web_screenshot_1850_1110",
+ "presentation_1850_1110",
+ "photo_1850_1110",
+ "difficult_photo_1850_1110",
+ };
+ } images;
+ } slides;
+ struct Generator {
+ using PixelFormat = FrameGeneratorInterface::OutputType;
+ PixelFormat pixel_format = PixelFormat::kI420;
+ int width = 320;
+ int height = 180;
+ } generator;
+ struct VideoFile {
+ std::string name;
+ // Must be set to width and height of the source video file.
+ int width = 0;
+ int height = 0;
+ } video_file;
+ int framerate = 30;
+ } source;
+ struct Encoder {
+ Encoder();
+ Encoder(const Encoder&);
+ ~Encoder();
+ enum class ContentType {
+ kVideo,
+ kScreen,
+ } content_type = ContentType::kVideo;
+ enum Implementation { kFake, kSoftware, kHardware } implementation = kFake;
+ struct Fake {
+ DataRate max_rate = DataRate::Infinity();
+ } fake;
+
+ using Codec = VideoCodecType;
+ Codec codec = Codec::kVideoCodecGeneric;
+ absl::optional<DataRate> max_data_rate;
+ absl::optional<DataRate> min_data_rate;
+ absl::optional<int> max_framerate;
+ // Counted in frame count.
+ absl::optional<int> key_frame_interval = 3000;
+ bool frame_dropping = true;
+ struct SingleLayer {
+ bool denoising = true;
+ bool automatic_scaling = true;
+ } single;
+ std::vector<webrtc::ScalabilityMode> simulcast_streams = {
+ webrtc::ScalabilityMode::kL1T1};
+
+ DegradationPreference degradation_preference =
+ DegradationPreference::MAINTAIN_FRAMERATE;
+ bool suspend_below_min_bitrate = false;
+ } encoder;
+ struct Stream {
+ Stream();
+ Stream(const Stream&);
+ ~Stream();
+ bool abs_send_time = false;
+ bool packet_feedback = true;
+ bool use_rtx = true;
+ DataRate pad_to_rate = DataRate::Zero();
+ TimeDelta nack_history_time = TimeDelta::Millis(1000);
+ bool use_flexfec = false;
+ bool use_ulpfec = false;
+ FecControllerFactoryInterface* fec_controller_factory = nullptr;
+ } stream;
+ struct Rendering {
+ enum Type { kFake } type = kFake;
+ std::string sync_group;
+ } render;
+ struct Hooks {
+ std::vector<std::function<void(const VideoFramePair&)>> frame_pair_handlers;
+ } hooks;
+};
+
+struct AudioStreamConfig {
+ AudioStreamConfig();
+ AudioStreamConfig(const AudioStreamConfig&);
+ ~AudioStreamConfig();
+ bool autostart = true;
+ struct Source {
+ int channels = 1;
+ } source;
+ bool network_adaptation = false;
+ struct NetworkAdaptation {
+ struct FrameLength {
+ double min_packet_loss_for_decrease = 0;
+ double max_packet_loss_for_increase = 1;
+ DataRate min_rate_for_20_ms = DataRate::Zero();
+ DataRate max_rate_for_60_ms = DataRate::Infinity();
+ DataRate min_rate_for_60_ms = DataRate::Zero();
+ DataRate max_rate_for_120_ms = DataRate::Infinity();
+ } frame;
+ std::string binary_proto;
+ } adapt;
+ struct Encoder {
+ Encoder();
+ Encoder(const Encoder&);
+ ~Encoder();
+ bool allocate_bitrate = false;
+ bool enable_dtx = false;
+ DataRate fixed_rate = DataRate::KilobitsPerSec(32);
+ // Overrides fixed rate.
+ absl::optional<DataRate> min_rate;
+ absl::optional<DataRate> max_rate;
+ TimeDelta initial_frame_length = TimeDelta::Millis(20);
+ } encoder;
+ struct Stream {
+ Stream();
+ Stream(const Stream&);
+ ~Stream();
+ bool abs_send_time = true;
+ bool in_bandwidth_estimation = true;
+ } stream;
+ struct Rendering {
+ std::string sync_group;
+ } render;
+};
+
+// TODO(srte): Merge this with BuiltInNetworkBehaviorConfig.
+struct NetworkSimulationConfig {
+ DataRate bandwidth = DataRate::Infinity();
+ TimeDelta delay = TimeDelta::Zero();
+ TimeDelta delay_std_dev = TimeDelta::Zero();
+ double loss_rate = 0;
+ absl::optional<int> packet_queue_length_limit;
+ DataSize packet_overhead = DataSize::Zero();
+};
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_SCENARIO_SCENARIO_CONFIG_H_
diff --git a/third_party/libwebrtc/test/scenario/scenario_unittest.cc b/third_party/libwebrtc/test/scenario/scenario_unittest.cc
new file mode 100644
index 0000000000..6861151a2d
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/scenario_unittest.cc
@@ -0,0 +1,196 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/scenario/scenario.h"
+
+#include <atomic>
+
+#include "api/test/network_emulation/create_cross_traffic.h"
+#include "api/test/network_emulation/cross_traffic.h"
+#include "test/field_trial.h"
+#include "test/gtest.h"
+#include "test/logging/memory_log_writer.h"
+#include "test/scenario/stats_collection.h"
+
+namespace webrtc {
+namespace test {
+// Smoke test: builds a two-way audio+video call over simulated networks with
+// random-walk cross traffic, then checks that packets actually traverse the
+// network and that the estimated send bitrate moves away from the configured
+// 300 kbps starting rate.
+TEST(ScenarioTest, StartsAndStopsWithoutErrors) {
+  std::atomic<bool> packet_received(false);
+  std::atomic<bool> bitrate_changed(false);
+  Scenario s;
+  CallClientConfig call_client_config;
+  call_client_config.transport.rates.start_rate = DataRate::KilobitsPerSec(300);
+  auto* alice = s.CreateClient("alice", call_client_config);
+  auto* bob = s.CreateClient("bob", call_client_config);
+  NetworkSimulationConfig network_config;
+  auto alice_net = s.CreateSimulationNode(network_config);
+  auto bob_net = s.CreateSimulationNode(network_config);
+  auto route = s.CreateRoutes(alice, {alice_net}, bob, {bob_net});
+
+  // Streams in both directions.
+  VideoStreamConfig video_stream_config;
+  s.CreateVideoStream(route->forward(), video_stream_config);
+  s.CreateVideoStream(route->reverse(), video_stream_config);
+
+  AudioStreamConfig audio_stream_config;
+  audio_stream_config.encoder.min_rate = DataRate::KilobitsPerSec(6);
+  audio_stream_config.encoder.max_rate = DataRate::KilobitsPerSec(64);
+  audio_stream_config.encoder.allocate_bitrate = true;
+  audio_stream_config.stream.in_bandwidth_estimation = false;
+  s.CreateAudioStream(route->forward(), audio_stream_config);
+  s.CreateAudioStream(route->reverse(), audio_stream_config);
+
+  RandomWalkConfig cross_traffic_config;
+  s.net()->StartCrossTraffic(CreateRandomWalkCrossTraffic(
+      s.net()->CreateCrossTrafficRoute({alice_net}), cross_traffic_config));
+
+  // Flag once 100 packets (see NetworkDelayedAction) have passed the links.
+  s.NetworkDelayedAction({alice_net, bob_net}, 100,
+                         [&packet_received] { packet_received = true; });
+  // Poll the BWE: any deviation from the 300 kbps start rate on both sides
+  // indicates the estimator is running.
+  s.Every(TimeDelta::Millis(10), [alice, bob, &bitrate_changed] {
+    if (alice->GetStats().send_bandwidth_bps != 300000 &&
+        bob->GetStats().send_bandwidth_bps != 300000)
+      bitrate_changed = true;
+  });
+  s.RunUntil(TimeDelta::Seconds(2), TimeDelta::Millis(5),
+             [&bitrate_changed, &packet_received] {
+               return packet_received && bitrate_changed;
+             });
+  EXPECT_TRUE(packet_received);
+  EXPECT_TRUE(bitrate_changed);
+}
+namespace {
+// Creates a two-client video+audio call over a 1000 kbps, 50 ms simulated
+// link. When `analyzer` is non-null the video source is the "foreman_cif"
+// file (352x288 @ 30 fps) encoded with software VP8, and each
+// captured/decoded frame pair is forwarded to the analyzer.
+void SetupVideoCall(Scenario& s, VideoQualityAnalyzer* analyzer) {
+  CallClientConfig call_config;
+  auto* alice = s.CreateClient("alice", call_config);
+  auto* bob = s.CreateClient("bob", call_config);
+  NetworkSimulationConfig network_config;
+  network_config.bandwidth = DataRate::KilobitsPerSec(1000);
+  network_config.delay = TimeDelta::Millis(50);
+  auto alice_net = s.CreateSimulationNode(network_config);
+  auto bob_net = s.CreateSimulationNode(network_config);
+  auto route = s.CreateRoutes(alice, {alice_net}, bob, {bob_net});
+  VideoStreamConfig video;
+  if (analyzer) {
+    video.source.capture = VideoStreamConfig::Source::Capture::kVideoFile;
+    video.source.video_file.name = "foreman_cif";
+    video.source.video_file.width = 352;
+    video.source.video_file.height = 288;
+    video.source.framerate = 30;
+    video.encoder.codec = VideoStreamConfig::Encoder::Codec::kVideoCodecVP8;
+    video.encoder.implementation =
+        VideoStreamConfig::Encoder::Implementation::kSoftware;
+    video.hooks.frame_pair_handlers = {analyzer->Handler()};
+  }
+  s.CreateVideoStream(route->forward(), video);
+  s.CreateAudioStream(route->forward(), AudioStreamConfig());
+}
+} // namespace
+
+// Runs the video call in simulated time ("real_time" = false) and checks the
+// resulting quality against regression targets.
+TEST(ScenarioTest, SimTimeEncoding) {
+  VideoQualityAnalyzerConfig analyzer_config;
+  analyzer_config.psnr_coverage = 0.1;
+  VideoQualityAnalyzer analyzer(analyzer_config);
+  {
+    // Scoped so the Scenario is destroyed (and all processing flushed) before
+    // the stats are inspected.
+    Scenario s("scenario/encode_sim", false);
+    SetupVideoCall(s, &analyzer);
+    s.RunFor(TimeDelta::Seconds(2));
+  }
+  // Regression tests based on previous runs.
+  EXPECT_EQ(analyzer.stats().lost_count, 0);
+  EXPECT_NEAR(analyzer.stats().psnr_with_freeze.Mean(), 38, 5);
+}
+
+// TODO(bugs.webrtc.org/10515): Remove this when performance has been improved.
+#if defined(WEBRTC_IOS) && defined(WEBRTC_ARCH_ARM64) && !defined(NDEBUG)
+#define MAYBE_RealTimeEncoding DISABLED_RealTimeEncoding
+#else
+#define MAYBE_RealTimeEncoding RealTimeEncoding
+#endif
+// Same call as SimTimeEncoding but driven by real time ("real_time" = true);
+// targets are far looser since wall-clock scheduling is nondeterministic.
+TEST(ScenarioTest, MAYBE_RealTimeEncoding) {
+  VideoQualityAnalyzerConfig analyzer_config;
+  analyzer_config.psnr_coverage = 0.1;
+  VideoQualityAnalyzer analyzer(analyzer_config);
+  {
+    Scenario s("scenario/encode_real", true);
+    SetupVideoCall(s, &analyzer);
+    s.RunFor(TimeDelta::Seconds(2));
+  }
+  // Regression tests based on previous runs.
+  EXPECT_LT(analyzer.stats().lost_count, 2);
+  // This far below expected but ensures that we get something.
+  EXPECT_GT(analyzer.stats().psnr_with_freeze.Mean(), 10);
+}
+
+// Runs the call without an analyzer (default frame source / no PSNR
+// computation) in simulated time; only checks that it completes.
+TEST(ScenarioTest, SimTimeFakeing) {
+  Scenario s("scenario/encode_sim", false);
+  SetupVideoCall(s, nullptr);
+  s.RunFor(TimeDelta::Seconds(2));
+}
+
+// Verifies that running a scenario with a memory-backed log factory produces
+// a non-empty RTC event log for the sending client.
+TEST(ScenarioTest, WritesToRtcEventLog) {
+  MemoryLogStorage storage;
+  {
+    // Scoped so the Scenario is destroyed, flushing its logs, before they are
+    // inspected.
+    Scenario s(storage.CreateFactory(), false);
+    SetupVideoCall(s, nullptr);
+    s.RunFor(TimeDelta::Seconds(1));
+  }
+  // We expect that a rtc event log has been created and that it has some data.
+  // (The previous revision copied storage.logs() into an unused local.)
+  EXPECT_GE(storage.logs().at("alice.rtc.dat").size(), 1u);
+}
+
+// Verifies that video retransmissions occur in a lossy audio+video call where
+// audio packets are part of send-side BWE transport feedback.
+TEST(ScenarioTest,
+     RetransmitsVideoPacketsInAudioAndVideoCallWithSendSideBweAndLoss) {
+  // Make sure audio packets are included in transport feedback.
+  test::ScopedFieldTrials override_field_trials(
+      "WebRTC-Audio-ABWENoTWCC/Disabled/");
+
+  Scenario s;
+  CallClientConfig call_client_config;
+  call_client_config.transport.rates.start_rate = DataRate::KilobitsPerSec(300);
+  auto* alice = s.CreateClient("alice", call_client_config);
+  auto* bob = s.CreateClient("bob", call_client_config);
+  NetworkSimulationConfig network_config;
+  // Add some loss and delay.
+  network_config.delay = TimeDelta::Millis(200);
+  network_config.loss_rate = 0.05;
+  auto alice_net = s.CreateSimulationNode(network_config);
+  auto bob_net = s.CreateSimulationNode(network_config);
+  auto route = s.CreateRoutes(alice, {alice_net}, bob, {bob_net});
+
+  // First add an audio stream, then a video stream.
+  // Needed to make sure audio RTP module is selected first when sending
+  // transport feedback message.
+  AudioStreamConfig audio_stream_config;
+  audio_stream_config.encoder.min_rate = DataRate::KilobitsPerSec(6);
+  audio_stream_config.encoder.max_rate = DataRate::KilobitsPerSec(64);
+  audio_stream_config.encoder.allocate_bitrate = true;
+  audio_stream_config.stream.in_bandwidth_estimation = true;
+  s.CreateAudioStream(route->forward(), audio_stream_config);
+  s.CreateAudioStream(route->reverse(), audio_stream_config);
+
+  VideoStreamConfig video_stream_config;
+  auto video = s.CreateVideoStream(route->forward(), video_stream_config);
+  s.CreateVideoStream(route->reverse(), video_stream_config);
+
+  // Run for 10 seconds.
+  s.RunFor(TimeDelta::Seconds(10));
+  // Make sure retransmissions have happened.
+  int retransmit_packets = 0;
+
+  // Stats must be queried on the client's own task queue; SendTask blocks
+  // until the lambda has run there.
+  VideoSendStream::Stats stats;
+  alice->SendTask([&]() { stats = video->send()->GetStats(); });
+
+  for (const auto& substream : stats.substreams) {
+    retransmit_packets += substream.second.rtp_stats.retransmitted.packets;
+  }
+  EXPECT_GT(retransmit_packets, 0);
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/scenario/stats_collection.cc b/third_party/libwebrtc/test/scenario/stats_collection.cc
new file mode 100644
index 0000000000..e32696de71
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/stats_collection.cc
@@ -0,0 +1,190 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/scenario/stats_collection.h"
+
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+#include "rtc_base/memory_usage.h"
+#include "rtc_base/thread.h"
+
+namespace webrtc {
+namespace test {
+
+// If `writer` is supplied, per-frame results are additionally logged to it as
+// whitespace-separated text; the column header is written immediately.
+VideoQualityAnalyzer::VideoQualityAnalyzer(
+    VideoQualityAnalyzerConfig config,
+    std::unique_ptr<RtcEventLogOutput> writer)
+    : config_(config), writer_(std::move(writer)) {
+  if (writer_) {
+    PrintHeaders();
+  }
+}
+
+VideoQualityAnalyzer::~VideoQualityAnalyzer() = default;
+
+// Writes the column header line for the per-frame text log (seven columns,
+// matching the fields logged by VideoLayerAnalyzer::HandleFramePair).
+void VideoQualityAnalyzer::PrintHeaders() {
+  writer_->Write(
+      "capture_time render_time capture_width capture_height render_width "
+      "render_height psnr\n");
+}
+
+// Returns a callback that feeds frame pairs into this analyzer; suitable for
+// installing as a VideoStreamConfig frame_pair_handler.
+std::function<void(const VideoFramePair&)> VideoQualityAnalyzer::Handler() {
+  return [this](const VideoFramePair& frame_pair) {
+    HandleFramePair(frame_pair);
+  };
+}
+
+// Routes a scored frame pair to the per-layer analyzer for its layer id and
+// invalidates the cached aggregate so stats() recomputes on next access.
+void VideoQualityAnalyzer::HandleFramePair(VideoFramePair sample, double psnr) {
+  layer_analyzers_[sample.layer_id].HandleFramePair(sample, psnr,
+                                                    writer_.get());
+  cached_.reset();
+}
+
+// Computes PSNR for the pair (NAN if the frame was never decoded) and hands
+// the scored sample over, hopping to the configured thread when one is set.
+void VideoQualityAnalyzer::HandleFramePair(VideoFramePair sample) {
+  double psnr = NAN;
+  if (sample.decoded)
+    psnr = I420PSNR(*sample.captured->ToI420(), *sample.decoded->ToI420());
+
+  if (config_.thread) {
+    // Init-capture moves `sample` into the closure and `mutable` lets it be
+    // moved out again. The previous `[this, sample, psnr]` capture made the
+    // closure copy const, so the inner std::move silently copied instead.
+    config_.thread->PostTask(
+        [this, sample = std::move(sample), psnr]() mutable {
+          HandleFramePair(std::move(sample), psnr);
+        });
+  } else {
+    HandleFramePair(std::move(sample), psnr);
+  }
+}
+
+// Returns a snapshot of per-layer statistics, one entry per layer id.
+std::vector<VideoQualityStats> VideoQualityAnalyzer::layer_stats() const {
+  std::vector<VideoQualityStats> res;
+  // Reserve up front to avoid reallocation while copying the stats.
+  res.reserve(layer_analyzers_.size());
+  for (const auto& layer : layer_analyzers_)
+    res.push_back(layer.second.stats_);
+  return res;
+}
+
+// Returns the stats aggregated over all layers. The aggregate is cached and
+// only rebuilt after a new frame pair invalidates it (see HandleFramePair).
+VideoQualityStats& VideoQualityAnalyzer::stats() {
+  if (!cached_) {
+    cached_ = VideoQualityStats();
+    for (auto& layer : layer_analyzers_)
+      cached_->AddStats(layer.second.stats_);
+  }
+  return *cached_;
+}
+
+// Updates the layer stats for one captured frame and its (possibly missing
+// or repeated) decoded counterpart, and optionally logs one text line.
+void VideoLayerAnalyzer::HandleFramePair(VideoFramePair sample,
+                                         double psnr,
+                                         RtcEventLogOutput* writer) {
+  RTC_CHECK(sample.captured);
+  HandleCapturedFrame(sample);
+  if (!sample.decoded) {
+    // Can only happen in the beginning of a call or if the resolution is
+    // reduced. Otherwise we will detect a freeze.
+    ++stats_.lost_count;
+    ++skip_count_;
+  } else {
+    stats_.psnr_with_freeze.AddSample(psnr);
+    if (sample.repeated) {
+      ++stats_.freeze_count;
+      ++skip_count_;
+    } else {
+      stats_.psnr.AddSample(psnr);
+      HandleRenderedFrame(sample);
+    }
+  }
+  if (writer) {
+    // Seven fields, matching the header written by PrintHeaders():
+    // capture_time render_time capture_width capture_height render_width
+    // render_height psnr. The previous format string contained an extra
+    // "%.3f" (eight specifiers for seven arguments), which is undefined
+    // behavior in printf-style formatting and misaligned every column after
+    // render_time.
+    LogWriteFormat(writer, "%.3f %.3f %i %i %i %i %.3f\n",
+                   sample.capture_time.seconds<double>(),
+                   sample.render_time.seconds<double>(),
+                   sample.captured->width(), sample.captured->height(),
+                   sample.decoded ? sample.decoded->width() : 0,
+                   sample.decoded ? sample.decoded->height() : 0, psnr);
+  }
+}
+
+// Records capture-side frame info; the first capture also seeds
+// last_freeze_time_ so time_between_freezes has a valid start point.
+void VideoLayerAnalyzer::HandleCapturedFrame(const VideoFramePair& sample) {
+  stats_.capture.AddFrameInfo(*sample.captured, sample.capture_time);
+  if (last_freeze_time_.IsInfinite())
+    last_freeze_time_ = sample.capture_time;
+}
+
+// Records delay and render stats for a successfully rendered frame and
+// detects freezes from unusually long gaps between rendered frames.
+void VideoLayerAnalyzer::HandleRenderedFrame(const VideoFramePair& sample) {
+  stats_.capture_to_decoded_delay.AddSample(sample.decoded_time -
+                                            sample.capture_time);
+  stats_.end_to_end_delay.AddSample(sample.render_time - sample.capture_time);
+  stats_.render.AddFrameInfo(*sample.decoded, sample.render_time);
+  stats_.skipped_between_rendered.AddSample(skip_count_);
+  skip_count_ = 0;
+
+  if (last_render_time_.IsFinite()) {
+    RTC_DCHECK(sample.render_time.IsFinite());
+    TimeDelta render_interval = sample.render_time - last_render_time_;
+    TimeDelta mean_interval = stats_.render.frames.interval().Mean();
+    // A gap counts as a freeze when it exceeds the mean render interval by
+    // 150 ms, or is more than three times the mean interval.
+    if (render_interval > TimeDelta::Millis(150) + mean_interval ||
+        render_interval > 3 * mean_interval) {
+      stats_.freeze_duration.AddSample(render_interval);
+      stats_.time_between_freezes.AddSample(last_render_time_ -
+                                            last_freeze_time_);
+      last_freeze_time_ = sample.render_time;
+    }
+  }
+  last_render_time_ = sample.render_time;
+}
+
+// Folds one Call::Stats snapshot into the running aggregates. The positive
+// guards skip fields for which the call has no estimate yet. Process memory
+// usage is sampled on every call.
+void CallStatsCollector::AddStats(Call::Stats sample) {
+  if (sample.send_bandwidth_bps > 0)
+    stats_.target_rate.AddSampleBps(sample.send_bandwidth_bps);
+  if (sample.pacer_delay_ms > 0)
+    stats_.pacer_delay.AddSample(TimeDelta::Millis(sample.pacer_delay_ms));
+  if (sample.rtt_ms > 0)
+    stats_.round_trip_time.AddSample(TimeDelta::Millis(sample.rtt_ms));
+  stats_.memory_usage.AddSample(rtc::GetProcessResidentSizeBytes());
+}
+
+// Folds one audio receive-stream stats snapshot into the running aggregates.
+void AudioReceiveStatsCollector::AddStats(
+    AudioReceiveStreamInterface::Stats sample) {
+  stats_.expand_rate.AddSample(sample.expand_rate);
+  stats_.accelerate_rate.AddSample(sample.accelerate_rate);
+  stats_.jitter_buffer.AddSampleMs(sample.jitter_buffer_ms);
+}
+
+// Folds one video send-stream stats snapshot into the running aggregates and
+// derives the FEC bitrate from the byte-count delta since the last call.
+void VideoSendStatsCollector::AddStats(VideoSendStream::Stats sample,
+                                       Timestamp at_time) {
+  // It's not certain that we yet have estimates for any of these stats.
+  // Check that they are positive before mixing them in.
+  if (sample.encode_frame_rate <= 0)
+    return;
+
+  stats_.encode_frame_rate.AddSample(sample.encode_frame_rate);
+  stats_.encode_time.AddSampleMs(sample.avg_encode_time_ms);
+  stats_.encode_usage.AddSample(sample.encode_usage_percent / 100.0);
+  stats_.media_bitrate.AddSampleBps(sample.media_bitrate_bps);
+
+  // Sum FEC payload+padding across all substreams.
+  size_t fec_bytes = 0;
+  for (const auto& kv : sample.substreams) {
+    fec_bytes += kv.second.rtp_stats.fec.payload_bytes +
+                 kv.second.rtp_stats.fec.padding_bytes;
+  }
+  if (last_update_.IsFinite()) {
+    // NOTE(review): assumes the cumulative counter never decreases between
+    // samples; a decrease would underflow the unsigned delta — confirm.
+    auto fec_delta = DataSize::Bytes(fec_bytes - last_fec_bytes_);
+    auto time_delta = at_time - last_update_;
+    stats_.fec_bitrate.AddSample(fec_delta / time_delta);
+  }
+  last_fec_bytes_ = fec_bytes;
+  last_update_ = at_time;
+}
+
+// Folds one video receive-stream stats snapshot into the running aggregates;
+// zero values indicate no decode/resolution data yet and are skipped.
+void VideoReceiveStatsCollector::AddStats(
+    VideoReceiveStreamInterface::Stats sample) {
+  if (sample.decode_ms > 0)
+    stats_.decode_time.AddSampleMs(sample.decode_ms);
+  if (sample.max_decode_ms > 0)
+    stats_.decode_time_max.AddSampleMs(sample.max_decode_ms);
+  if (sample.width > 0 && sample.height > 0) {
+    stats_.decode_pixels.AddSample(sample.width * sample.height);
+    stats_.resolution.AddSample(sample.height);
+  }
+}
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/scenario/stats_collection.h b/third_party/libwebrtc/test/scenario/stats_collection.h
new file mode 100644
index 0000000000..1f5d8daea7
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/stats_collection.h
@@ -0,0 +1,110 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_SCENARIO_STATS_COLLECTION_H_
+#define TEST_SCENARIO_STATS_COLLECTION_H_
+
+#include <map>
+#include <memory>
+
+#include "absl/types/optional.h"
+#include "call/call.h"
+#include "rtc_base/thread.h"
+#include "test/logging/log_writer.h"
+#include "test/scenario/performance_stats.h"
+
+namespace webrtc {
+namespace test {
+
+struct VideoQualityAnalyzerConfig {
+  // Presumably the fraction of frames to score for PSNR — not read in this
+  // file; verify against the frame matcher before relying on it.
+  double psnr_coverage = 1;
+  // If set, frame-pair handling is posted to this thread instead of running
+  // on the caller's thread.
+  rtc::Thread* thread = nullptr;
+};
+
+// Accumulates quality statistics (PSNR, freezes, delays) for a single video
+// layer. All members are public; owned and driven by VideoQualityAnalyzer.
+class VideoLayerAnalyzer {
+ public:
+  void HandleCapturedFrame(const VideoFramePair& sample);
+  void HandleRenderedFrame(const VideoFramePair& sample);
+  void HandleFramePair(VideoFramePair sample,
+                       double psnr,
+                       RtcEventLogOutput* writer);
+  VideoQualityStats stats_;
+  Timestamp last_capture_time_ = Timestamp::MinusInfinity();
+  Timestamp last_render_time_ = Timestamp::MinusInfinity();
+  Timestamp last_freeze_time_ = Timestamp::MinusInfinity();
+  // Frames lost or frozen since the last successfully rendered frame.
+  int skip_count_ = 0;
+};
+
+// Scores captured/decoded frame pairs (PSNR) and aggregates quality stats
+// per layer; optionally writes a per-frame text log via `writer`.
+class VideoQualityAnalyzer {
+ public:
+  explicit VideoQualityAnalyzer(
+      VideoQualityAnalyzerConfig config = VideoQualityAnalyzerConfig(),
+      std::unique_ptr<RtcEventLogOutput> writer = nullptr);
+  ~VideoQualityAnalyzer();
+  void HandleFramePair(VideoFramePair sample);
+  std::vector<VideoQualityStats> layer_stats() const;
+  // Aggregate over all layers; cached between frame pairs.
+  VideoQualityStats& stats();
+  void PrintHeaders();
+  void PrintFrameInfo(const VideoFramePair& sample);
+  // Callback suitable for VideoStreamConfig::Hooks::frame_pair_handlers.
+  std::function<void(const VideoFramePair&)> Handler();
+
+ private:
+  void HandleFramePair(VideoFramePair sample, double psnr);
+  const VideoQualityAnalyzerConfig config_;
+  std::map<int, VideoLayerAnalyzer> layer_analyzers_;
+  const std::unique_ptr<RtcEventLogOutput> writer_;
+  // Cached cross-layer aggregate; reset whenever a new sample arrives.
+  absl::optional<VideoQualityStats> cached_;
+};
+
+// Accumulates Call-level stats (target rate, pacer delay, RTT, memory).
+class CallStatsCollector {
+ public:
+  void AddStats(Call::Stats sample);
+  CollectedCallStats& stats() { return stats_; }
+
+ private:
+  CollectedCallStats stats_;
+};
+// Accumulates audio receive-stream stats (expand/accelerate, jitter buffer).
+class AudioReceiveStatsCollector {
+ public:
+  void AddStats(AudioReceiveStreamInterface::Stats sample);
+  CollectedAudioReceiveStats& stats() { return stats_; }
+
+ private:
+  CollectedAudioReceiveStats stats_;
+};
+// Accumulates video send-stream stats; derives FEC bitrate from successive
+// samples, hence the last-update bookkeeping.
+class VideoSendStatsCollector {
+ public:
+  void AddStats(VideoSendStream::Stats sample, Timestamp at_time);
+  CollectedVideoSendStats& stats() { return stats_; }
+
+ private:
+  CollectedVideoSendStats stats_;
+  Timestamp last_update_ = Timestamp::MinusInfinity();
+  size_t last_fec_bytes_ = 0;
+};
+// Accumulates video receive-stream stats (decode time, resolution).
+class VideoReceiveStatsCollector {
+ public:
+  void AddStats(VideoReceiveStreamInterface::Stats sample);
+  CollectedVideoReceiveStats& stats() { return stats_; }
+
+ private:
+  CollectedVideoReceiveStats stats_;
+};
+
+// Convenience bundle of one collector of each kind for a call under test.
+struct CallStatsCollectors {
+  CallStatsCollector call;
+  AudioReceiveStatsCollector audio_receive;
+  VideoSendStatsCollector video_send;
+  VideoReceiveStatsCollector video_receive;
+};
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_SCENARIO_STATS_COLLECTION_H_
diff --git a/third_party/libwebrtc/test/scenario/stats_collection_unittest.cc b/third_party/libwebrtc/test/scenario/stats_collection_unittest.cc
new file mode 100644
index 0000000000..9f46f10073
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/stats_collection_unittest.cc
@@ -0,0 +1,114 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/scenario/stats_collection.h"
+
+#include "test/gtest.h"
+#include "test/scenario/scenario.h"
+
+namespace webrtc {
+namespace test {
+namespace {
+// Sets up a one-way software-VP8 video call plus an audio stream over
+// `network_config`, feeding frame pairs to `analyzer` and sampling all
+// collectors once per simulated second.
+void CreateAnalyzedStream(Scenario* s,
+                          NetworkSimulationConfig network_config,
+                          VideoQualityAnalyzer* analyzer,
+                          CallStatsCollectors* collectors) {
+  VideoStreamConfig config;
+  config.encoder.codec = VideoStreamConfig::Encoder::Codec::kVideoCodecVP8;
+  config.encoder.implementation =
+      VideoStreamConfig::Encoder::Implementation::kSoftware;
+  config.hooks.frame_pair_handlers = {analyzer->Handler()};
+  auto* caller = s->CreateClient("caller", CallClientConfig());
+  auto* callee = s->CreateClient("callee", CallClientConfig());
+  auto route =
+      s->CreateRoutes(caller, {s->CreateSimulationNode(network_config)}, callee,
+                      {s->CreateSimulationNode(NetworkSimulationConfig())});
+  VideoStreamPair* video = s->CreateVideoStream(route->forward(), config);
+  auto* audio = s->CreateAudioStream(route->forward(), AudioStreamConfig());
+  s->Every(TimeDelta::Seconds(1), [=] {
+    collectors->call.AddStats(caller->GetStats());
+
+    // Stream stats must be queried on the owning client's task queue;
+    // SendTask blocks until done.
+    VideoSendStream::Stats send_stats;
+    caller->SendTask([&]() { send_stats = video->send()->GetStats(); });
+    collectors->video_send.AddStats(send_stats, s->Now());
+
+    AudioReceiveStreamInterface::Stats receive_stats;
+    caller->SendTask([&]() { receive_stats = audio->receive()->GetStats(); });
+    collectors->audio_receive.AddStats(receive_stats);
+
+    // Querying the video stats from within the expected runtime environment
+    // (i.e. the TQ that belongs to the CallClient, not the Scenario TQ that
+    // we're currently on).
+    VideoReceiveStreamInterface::Stats video_receive_stats;
+    auto* video_stream = video->receive();
+    callee->SendTask([&video_stream, &video_receive_stats]() {
+      video_receive_stats = video_stream->GetStats();
+    });
+    collectors->video_receive.AddStats(video_receive_stats);
+  });
+}
+} // namespace
+
+// On a 1000 kbps link the call should reach high PSNR and healthy rates.
+TEST(ScenarioAnalyzerTest, PsnrIsHighWhenNetworkIsGood) {
+  VideoQualityAnalyzer analyzer;
+  CallStatsCollectors stats;
+  {
+    // Scoped so the Scenario is destroyed, flushing pending frame matches,
+    // before the stats are inspected.
+    Scenario s;
+    NetworkSimulationConfig good_network;
+    good_network.bandwidth = DataRate::KilobitsPerSec(1000);
+    CreateAnalyzedStream(&s, good_network, &analyzer, &stats);
+    s.RunFor(TimeDelta::Seconds(3));
+  }
+  // This is a change detecting test, the targets are based on previous runs and
+  // might change due to changes in configuration and encoder etc. The main
+  // purpose is to show how the stats can be used. To avoid being overly
+  // sensistive to change, the ranges are chosen to be quite large.
+  EXPECT_NEAR(analyzer.stats().psnr_with_freeze.Mean(), 43, 10);
+  EXPECT_NEAR(stats.call.stats().target_rate.Mean().kbps(), 700, 300);
+  EXPECT_NEAR(stats.video_send.stats().media_bitrate.Mean().kbps(), 500, 200);
+  EXPECT_NEAR(stats.video_receive.stats().resolution.Mean(), 180, 10);
+  EXPECT_NEAR(stats.audio_receive.stats().jitter_buffer.Mean().ms(), 40, 20);
+}
+
+// On a constrained, lossy link the call should degrade to low PSNR and rate.
+TEST(ScenarioAnalyzerTest, PsnrIsLowWhenNetworkIsBad) {
+  VideoQualityAnalyzer analyzer;
+  CallStatsCollectors stats;
+  {
+    Scenario s;
+    NetworkSimulationConfig bad_network;
+    bad_network.bandwidth = DataRate::KilobitsPerSec(100);
+    bad_network.loss_rate = 0.02;
+    CreateAnalyzedStream(&s, bad_network, &analyzer, &stats);
+    s.RunFor(TimeDelta::Seconds(3));
+  }
+  // This is a change detecting test, the targets are based on previous runs and
+  // might change due to changes in configuration and encoder etc.
+  EXPECT_NEAR(analyzer.stats().psnr_with_freeze.Mean(), 20, 10);
+  EXPECT_NEAR(stats.call.stats().target_rate.Mean().kbps(), 75, 50);
+  EXPECT_NEAR(stats.video_send.stats().media_bitrate.Mean().kbps(), 70, 30);
+  EXPECT_NEAR(stats.video_receive.stats().resolution.Mean(), 180, 10);
+  EXPECT_NEAR(stats.audio_receive.stats().jitter_buffer.Mean().ms(), 250, 200);
+}
+
+// With a 5 s one-way delay and a 100 ms run, frames are captured but none can
+// arrive; the analyzer should report captures without renders.
+TEST(ScenarioAnalyzerTest, CountsCapturedButNotRendered) {
+  VideoQualityAnalyzer analyzer;
+  CallStatsCollectors stats;
+  {
+    Scenario s;
+    NetworkSimulationConfig long_delays;
+    long_delays.delay = TimeDelta::Seconds(5);
+    CreateAnalyzedStream(&s, long_delays, &analyzer, &stats);
+    // Enough time to send frames but not enough to deliver.
+    s.RunFor(TimeDelta::Millis(100));
+  }
+  EXPECT_GE(analyzer.stats().capture.count, 1);
+  EXPECT_EQ(analyzer.stats().render.count, 0);
+}
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/scenario/video_frame_matcher.cc b/third_party/libwebrtc/test/scenario/video_frame_matcher.cc
new file mode 100644
index 0000000000..dc8cd59756
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/video_frame_matcher.cc
@@ -0,0 +1,188 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/scenario/video_frame_matcher.h"
+
+#include <utility>
+
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/event.h"
+
+namespace webrtc {
+namespace test {
+namespace {
+constexpr int kThumbWidth = 96;
+constexpr int kThumbHeight = 96;
+} // namespace
+
+// All matching state is owned by `task_queue_`; public entry points post to
+// it rather than locking.
+VideoFrameMatcher::VideoFrameMatcher(
+    std::vector<std::function<void(const VideoFramePair&)> >
+        frame_pair_handlers)
+    : frame_pair_handlers_(std::move(frame_pair_handlers)),
+      task_queue_("VideoAnalyzer") {}
+
+// Blocks until any still-unmatched captured frames have been flushed to the
+// handlers before the matcher is torn down.
+VideoFrameMatcher::~VideoFrameMatcher() {
+  task_queue_.SendTask([this] { Finalize(); });
+}
+
+// Creates (or resets) the matching state for `layer_id` on the task queue.
+void VideoFrameMatcher::RegisterLayer(int layer_id) {
+  task_queue_.PostTask([this, layer_id] { layers_[layer_id] = VideoLayer(); });
+}
+
+// Records a captured frame and, per layer, pre-scores it against the layer's
+// most recent decoded frame so that a later better decode can replace the
+// match (see OnDecodedFrame).
+void VideoFrameMatcher::OnCapturedFrame(const VideoFrame& frame,
+                                        Timestamp at_time) {
+  CapturedFrame captured;
+  captured.id = next_capture_id_++;
+  captured.capture_time = at_time;
+  captured.frame = frame.video_frame_buffer();
+  // Downscaled thumbnail used for cheap SSE-based comparison with decodes.
+  // (The previous revision accidentally joined this assignment to the
+  // following statement with a comma operator; a semicolon states the
+  // intent.)
+  captured.thumb = ScaleVideoFrameBuffer(*frame.video_frame_buffer()->ToI420(),
+                                         kThumbWidth, kThumbHeight);
+  task_queue_.PostTask([this, captured]() {
+    for (auto& layer : layers_) {
+      CapturedFrame copy = captured;
+      // Only compare against decodes that are not larger than the capture.
+      if (layer.second.last_decode &&
+          layer.second.last_decode->frame->width() <= captured.frame->width()) {
+        copy.best_score = I420SSE(*captured.thumb->GetI420(),
+                                  *layer.second.last_decode->thumb->GetI420());
+        copy.best_decode = layer.second.last_decode;
+      }
+      layer.second.captured_frames.push_back(std::move(copy));
+    }
+  });
+}
+
+// Records a decoded frame for `layer_id` and re-scores all pending captured
+// frames against it. A captured frame whose best score stops improving is
+// considered matched; matched frames at the queue front are emitted in order.
+void VideoFrameMatcher::OnDecodedFrame(const VideoFrame& frame,
+                                       int layer_id,
+                                       Timestamp render_time,
+                                       Timestamp at_time) {
+  rtc::scoped_refptr<DecodedFrame> decoded(new DecodedFrame{});
+  decoded->decoded_time = at_time;
+  decoded->render_time = render_time;
+  decoded->frame = frame.video_frame_buffer();
+  decoded->thumb = ScaleVideoFrameBuffer(*frame.video_frame_buffer()->ToI420(),
+                                         kThumbWidth, kThumbHeight);
+
+  task_queue_.PostTask([this, decoded, layer_id] {
+    auto& layer = layers_[layer_id];
+    decoded->id = layer.next_decoded_id++;
+    layer.last_decode = decoded;
+    for (auto& captured : layer.captured_frames) {
+      // We can't match with a smaller capture.
+      if (captured.frame->width() < decoded->frame->width()) {
+        captured.matched = true;
+        continue;
+      }
+      double score =
+          I420SSE(*captured.thumb->GetI420(), *decoded->thumb->GetI420());
+      if (score < captured.best_score) {
+        // This decode is the best candidate so far; keep waiting in case a
+        // later decode matches even better.
+        captured.best_score = score;
+        captured.best_decode = decoded;
+        captured.matched = false;
+      } else {
+        captured.matched = true;
+      }
+    }
+    // Emit finished matches from the front, preserving capture order.
+    while (!layer.captured_frames.empty() &&
+           layer.captured_frames.front().matched) {
+      HandleMatch(std::move(layer.captured_frames.front()), layer_id);
+      layer.captured_frames.pop_front();
+    }
+  });
+}
+
+// True when at least one frame-pair handler is installed, i.e. matching has
+// an observer and is worth performing.
+bool VideoFrameMatcher::Active() const {
+  return !frame_pair_handlers_.empty();
+}
+
+// Builds a VideoFramePair from a finished match and delivers it to all
+// handlers. `decoded` stays null when the capture never got a decode.
+void VideoFrameMatcher::HandleMatch(VideoFrameMatcher::CapturedFrame captured,
+                                    int layer_id) {
+  VideoFramePair frame_pair;
+  frame_pair.layer_id = layer_id;
+  frame_pair.captured = captured.frame;
+  frame_pair.capture_id = captured.id;
+  frame_pair.capture_time = captured.capture_time;
+  if (captured.best_decode) {
+    frame_pair.decode_id = captured.best_decode->id;
+    frame_pair.decoded = captured.best_decode->frame;
+    frame_pair.decoded_time = captured.best_decode->decoded_time;
+    // We can't render frames before they have been decoded.
+    frame_pair.render_time = std::max(captured.best_decode->render_time,
+                                      captured.best_decode->decoded_time);
+    // Nonzero when the same decode already matched an earlier capture, i.e.
+    // the display repeated a frame.
+    frame_pair.repeated = captured.best_decode->repeat_count++;
+  }
+  for (auto& handler : frame_pair_handlers_)
+    handler(frame_pair);
+}
+
+// Flushes all still-pending captured frames as matches (with whatever best
+// decode they have, possibly none). Called on the task queue at destruction.
+void VideoFrameMatcher::Finalize() {
+  for (auto& layer : layers_) {
+    while (!layer.second.captured_frames.empty()) {
+      HandleMatch(std::move(layer.second.captured_frames.front()), layer.first);
+      layer.second.captured_frames.pop_front();
+    }
+  }
+}
+
+CapturedFrameTap::CapturedFrameTap(Clock* clock, VideoFrameMatcher* matcher)
+    : clock_(clock), matcher_(matcher) {}
+
+// Forwards each captured frame to the matcher, timestamped with the current
+// clock time.
+void CapturedFrameTap::OnFrame(const VideoFrame& frame) {
+  matcher_->OnCapturedFrame(frame, clock_->CurrentTime());
+}
+// Discarded frames are only counted, not forwarded.
+void CapturedFrameTap::OnDiscardedFrame() {
+  discarded_count_++;
+}
+
+ForwardingCapturedFrameTap::ForwardingCapturedFrameTap(
+    Clock* clock,
+    VideoFrameMatcher* matcher,
+    rtc::VideoSourceInterface<VideoFrame>* source)
+    : clock_(clock), matcher_(matcher), source_(source) {}
+
+// Feeds the frame to the matcher, then forwards it to the downstream sink.
+void ForwardingCapturedFrameTap::OnFrame(const VideoFrame& frame) {
+  RTC_CHECK(sink_);
+  matcher_->OnCapturedFrame(frame, clock_->CurrentTime());
+  sink_->OnFrame(frame);
+}
+void ForwardingCapturedFrameTap::OnDiscardedFrame() {
+  RTC_CHECK(sink_);
+  discarded_count_++;
+  sink_->OnDiscardedFrame();
+}
+
+// Interposes itself between `source_` and the single supported sink: the
+// sink is remembered and the tap registers itself with the real source.
+void ForwardingCapturedFrameTap::AddOrUpdateSink(
+    VideoSinkInterface<VideoFrame>* sink,
+    const rtc::VideoSinkWants& wants) {
+  if (!sink_)
+    sink_ = sink;
+  // Only one sink is supported; updates must come from the same sink.
+  RTC_DCHECK_EQ(sink_, sink);
+  source_->AddOrUpdateSink(this, wants);
+}
+void ForwardingCapturedFrameTap::RemoveSink(
+    VideoSinkInterface<VideoFrame>* sink) {
+  source_->RemoveSink(this);
+  sink_ = nullptr;
+}
+
+// Registers its layer with the matcher on construction so decoded frames can
+// be attributed to the right per-layer matching state.
+DecodedFrameTap::DecodedFrameTap(Clock* clock,
+                                 VideoFrameMatcher* matcher,
+                                 int layer_id)
+    : clock_(clock), matcher_(matcher), layer_id_(layer_id) {
+  matcher_->RegisterLayer(layer_id_);
+}
+
+// Forwards each decoded frame, using the frame's render time and the current
+// clock time as the decode time.
+void DecodedFrameTap::OnFrame(const VideoFrame& frame) {
+  matcher_->OnDecodedFrame(frame, layer_id_,
+                           Timestamp::Millis(frame.render_time_ms()),
+                           clock_->CurrentTime());
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/scenario/video_frame_matcher.h b/third_party/libwebrtc/test/scenario/video_frame_matcher.h
new file mode 100644
index 0000000000..a3aa85447d
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/video_frame_matcher.h
@@ -0,0 +1,134 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_SCENARIO_VIDEO_FRAME_MATCHER_H_
+#define TEST_SCENARIO_VIDEO_FRAME_MATCHER_H_
+
+#include <deque>
+#include <map>
+#include <memory>
+#include <set>
+#include <string>
+#include <vector>
+
+#include "api/units/timestamp.h"
+#include "api/video/video_frame.h"
+#include "api/video/video_sink_interface.h"
+#include "api/video/video_source_interface.h"
+#include "rtc_base/ref_counted_object.h"
+#include "rtc_base/task_queue_for_test.h"
+#include "system_wrappers/include/clock.h"
+#include "test/scenario/performance_stats.h"
+
+namespace webrtc {
+namespace test {
+
+// Matches captured frames to their decoded counterparts per layer using
+// thumbnail SSE scores, and delivers finished VideoFramePairs to the
+// installed handlers. All matching state lives on an internal task queue.
+class VideoFrameMatcher {
+ public:
+  explicit VideoFrameMatcher(
+      std::vector<std::function<void(const VideoFramePair&)>>
+          frame_pair_handlers);
+  ~VideoFrameMatcher();
+  void RegisterLayer(int layer_id);
+  void OnCapturedFrame(const VideoFrame& frame, Timestamp at_time);
+  void OnDecodedFrame(const VideoFrame& frame,
+                      int layer_id,
+                      Timestamp render_time,
+                      Timestamp at_time);
+  // True when at least one frame-pair handler is installed.
+  bool Active() const;
+
+ private:
+  struct DecodedFrameBase {
+    int id;
+    Timestamp decoded_time = Timestamp::PlusInfinity();
+    Timestamp render_time = Timestamp::PlusInfinity();
+    rtc::scoped_refptr<VideoFrameBuffer> frame;
+    // Downscaled copy used for cheap comparison with captured thumbnails.
+    rtc::scoped_refptr<VideoFrameBuffer> thumb;
+    // Number of captures this decode has already been matched to.
+    int repeat_count = 0;
+  };
+  using DecodedFrame = rtc::FinalRefCountedObject<DecodedFrameBase>;
+  struct CapturedFrame {
+    int id;
+    Timestamp capture_time = Timestamp::PlusInfinity();
+    rtc::scoped_refptr<VideoFrameBuffer> frame;
+    rtc::scoped_refptr<VideoFrameBuffer> thumb;
+    // Lowest SSE seen so far against any decode; INFINITY until scored.
+    double best_score = INFINITY;
+    rtc::scoped_refptr<DecodedFrame> best_decode;
+    // Set once no future decode can improve the match.
+    bool matched = false;
+  };
+  struct VideoLayer {
+    int layer_id;
+    std::deque<CapturedFrame> captured_frames;
+    rtc::scoped_refptr<DecodedFrame> last_decode;
+    int next_decoded_id = 1;
+  };
+  void HandleMatch(CapturedFrame captured, int layer_id);
+  void Finalize();
+  int next_capture_id_ = 1;
+  std::vector<std::function<void(const VideoFramePair&)>> frame_pair_handlers_;
+  std::map<int, VideoLayer> layers_;
+  TaskQueueForTest task_queue_;
+};
+
+// Terminal sink that feeds captured frames into a VideoFrameMatcher and
+// counts discarded ones.
+class CapturedFrameTap : public rtc::VideoSinkInterface<VideoFrame> {
+ public:
+  CapturedFrameTap(Clock* clock, VideoFrameMatcher* matcher);
+  CapturedFrameTap(CapturedFrameTap&) = delete;
+  CapturedFrameTap& operator=(CapturedFrameTap&) = delete;
+
+  void OnFrame(const VideoFrame& frame) override;
+  void OnDiscardedFrame() override;
+
+ private:
+  Clock* const clock_;
+  VideoFrameMatcher* const matcher_;
+  int discarded_count_ = 0;
+};
+
+// Sink+source adapter that sits between a video source and one downstream
+// sink, feeding each captured frame to the matcher before forwarding it.
+class ForwardingCapturedFrameTap
+    : public rtc::VideoSinkInterface<VideoFrame>,
+      public rtc::VideoSourceInterface<VideoFrame> {
+ public:
+  ForwardingCapturedFrameTap(Clock* clock,
+                             VideoFrameMatcher* matcher,
+                             rtc::VideoSourceInterface<VideoFrame>* source);
+  ForwardingCapturedFrameTap(ForwardingCapturedFrameTap&) = delete;
+  ForwardingCapturedFrameTap& operator=(ForwardingCapturedFrameTap&) = delete;
+
+  // VideoSinkInterface interface
+  void OnFrame(const VideoFrame& frame) override;
+  void OnDiscardedFrame() override;
+
+  // VideoSourceInterface interface
+  void AddOrUpdateSink(VideoSinkInterface<VideoFrame>* sink,
+                       const rtc::VideoSinkWants& wants) override;
+  void RemoveSink(VideoSinkInterface<VideoFrame>* sink) override;
+
+ private:
+  Clock* const clock_;
+  VideoFrameMatcher* const matcher_;
+  rtc::VideoSourceInterface<VideoFrame>* const source_;
+  // Single supported downstream sink; set on first AddOrUpdateSink.
+  VideoSinkInterface<VideoFrame>* sink_ = nullptr;
+  int discarded_count_ = 0;
+};
+
+// Sink that feeds decoded frames for one layer into a VideoFrameMatcher.
+class DecodedFrameTap : public rtc::VideoSinkInterface<VideoFrame> {
+ public:
+  DecodedFrameTap(Clock* clock, VideoFrameMatcher* matcher, int layer_id);
+  // VideoSinkInterface interface
+  void OnFrame(const VideoFrame& frame) override;
+
+ private:
+  Clock* const clock_;
+  VideoFrameMatcher* const matcher_;
+  int layer_id_;
+};
+} // namespace test
+} // namespace webrtc
+#endif // TEST_SCENARIO_VIDEO_FRAME_MATCHER_H_
diff --git a/third_party/libwebrtc/test/scenario/video_stream.cc b/third_party/libwebrtc/test/scenario/video_stream.cc
new file mode 100644
index 0000000000..8d627d8893
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/video_stream.cc
@@ -0,0 +1,636 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/scenario/video_stream.h"
+
+#include <algorithm>
+#include <memory>
+#include <utility>
+
+#include "absl/strings/match.h"
+#include "api/test/create_frame_generator.h"
+#include "api/test/frame_generator_interface.h"
+#include "api/test/video/function_video_encoder_factory.h"
+#include "api/video/builtin_video_bitrate_allocator_factory.h"
+#include "media/base/media_constants.h"
+#include "media/engine/internal_decoder_factory.h"
+#include "media/engine/internal_encoder_factory.h"
+#include "media/engine/webrtc_video_engine.h"
+#include "modules/video_coding/svc/scalability_mode_util.h"
+#include "test/call_test.h"
+#include "test/fake_encoder.h"
+#include "test/scenario/hardware_codecs.h"
+#include "test/testsupport/file_utils.h"
+#include "video/config/encoder_stream_factory.h"
+
+namespace webrtc {
+namespace test {
+namespace {
+// RTP header extension IDs used by scenario video streams. IDs are assigned
+// sequentially; 0 is not a valid extension ID.
+enum : int { // The first valid value is 1.
+  kTransportSequenceNumberExtensionId = 1,
+  kAbsSendTimeExtensionId,
+  kVideoContentTypeExtensionId,
+  kVideoRotationRtpExtensionId,
+};
+
+// Default max quantizer, mirroring the WebRTC video channel default.
+constexpr int kDefaultMaxQp = cricket::WebRtcVideoChannel::kDefaultQpMax;
+// Maps a codec type to the RTP payload type used by the CallTest fixtures.
+uint8_t CodecTypeToPayloadType(VideoCodecType codec_type) {
+  switch (codec_type) {
+    case VideoCodecType::kVideoCodecVP8:
+      return CallTest::kPayloadTypeVP8;
+    case VideoCodecType::kVideoCodecVP9:
+      return CallTest::kPayloadTypeVP9;
+    case VideoCodecType::kVideoCodecH264:
+      return CallTest::kPayloadTypeH264;
+    case VideoCodecType::kVideoCodecGeneric:
+      return CallTest::kFakeVideoSendPayloadType;
+    default:
+      break;
+  }
+  // Any other codec type is a configuration error in the scenario.
+  RTC_DCHECK_NOTREACHED();
+  return {};
+}
+// Maps a codec type to its SDP codec name; the generic codec has no name.
+std::string CodecTypeToCodecName(VideoCodecType codec_type) {
+  switch (codec_type) {
+    case VideoCodecType::kVideoCodecVP8:
+      return cricket::kVp8CodecName;
+    case VideoCodecType::kVideoCodecVP9:
+      return cricket::kVp9CodecName;
+    case VideoCodecType::kVideoCodecH264:
+      return cricket::kH264CodecName;
+    case VideoCodecType::kVideoCodecGeneric:
+      return "";
+    default:
+      break;
+  }
+  RTC_DCHECK_NOTREACHED();
+  return {};
+}
+// Translates the scenario-level content type into the encoder-config enum.
+VideoEncoderConfig::ContentType ConvertContentType(
+    VideoStreamConfig::Encoder::ContentType content_type) {
+  if (content_type == VideoStreamConfig::Encoder::ContentType::kScreen)
+    return VideoEncoderConfig::ContentType::kScreen;
+  return VideoEncoderConfig::ContentType::kRealtimeVideo;
+}
+
+// Resolves a configured file path to an on-disk path:
+// - "name" (no extension)  -> test resource "name" with extension "yuv".
+// - "res://name.ext"       -> test resource "name" with extension "ext".
+// - anything else          -> returned unchanged.
+std::string TransformFilePath(std::string path) {
+  static const std::string resource_prefix = "res://";
+  size_t ext_pos = path.rfind('.');
+  if (ext_pos == std::string::npos) {
+    return test::ResourcePath(path, "yuv");
+  } else if (absl::StartsWith(path, resource_prefix)) {
+    // Name is the span between the "res://" prefix and the extension dot.
+    // The previous code passed the dot's *position* as the substr count,
+    // which leaked the extension into the name, and kept the leading dot in
+    // the extension; ResourcePath expects the extension without the dot.
+    std::string name = path.substr(resource_prefix.length(),
+                                   ext_pos - resource_prefix.length());
+    std::string ext = path.substr(ext_pos + 1);
+    return test::ResourcePath(name, ext);
+  }
+  return path;
+}
+
+// Builds the VideoSendStream::Config for a scenario stream: payload
+// type/name, NACK history, SSRCs, header extensions and the optional
+// RTX / FlexFEC / ULPFEC protection mechanisms.
+VideoSendStream::Config CreateVideoSendStreamConfig(
+    VideoStreamConfig config,
+    std::vector<uint32_t> ssrcs,
+    std::vector<uint32_t> rtx_ssrcs,
+    Transport* send_transport) {
+  VideoSendStream::Config send_config(send_transport);
+  send_config.rtp.payload_name = CodecTypeToPayloadString(config.encoder.codec);
+  send_config.rtp.payload_type = CodecTypeToPayloadType(config.encoder.codec);
+  send_config.rtp.nack.rtp_history_ms =
+      config.stream.nack_history_time.ms<int>();
+
+  send_config.rtp.ssrcs = ssrcs;
+  send_config.rtp.extensions = GetVideoRtpExtensions(config);
+
+  if (config.stream.use_rtx) {
+    send_config.rtp.rtx.payload_type = CallTest::kSendRtxPayloadType;
+    send_config.rtp.rtx.ssrcs = rtx_ssrcs;
+  }
+  if (config.stream.use_flexfec) {
+    send_config.rtp.flexfec.payload_type = CallTest::kFlexfecPayloadType;
+    send_config.rtp.flexfec.ssrc = CallTest::kFlexfecSendSsrc;
+    // FlexFEC protects the primary media SSRCs on the send side.
+    send_config.rtp.flexfec.protected_media_ssrcs = ssrcs;
+  }
+  if (config.stream.use_ulpfec) {
+    send_config.rtp.ulpfec.red_payload_type = CallTest::kRedPayloadType;
+    send_config.rtp.ulpfec.ulpfec_payload_type = CallTest::kUlpfecPayloadType;
+    send_config.rtp.ulpfec.red_rtx_payload_type = CallTest::kRtxRedPayloadType;
+  }
+  return send_config;
+}
+// Builds VP9-specific encoder settings: temporal/spatial layer counts and
+// inter-layer prediction are derived from the configured scalability mode.
+rtc::scoped_refptr<VideoEncoderConfig::EncoderSpecificSettings>
+CreateVp9SpecificSettings(VideoStreamConfig video_config) {
+  constexpr auto kScreen = VideoStreamConfig::Encoder::ContentType::kScreen;
+  VideoStreamConfig::Encoder conf = video_config.encoder;
+  VideoCodecVP9 vp9 = VideoEncoder::GetDefaultVp9Settings();
+  // TODO(bugs.webrtc.org/11607): Support separate scalability mode per
+  // simulcast stream.
+  ScalabilityMode scalability_mode = conf.simulcast_streams[0];
+  vp9.keyFrameInterval = conf.key_frame_interval.value_or(0);
+  vp9.numberOfTemporalLayers =
+      ScalabilityModeToNumTemporalLayers(scalability_mode);
+  vp9.numberOfSpatialLayers =
+      ScalabilityModeToNumSpatialLayers(scalability_mode);
+  vp9.interLayerPred = ScalabilityModeToInterLayerPredMode(scalability_mode);
+
+  // NOTE(review): flexible mode is enabled for higher-framerate or 3+
+  // spatial-layer screenshare; rationale inferred from the condition itself —
+  // confirm against the VP9 encoder settings documentation.
+  if (conf.content_type == kScreen &&
+      (video_config.source.framerate > 5 || vp9.numberOfSpatialLayers >= 3)) {
+    vp9.flexibleMode = true;
+  }
+
+  // Automatic resize and denoising are only kept for single-layer realtime
+  // video; any layering or screenshare disables both.
+  if (conf.content_type == kScreen || vp9.numberOfTemporalLayers > 1 ||
+      vp9.numberOfSpatialLayers > 1) {
+    vp9.automaticResizeOn = false;
+    vp9.denoisingOn = false;
+  } else {
+    vp9.automaticResizeOn = conf.single.automatic_scaling;
+    vp9.denoisingOn = conf.single.denoising;
+  }
+  return rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
+      vp9);
+}
+
+// Builds VP8-specific encoder settings; the temporal layer count comes from
+// the configured scalability mode of the first simulcast stream.
+rtc::scoped_refptr<VideoEncoderConfig::EncoderSpecificSettings>
+CreateVp8SpecificSettings(VideoStreamConfig config) {
+  VideoCodecVP8 vp8_settings = VideoEncoder::GetDefaultVp8Settings();
+  vp8_settings.keyFrameInterval = config.encoder.key_frame_interval.value_or(0);
+  // TODO(bugs.webrtc.org/11607): Support separate scalability mode per
+  // simulcast stream.
+  ScalabilityMode scalability_mode = config.encoder.simulcast_streams[0];
+  vp8_settings.numberOfTemporalLayers =
+      ScalabilityModeToNumTemporalLayers(scalability_mode);
+  // Automatic resize and denoising are only enabled for a plain
+  // single-layer, single-stream encode.
+  if (vp8_settings.numberOfTemporalLayers > 1 ||
+      config.encoder.simulcast_streams.size() > 1) {
+    vp8_settings.automaticResizeOn = false;
+    vp8_settings.denoisingOn = false;
+  } else {
+    vp8_settings.automaticResizeOn = config.encoder.single.automatic_scaling;
+    vp8_settings.denoisingOn = config.encoder.single.denoising;
+  }
+  return rtc::make_ref_counted<VideoEncoderConfig::Vp8EncoderSpecificSettings>(
+      vp8_settings);
+}
+
+// H264 has no codec-specific settings object here; just validate that the
+// configuration does not request features this path cannot express.
+rtc::scoped_refptr<VideoEncoderConfig::EncoderSpecificSettings>
+CreateH264SpecificSettings(VideoStreamConfig config) {
+  const auto& encoder = config.encoder;
+  RTC_DCHECK_EQ(encoder.simulcast_streams.size(), 1);
+  RTC_DCHECK(encoder.simulcast_streams[0] == ScalabilityMode::kL1T1);
+  // TODO(bugs.webrtc.org/6883): Set a key frame interval as a setting that
+  // isn't codec specific.
+  RTC_CHECK_EQ(0, encoder.key_frame_interval.value_or(0));
+  return nullptr;
+}
+
+// Dispatches to the per-codec settings factory; codecs without dedicated
+// settings return nullptr.
+rtc::scoped_refptr<VideoEncoderConfig::EncoderSpecificSettings>
+CreateEncoderSpecificSettings(VideoStreamConfig config) {
+  using Codec = VideoStreamConfig::Encoder::Codec;
+  const Codec codec = config.encoder.codec;
+  if (codec == Codec::kVideoCodecVP8)
+    return CreateVp8SpecificSettings(config);
+  if (codec == Codec::kVideoCodecVP9)
+    return CreateVp9SpecificSettings(config);
+  if (codec == Codec::kVideoCodecH264)
+    return CreateH264SpecificSettings(config);
+  // Multiplex is not supported by the scenario framework.
+  RTC_DCHECK(codec != Codec::kVideoCodecMultiplex);
+  // Generic and AV1 use no codec-specific settings.
+  return nullptr;
+}
+
+// Translates the scenario VideoStreamConfig into a VideoEncoderConfig:
+// selects the stream factory, sets bitrate bounds and fills in the
+// per-simulcast-layer settings.
+VideoEncoderConfig CreateVideoEncoderConfig(VideoStreamConfig config) {
+  webrtc::VideoEncoder::EncoderInfo encoder_info;
+  VideoEncoderConfig encoder_config;
+  encoder_config.codec_type = config.encoder.codec;
+  encoder_config.content_type = ConvertContentType(config.encoder.content_type);
+  encoder_config.video_format =
+      SdpVideoFormat(CodecTypeToPayloadString(config.encoder.codec), {});
+
+  encoder_config.number_of_streams = config.encoder.simulcast_streams.size();
+  encoder_config.simulcast_layers =
+      std::vector<VideoStream>(encoder_config.number_of_streams);
+  encoder_config.min_transmit_bitrate_bps = config.stream.pad_to_rate.bps();
+
+  // Named cricket codecs get the real EncoderStreamFactory; the generic
+  // codec falls back to the default factory.
+  std::string cricket_codec = CodecTypeToCodecName(config.encoder.codec);
+  if (!cricket_codec.empty()) {
+    bool screenshare = config.encoder.content_type ==
+                       VideoStreamConfig::Encoder::ContentType::kScreen;
+    encoder_config.video_stream_factory =
+        rtc::make_ref_counted<cricket::EncoderStreamFactory>(
+            cricket_codec, kDefaultMaxQp, screenshare, screenshare,
+            encoder_info);
+  } else {
+    encoder_config.video_stream_factory =
+        rtc::make_ref_counted<DefaultVideoStreamFactory>();
+  }
+
+  // TODO(srte): Base this on encoder capabilities.
+  encoder_config.max_bitrate_bps =
+      config.encoder.max_data_rate.value_or(DataRate::KilobitsPerSec(10000))
+          .bps();
+
+  encoder_config.frame_drop_enabled = config.encoder.frame_dropping;
+  encoder_config.encoder_specific_settings =
+      CreateEncoderSpecificSettings(config);
+
+  for (size_t i = 0; i < encoder_config.number_of_streams; ++i) {
+    auto& layer = encoder_config.simulcast_layers[i];
+    if (config.encoder.max_framerate) {
+      layer.max_framerate = *config.encoder.max_framerate;
+    }
+    // Guard on min_data_rate itself. The previous code dereferenced the
+    // optional inside the max_framerate branch, so an unset min_data_rate
+    // was dereferenced (UB) whenever max_framerate was set, and a set
+    // min_data_rate was silently ignored whenever max_framerate was unset.
+    if (config.encoder.min_data_rate) {
+      layer.min_bitrate_bps = config.encoder.min_data_rate->bps_or(-1);
+    }
+    layer.scalability_mode = config.encoder.simulcast_streams[i];
+  }
+
+  return encoder_config;
+}
+
+// Creates a frame generator that plays image slides from YUV files. With a
+// configured crop, the generator scrolls over each image; without one, each
+// image is simply shown for the configured change interval.
+std::unique_ptr<FrameGeneratorInterface> CreateImageSlideGenerator(
+    Clock* clock,
+    VideoStreamConfig::Source::Slides slides,
+    int framerate) {
+  std::vector<std::string> paths = slides.images.paths;
+  for (std::string& path : paths)
+    path = TransformFilePath(path);
+  if (slides.images.crop.width || slides.images.crop.height) {
+    // Each slide scrolls for scroll_duration, then pauses for the rest of
+    // the change interval; a negative pause means a misconfiguration.
+    TimeDelta pause_duration =
+        slides.change_interval - slides.images.crop.scroll_duration;
+    RTC_CHECK_GE(pause_duration, TimeDelta::Zero());
+    int crop_width = slides.images.crop.width.value_or(slides.images.width);
+    int crop_height = slides.images.crop.height.value_or(slides.images.height);
+    RTC_CHECK_LE(crop_width, slides.images.width);
+    RTC_CHECK_LE(crop_height, slides.images.height);
+    return CreateScrollingInputFromYuvFilesFrameGenerator(
+        clock, paths, slides.images.width, slides.images.height, crop_width,
+        crop_height, slides.images.crop.scroll_duration.ms(),
+        pause_duration.ms());
+  } else {
+    // Repeat each file for change_interval worth of frames at `framerate`.
+    return CreateFromYuvFileFrameGenerator(
+        paths, slides.images.width, slides.images.height,
+        slides.change_interval.seconds<double>() * framerate);
+  }
+}
+
+// Creates the configured frame source: synthetic squares, a YUV file, or
+// generated/image-based slides.
+std::unique_ptr<FrameGeneratorInterface> CreateFrameGenerator(
+    Clock* clock,
+    VideoStreamConfig::Source source) {
+  using Capture = VideoStreamConfig::Source::Capture;
+  switch (source.capture) {
+    case Capture::kGenerator:
+      return CreateSquareFrameGenerator(
+          source.generator.width, source.generator.height,
+          source.generator.pixel_format, /*num_squares*/ absl::nullopt);
+    case Capture::kVideoFile:
+      // A file source needs explicit dimensions to parse the raw YUV data.
+      RTC_CHECK(source.video_file.width && source.video_file.height);
+      return CreateFromYuvFileFrameGenerator(
+          {TransformFilePath(source.video_file.name)}, source.video_file.width,
+          source.video_file.height, /*frame_repeat_count*/ 1);
+    case Capture::kGenerateSlides:
+      return CreateSlideFrameGenerator(
+          source.slides.generator.width, source.slides.generator.height,
+          source.slides.change_interval.seconds<double>() * source.framerate);
+    case Capture::kImageSlides:
+      return CreateImageSlideGenerator(clock, source.slides, source.framerate);
+  }
+}
+
+// Builds the receive-side stream config mirroring the send side: the same
+// header extensions, NACK history and RTX/ULPFEC payload type mappings.
+VideoReceiveStreamInterface::Config CreateVideoReceiveStreamConfig(
+    VideoStreamConfig config,
+    Transport* feedback_transport,
+    VideoDecoderFactory* decoder_factory,
+    VideoReceiveStreamInterface::Decoder decoder,
+    rtc::VideoSinkInterface<VideoFrame>* renderer,
+    uint32_t local_ssrc,
+    uint32_t ssrc,
+    uint32_t rtx_ssrc) {
+  VideoReceiveStreamInterface::Config recv(feedback_transport);
+  recv.rtp.local_ssrc = local_ssrc;
+  recv.rtp.extensions = GetVideoRtpExtensions(config);
+
+  // RTX without any NACK history could never trigger retransmissions.
+  RTC_DCHECK(!config.stream.use_rtx ||
+             config.stream.nack_history_time > TimeDelta::Zero());
+  recv.rtp.nack.rtp_history_ms = config.stream.nack_history_time.ms();
+  recv.rtp.protected_by_flexfec = config.stream.use_flexfec;
+  recv.rtp.remote_ssrc = ssrc;
+  recv.decoder_factory = decoder_factory;
+  recv.decoders.push_back(decoder);
+  recv.renderer = renderer;
+  if (config.stream.use_rtx) {
+    recv.rtp.rtx_ssrc = rtx_ssrc;
+    // Map the RTX payload type back to the primary codec payload type.
+    recv.rtp.rtx_associated_payload_types[CallTest::kSendRtxPayloadType] =
+        CodecTypeToPayloadType(config.encoder.codec);
+  }
+  if (config.stream.use_ulpfec) {
+    recv.rtp.red_payload_type = CallTest::kRedPayloadType;
+    recv.rtp.ulpfec_payload_type = CallTest::kUlpfecPayloadType;
+    recv.rtp.rtx_associated_payload_types[CallTest::kRtxRedPayloadType] =
+        CallTest::kRedPayloadType;
+  }
+  recv.sync_group = config.render.sync_group;
+  return recv;
+}
+} // namespace
+
+// Builds the RTP header extension set shared by send and receive sides.
+// Content type and rotation are always present; transport feedback and
+// abs-send-time depend on the stream configuration.
+std::vector<RtpExtension> GetVideoRtpExtensions(
+    const VideoStreamConfig config) {
+  std::vector<RtpExtension> extensions;
+  extensions.emplace_back(RtpExtension::kVideoContentTypeUri,
+                          kVideoContentTypeExtensionId);
+  extensions.emplace_back(RtpExtension::kVideoRotationUri,
+                          kVideoRotationRtpExtensionId);
+  if (config.stream.packet_feedback) {
+    extensions.emplace_back(RtpExtension::kTransportSequenceNumberUri,
+                            kTransportSequenceNumberExtensionId);
+  }
+  if (config.stream.abs_send_time) {
+    extensions.emplace_back(RtpExtension::kAbsSendTimeUri,
+                            kAbsSendTimeExtensionId);
+  }
+  return extensions;
+}
+
+// Creates the capturer, encoder factory and VideoSendStream (the latter on
+// the sender's task queue), and attaches a frame-matcher tap between capturer
+// and encoder when frame matching is enabled.
+SendVideoStream::SendVideoStream(CallClient* sender,
+                                 VideoStreamConfig config,
+                                 Transport* send_transport,
+                                 VideoFrameMatcher* matcher)
+    : sender_(sender), config_(config) {
+  video_capturer_ = std::make_unique<FrameGeneratorCapturer>(
+      sender_->clock_, CreateFrameGenerator(sender_->clock_, config.source),
+      config.source.framerate,
+      *sender->time_controller_->GetTaskQueueFactory());
+  video_capturer_->Init();
+
+  using Encoder = VideoStreamConfig::Encoder;
+  using Codec = VideoStreamConfig::Encoder::Codec;
+  switch (config.encoder.implementation) {
+    case Encoder::Implementation::kFake:
+      // The factory lambda runs once per encoder instance. Created encoders
+      // are tracked in fake_encoders_ (under mutex_) so their bitrate cap
+      // can be updated later from UpdateConfig().
+      encoder_factory_ =
+          std::make_unique<FunctionVideoEncoderFactory>([this]() {
+            MutexLock lock(&mutex_);
+            std::unique_ptr<FakeEncoder> encoder;
+            if (config_.encoder.codec == Codec::kVideoCodecVP8) {
+              encoder = std::make_unique<test::FakeVp8Encoder>(sender_->clock_);
+            } else if (config_.encoder.codec == Codec::kVideoCodecGeneric) {
+              encoder = std::make_unique<test::FakeEncoder>(sender_->clock_);
+            } else {
+              // Only VP8 and generic have fake implementations.
+              RTC_DCHECK_NOTREACHED();
+            }
+            fake_encoders_.push_back(encoder.get());
+            if (config_.encoder.fake.max_rate.IsFinite())
+              encoder->SetMaxBitrate(config_.encoder.fake.max_rate.kbps());
+            return encoder;
+          });
+      break;
+    case VideoStreamConfig::Encoder::Implementation::kSoftware:
+      encoder_factory_.reset(new InternalEncoderFactory());
+      break;
+    case VideoStreamConfig::Encoder::Implementation::kHardware:
+      encoder_factory_ = CreateHardwareEncoderFactory();
+      break;
+  }
+  RTC_CHECK(encoder_factory_);
+
+  bitrate_allocator_factory_ = CreateBuiltinVideoBitrateAllocatorFactory();
+  RTC_CHECK(bitrate_allocator_factory_);
+
+  // Allocate one media SSRC and one RTX SSRC per simulcast stream.
+  VideoEncoderConfig encoder_config = CreateVideoEncoderConfig(config);
+  for (size_t i = 0; i < encoder_config.number_of_streams; ++i) {
+    ssrcs_.push_back(sender->GetNextVideoSsrc());
+    rtx_ssrcs_.push_back(sender->GetNextRtxSsrc());
+  }
+  VideoSendStream::Config send_config =
+      CreateVideoSendStreamConfig(config, ssrcs_, rtx_ssrcs_, send_transport);
+  send_config.encoder_settings.encoder_factory = encoder_factory_.get();
+  send_config.encoder_settings.bitrate_allocator_factory =
+      bitrate_allocator_factory_.get();
+  send_config.suspend_below_min_bitrate =
+      config.encoder.suspend_below_min_bitrate;
+
+  // Stream creation must happen on the sender's task queue.
+  sender_->SendTask([&] {
+    if (config.stream.fec_controller_factory) {
+      send_stream_ = sender_->call_->CreateVideoSendStream(
+          std::move(send_config), std::move(encoder_config),
+          config.stream.fec_controller_factory->CreateFecController());
+    } else {
+      send_stream_ = sender_->call_->CreateVideoSendStream(
+          std::move(send_config), std::move(encoder_config));
+    }
+
+    // With an active matcher, interpose a forwarding tap so every captured
+    // frame is reported before reaching the encoder.
+    if (matcher->Active()) {
+      frame_tap_ = std::make_unique<ForwardingCapturedFrameTap>(
+          sender_->clock_, matcher, video_capturer_.get());
+      send_stream_->SetSource(frame_tap_.get(),
+                              config.encoder.degradation_preference);
+    } else {
+      send_stream_->SetSource(video_capturer_.get(),
+                              config.encoder.degradation_preference);
+    }
+  });
+}
+
+SendVideoStream::~SendVideoStream() {
+  // Destruction must run on the sender's task queue, mirroring creation.
+  sender_->SendTask(
+      [this] { sender_->call_->DestroyVideoSendStream(send_stream_); });
+}
+
+// Starts sending and signals the video network path as up for the call.
+void SendVideoStream::Start() {
+  sender_->SendTask([this] {
+    send_stream_->Start();
+    sender_->call_->SignalChannelNetworkState(MediaType::VIDEO, kNetworkUp);
+  });
+}
+
+// Stops sending; the network state signaled in Start() is left unchanged.
+void SendVideoStream::Stop() {
+  sender_->SendTask([this] { send_stream_->Stop(); });
+}
+
+// Applies `modifier` to the stream config on the sender's task queue, then
+// propagates changes that need a live reconfiguration (fake-encoder bitrate
+// cap, encoder settings, capture framerate).
+void SendVideoStream::UpdateConfig(
+    std::function<void(VideoStreamConfig*)> modifier) {
+  sender_->SendTask([&] {
+    MutexLock lock(&mutex_);
+    VideoStreamConfig prior_config = config_;
+    modifier(&config_);
+    if (prior_config.encoder.fake.max_rate != config_.encoder.fake.max_rate) {
+      for (auto* encoder : fake_encoders_) {
+        encoder->SetMaxBitrate(config_.encoder.fake.max_rate.kbps());
+      }
+    }
+    // TODO(srte): Add more conditions that should cause reconfiguration.
+    if (prior_config.encoder.max_framerate != config_.encoder.max_framerate ||
+        prior_config.encoder.max_data_rate != config_.encoder.max_data_rate) {
+      VideoEncoderConfig encoder_config = CreateVideoEncoderConfig(config_);
+      send_stream_->ReconfigureVideoEncoder(std::move(encoder_config));
+    }
+    if (prior_config.source.framerate != config_.source.framerate) {
+      // NOTE(review): SetCaptureFramerate issues another SendTask from
+      // within this task — confirm SendTask supports re-entrant use.
+      SetCaptureFramerate(config_.source.framerate);
+    }
+  });
+}
+
+// Activates/deactivates simulcast layers. Unlike the other mutators, this is
+// posted asynchronously to the sender's task queue and does not block.
+void SendVideoStream::UpdateActiveLayers(std::vector<bool> active_layers) {
+  sender_->task_queue_.PostTask([=] {
+    MutexLock lock(&mutex_);
+    // For VP8 simulcast, also start/stop the individual RTP streams.
+    if (config_.encoder.codec ==
+        VideoStreamConfig::Encoder::Codec::kVideoCodecVP8) {
+      send_stream_->StartPerRtpStream(active_layers);
+    }
+    VideoEncoderConfig encoder_config = CreateVideoEncoderConfig(config_);
+    RTC_CHECK_EQ(encoder_config.simulcast_layers.size(), active_layers.size());
+    for (size_t i = 0; i < encoder_config.simulcast_layers.size(); ++i)
+      encoder_config.simulcast_layers[i].active = active_layers[i];
+    send_stream_->ReconfigureVideoEncoder(std::move(encoder_config));
+  });
+}
+
+// True if `ssrc` is one of the primary media SSRCs owned by this stream.
+bool SendVideoStream::UsingSsrc(uint32_t ssrc) const {
+  return std::find(ssrcs_.begin(), ssrcs_.end(), ssrc) != ssrcs_.end();
+}
+
+// True if `ssrc` is one of the RTX SSRCs owned by this stream.
+bool SendVideoStream::UsingRtxSsrc(uint32_t ssrc) const {
+  return std::find(rtx_ssrcs_.begin(), rtx_ssrcs_.end(), ssrc) !=
+         rtx_ssrcs_.end();
+}
+
+// Changes the capturer frame rate; runs on the sender's task queue.
+void SendVideoStream::SetCaptureFramerate(int framerate) {
+  sender_->SendTask([&] { video_capturer_->ChangeFramerate(framerate); });
+}
+
+// Snapshot of current send-side stats, taken directly from the send stream.
+VideoSendStream::Stats SendVideoStream::GetStats() const {
+  return send_stream_->GetStats();
+}
+
+// Returns a ColumnPrinter logging target rate, sent media rate and the
+// largest encoded resolution across all substreams.
+ColumnPrinter SendVideoStream::StatsPrinter() {
+  return ColumnPrinter::Lambda(
+      "video_target_rate video_sent_rate width height",
+      [this](rtc::SimpleStringBuilder& sb) {
+        VideoSendStream::Stats video_stats = send_stream_->GetStats();
+        int width = 0;
+        int height = 0;
+        for (const auto& stream_stat : video_stats.substreams) {
+          width = std::max(width, stream_stat.second.width);
+          height = std::max(height, stream_stat.second.height);
+        }
+        // Rates are printed in bytes per second (bps / 8).
+        sb.AppendFormat("%.0lf %.0lf %i %i",
+                        video_stats.target_media_bitrate_bps / 8.0,
+                        video_stats.media_bitrate_bps / 8.0, width, height);
+      },
+      64);
+}
+
+// Creates one VideoReceiveStream per simulcast stream of `send_stream`,
+// choosing fake or real decoders to match the sender's encoder and wiring
+// optional matcher taps and FlexFEC.
+// NOTE(review): `chosen_stream` is not referenced in this body — confirm
+// whether it is still needed.
+ReceiveVideoStream::ReceiveVideoStream(CallClient* receiver,
+                                       VideoStreamConfig config,
+                                       SendVideoStream* send_stream,
+                                       size_t chosen_stream,
+                                       Transport* feedback_transport,
+                                       VideoFrameMatcher* matcher)
+    : receiver_(receiver), config_(config) {
+  // Fake/generic encoders emit fake bitstreams that only FakeDecoder can
+  // handle; otherwise the real software decoders are used.
+  if (config.encoder.codec ==
+          VideoStreamConfig::Encoder::Codec::kVideoCodecGeneric ||
+      config.encoder.implementation == VideoStreamConfig::Encoder::kFake) {
+    decoder_factory_ = std::make_unique<FunctionVideoDecoderFactory>(
+        []() { return std::make_unique<FakeDecoder>(); });
+  } else {
+    decoder_factory_ = std::make_unique<InternalDecoderFactory>();
+  }
+
+  VideoReceiveStreamInterface::Decoder decoder =
+      CreateMatchingDecoder(CodecTypeToPayloadType(config.encoder.codec),
+                            CodecTypeToPayloadString(config.encoder.codec));
+  size_t num_streams = config.encoder.simulcast_streams.size();
+  for (size_t i = 0; i < num_streams; ++i) {
+    // When frame matching is active, decoded frames are routed through a
+    // DecodedFrameTap instead of the plain fake renderer.
+    rtc::VideoSinkInterface<VideoFrame>* renderer = &fake_renderer_;
+    if (matcher->Active()) {
+      render_taps_.emplace_back(
+          std::make_unique<DecodedFrameTap>(receiver_->clock_, matcher, i));
+      renderer = render_taps_.back().get();
+    }
+    auto recv_config = CreateVideoReceiveStreamConfig(
+        config, feedback_transport, decoder_factory_.get(), decoder, renderer,
+        receiver_->GetNextVideoLocalSsrc(), send_stream->ssrcs_[i],
+        send_stream->rtx_ssrcs_[i]);
+    if (config.stream.use_flexfec) {
+      RTC_DCHECK(num_streams == 1);
+      FlexfecReceiveStream::Config flexfec(feedback_transport);
+      flexfec.payload_type = CallTest::kFlexfecPayloadType;
+      flexfec.rtp.remote_ssrc = CallTest::kFlexfecSendSsrc;
+      // NOTE(review): this protects the sender's RTX SSRCs while the send
+      // side protects the primary media SSRCs — confirm which is intended.
+      flexfec.protected_media_ssrcs = send_stream->rtx_ssrcs_;
+      flexfec.rtp.local_ssrc = recv_config.rtp.local_ssrc;
+      receiver_->ssrc_media_types_[flexfec.rtp.remote_ssrc] = MediaType::VIDEO;
+
+      receiver_->SendTask([this, &flexfec] {
+        flecfec_stream_ = receiver_->call_->CreateFlexfecReceiveStream(flexfec);
+      });
+    }
+    // Register SSRC -> media-type mappings used to route incoming packets.
+    receiver_->ssrc_media_types_[recv_config.rtp.remote_ssrc] =
+        MediaType::VIDEO;
+    if (config.stream.use_rtx)
+      receiver_->ssrc_media_types_[recv_config.rtp.rtx_ssrc] = MediaType::VIDEO;
+    receiver_->SendTask([this, &recv_config] {
+      receive_streams_.push_back(
+          receiver_->call_->CreateVideoReceiveStream(std::move(recv_config)));
+    });
+  }
+}
+
+ReceiveVideoStream::~ReceiveVideoStream() {
+  // Streams must be destroyed on the receiver's task queue.
+  receiver_->SendTask([this] {
+    for (auto* recv_stream : receive_streams_)
+      receiver_->call_->DestroyVideoReceiveStream(recv_stream);
+    if (flecfec_stream_)
+      receiver_->call_->DestroyFlexfecReceiveStream(flecfec_stream_);
+  });
+}
+
+// Starts all receive streams and signals the video path up for the call.
+void ReceiveVideoStream::Start() {
+  receiver_->SendTask([this] {
+    for (auto* recv_stream : receive_streams_)
+      recv_stream->Start();
+    receiver_->call_->SignalChannelNetworkState(MediaType::VIDEO, kNetworkUp);
+  });
+}
+
+// Stops all receive streams; the network state is left unchanged.
+void ReceiveVideoStream::Stop() {
+  receiver_->SendTask([this] {
+    for (auto* recv_stream : receive_streams_)
+      recv_stream->Stop();
+  });
+}
+
+// Returns stats for the last created receive stream, or a default-initialized
+// Stats object if no streams exist.
+VideoReceiveStreamInterface::Stats ReceiveVideoStream::GetStats() const {
+  // TODO(srte): Handle multiple receive streams.
+  if (!receive_streams_.empty())
+    return receive_streams_.back()->GetStats();
+  return VideoReceiveStreamInterface::Stats();
+}
+
+VideoStreamPair::~VideoStreamPair() = default;
+
+// Members initialize in declaration order: the matcher first so both stream
+// endpoints can register taps with it, and the send stream before the
+// receive stream, which reads the sender's SSRCs.
+VideoStreamPair::VideoStreamPair(CallClient* sender,
+                                 CallClient* receiver,
+                                 VideoStreamConfig config)
+    : config_(config),
+      matcher_(config.hooks.frame_pair_handlers),
+      send_stream_(sender, config, sender->transport_.get(), &matcher_),
+      receive_stream_(receiver,
+                      config,
+                      &send_stream_,
+                      /*chosen_stream=*/0,
+                      receiver->transport_.get(),
+                      &matcher_) {}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/scenario/video_stream.h b/third_party/libwebrtc/test/scenario/video_stream.h
new file mode 100644
index 0000000000..43c51eab73
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/video_stream.h
@@ -0,0 +1,138 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_SCENARIO_VIDEO_STREAM_H_
+#define TEST_SCENARIO_VIDEO_STREAM_H_
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "rtc_base/synchronization/mutex.h"
+#include "test/fake_encoder.h"
+#include "test/fake_videorenderer.h"
+#include "test/frame_generator_capturer.h"
+#include "test/logging/log_writer.h"
+#include "test/scenario/call_client.h"
+#include "test/scenario/column_printer.h"
+#include "test/scenario/network_node.h"
+#include "test/scenario/scenario_config.h"
+#include "test/scenario/video_frame_matcher.h"
+#include "test/test_video_capturer.h"
+
+namespace webrtc {
+namespace test {
+// SendVideoStream provides an interface for changing parameters and retrieving
+// states at run time.
+class SendVideoStream {
+ public:
+  ~SendVideoStream();
+
+  SendVideoStream(const SendVideoStream&) = delete;
+  SendVideoStream& operator=(const SendVideoStream&) = delete;
+
+  void SetCaptureFramerate(int framerate);
+  VideoSendStream::Stats GetStats() const;
+  ColumnPrinter StatsPrinter();
+  void Start();
+  void Stop();
+  // Applies `modifier` to the config and reconfigures encoder/capturer as
+  // needed; runs synchronously on the sender's task queue.
+  void UpdateConfig(std::function<void(VideoStreamConfig*)> modifier);
+  // Enables/disables simulcast layers; posted asynchronously.
+  void UpdateActiveLayers(std::vector<bool> active_layers);
+  bool UsingSsrc(uint32_t ssrc) const;
+  bool UsingRtxSsrc(uint32_t ssrc) const;
+
+ private:
+  friend class Scenario;
+  friend class VideoStreamPair;
+  friend class ReceiveVideoStream;
+  // Handles RTCP feedback for this stream.
+  SendVideoStream(CallClient* sender,
+                  VideoStreamConfig config,
+                  Transport* send_transport,
+                  VideoFrameMatcher* matcher);
+
+  Mutex mutex_;
+  std::vector<uint32_t> ssrcs_;
+  std::vector<uint32_t> rtx_ssrcs_;
+  VideoSendStream* send_stream_ = nullptr;
+  CallClient* const sender_;
+  VideoStreamConfig config_ RTC_GUARDED_BY(mutex_);
+  std::unique_ptr<VideoEncoderFactory> encoder_factory_;
+  // Raw pointers to encoders created by encoder_factory_, kept so bitrate
+  // caps can be updated at runtime.
+  std::vector<test::FakeEncoder*> fake_encoders_ RTC_GUARDED_BY(mutex_);
+  std::unique_ptr<VideoBitrateAllocatorFactory> bitrate_allocator_factory_;
+  std::unique_ptr<FrameGeneratorCapturer> video_capturer_;
+  // Matcher tap between capturer and encoder; null when matching is off.
+  std::unique_ptr<ForwardingCapturedFrameTap> frame_tap_;
+  // NOTE(review): the two counters below appear unused in video_stream.cc —
+  // confirm whether they can be removed.
+  int next_local_network_id_ = 0;
+  int next_remote_network_id_ = 0;
+};
+
+// ReceiveVideoStream represents a video receiver. It can't be used directly.
+class ReceiveVideoStream {
+ public:
+  ~ReceiveVideoStream();
+
+  ReceiveVideoStream(const ReceiveVideoStream&) = delete;
+  ReceiveVideoStream& operator=(const ReceiveVideoStream&) = delete;
+
+  void Start();
+  void Stop();
+  VideoReceiveStreamInterface::Stats GetStats() const;
+
+ private:
+  friend class Scenario;
+  friend class VideoStreamPair;
+  ReceiveVideoStream(CallClient* receiver,
+                     VideoStreamConfig config,
+                     SendVideoStream* send_stream,
+                     size_t chosen_stream,
+                     Transport* feedback_transport,
+                     VideoFrameMatcher* matcher);
+
+  // One receive stream per simulcast stream of the paired sender.
+  std::vector<VideoReceiveStreamInterface*> receive_streams_;
+  // NOTE(review): "flecfec" is a typo for "flexfec"; renaming requires a
+  // coordinated change with video_stream.cc.
+  FlexfecReceiveStream* flecfec_stream_ = nullptr;
+  // Default sink for decoded frames when no matcher tap is installed.
+  FakeVideoRenderer fake_renderer_;
+  // One DecodedFrameTap per receive stream when frame matching is active.
+  std::vector<std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>>>
+      render_taps_;
+  CallClient* const receiver_;
+  const VideoStreamConfig config_;
+  std::unique_ptr<VideoDecoderFactory> decoder_factory_;
+};
+
+// VideoStreamPair represents a video streaming session. It can be used to
+// access underlying send and receive classes. It can also be used in calls to
+// the Scenario class.
+class VideoStreamPair {
+ public:
+  ~VideoStreamPair();
+
+  VideoStreamPair(const VideoStreamPair&) = delete;
+  VideoStreamPair& operator=(const VideoStreamPair&) = delete;
+
+  SendVideoStream* send() { return &send_stream_; }
+  ReceiveVideoStream* receive() { return &receive_stream_; }
+  VideoFrameMatcher* matcher() { return &matcher_; }
+
+ private:
+  friend class Scenario;
+  VideoStreamPair(CallClient* sender,
+                  CallClient* receiver,
+                  VideoStreamConfig config);
+
+  const VideoStreamConfig config_;
+
+  // Declaration order matters: the matcher must outlive both streams, and
+  // the send stream is constructed before the receive stream, which reads
+  // the sender's SSRCs.
+  VideoFrameMatcher matcher_;
+  SendVideoStream send_stream_;
+  ReceiveVideoStream receive_stream_;
+};
+
+std::vector<RtpExtension> GetVideoRtpExtensions(const VideoStreamConfig config);
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_SCENARIO_VIDEO_STREAM_H_
diff --git a/third_party/libwebrtc/test/scenario/video_stream_unittest.cc b/third_party/libwebrtc/test/scenario/video_stream_unittest.cc
new file mode 100644
index 0000000000..e53af4ef2b
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/video_stream_unittest.cc
@@ -0,0 +1,322 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include <atomic>
+
+#include "api/test/network_emulation/create_cross_traffic.h"
+#include "api/test/network_emulation/cross_traffic.h"
+#include "test/field_trial.h"
+#include "test/gtest.h"
+#include "test/scenario/scenario.h"
+
+namespace webrtc {
+namespace test {
+namespace {
+// Short aliases for the deeply nested VideoStreamConfig enums used by the
+// tests below. NOTE(review): ContentType appears unused in this file.
+using Capture = VideoStreamConfig::Source::Capture;
+using ContentType = VideoStreamConfig::Encoder::ContentType;
+using Codec = VideoStreamConfig::Encoder::Codec;
+using CodecImpl = VideoStreamConfig::Encoder::Implementation;
+}  // namespace
+
+// Verifies that both a file-based source (foreman_cif) and an image-slide
+// source deliver frames end-to-end over an unimpaired simulated link, at
+// roughly their configured frame rates (an 80% margin absorbs startup lag).
+TEST(VideoStreamTest, ReceivesFramesFromFileBasedStreams) {
+  TimeDelta kRunTime = TimeDelta::Millis(500);
+  std::vector<int> kFrameRates = {15, 30};
+  // std::deque rather than std::vector: std::atomic is neither copyable nor
+  // movable, and deque never relocates its elements.
+  std::deque<std::atomic<int>> frame_counts(2);
+  frame_counts[0] = 0;
+  frame_counts[1] = 0;
+  // Scoped so the Scenario is torn down before the counters are read.
+  {
+    Scenario s;
+    auto route =
+        s.CreateRoutes(s.CreateClient("caller", CallClientConfig()),
+                       {s.CreateSimulationNode(NetworkSimulationConfig())},
+                       s.CreateClient("callee", CallClientConfig()),
+                       {s.CreateSimulationNode(NetworkSimulationConfig())});
+
+    // Stream 1: CIF video file source, VP8, 15 fps.
+    s.CreateVideoStream(route->forward(), [&](VideoStreamConfig* c) {
+      c->hooks.frame_pair_handlers = {
+          [&](const VideoFramePair&) { frame_counts[0]++; }};
+      c->source.capture = Capture::kVideoFile;
+      c->source.video_file.name = "foreman_cif";
+      c->source.video_file.width = 352;
+      c->source.video_file.height = 288;
+      c->source.framerate = kFrameRates[0];
+      c->encoder.implementation = CodecImpl::kSoftware;
+      c->encoder.codec = Codec::kVideoCodecVP8;
+    });
+    // Stream 2: image-slides source, VP9, 30 fps.
+    s.CreateVideoStream(route->forward(), [&](VideoStreamConfig* c) {
+      c->hooks.frame_pair_handlers = {
+          [&](const VideoFramePair&) { frame_counts[1]++; }};
+      c->source.capture = Capture::kImageSlides;
+      c->source.slides.images.crop.width = 320;
+      c->source.slides.images.crop.height = 240;
+      c->source.framerate = kFrameRates[1];
+      c->encoder.implementation = CodecImpl::kSoftware;
+      c->encoder.codec = Codec::kVideoCodecVP9;
+    });
+    s.RunFor(kRunTime);
+  }
+  // Expect at least 80% of the nominal frame count for each stream.
+  std::vector<int> expected_counts;
+  for (int fps : kFrameRates)
+    expected_counts.push_back(
+        static_cast<int>(kRunTime.seconds<double>() * fps * 0.8));
+
+  EXPECT_GE(frame_counts[0], expected_counts[0]);
+  EXPECT_GE(frame_counts[1], expected_counts[1]);
+}
+
+// Verifies that frames from all three VP8 simulcast layers reach the decoder
+// when the link is unimpaired.
+TEST(VideoStreamTest, ReceivesVp8SimulcastFrames) {
+  TimeDelta kRunTime = TimeDelta::Millis(500);
+  int kFrameRate = 30;
+
+  // One counter per simulcast layer; std::deque keeps the non-movable
+  // std::atomic elements at stable addresses.
+  std::deque<std::atomic<int>> frame_counts(3);
+  frame_counts[0] = 0;
+  frame_counts[1] = 0;
+  frame_counts[2] = 0;
+  // Scoped so the Scenario is torn down before the counters are read.
+  {
+    Scenario s;
+    auto route =
+        s.CreateRoutes(s.CreateClient("caller", CallClientConfig()),
+                       {s.CreateSimulationNode(NetworkSimulationConfig())},
+                       s.CreateClient("callee", CallClientConfig()),
+                       {s.CreateSimulationNode(NetworkSimulationConfig())});
+    s.CreateVideoStream(route->forward(), [&](VideoStreamConfig* c) {
+      // TODO(srte): Replace with code checking for all simulcast streams when
+      // there's a hook available for that.
+      c->hooks.frame_pair_handlers = {[&](const VideoFramePair& info) {
+        frame_counts[info.layer_id]++;
+        RTC_DCHECK(info.decoded);
+        printf("%i: [%3i->%3i, %i], %i->%i, \n", info.layer_id, info.capture_id,
+               info.decode_id, info.repeated, info.captured->width(),
+               info.decoded->width());
+      }};
+      c->source.framerate = kFrameRate;
+      // The resolution must be high enough to allow smaller layers to be
+      // created.
+      c->source.generator.width = 1024;
+      c->source.generator.height = 768;
+      c->encoder.implementation = CodecImpl::kSoftware;
+      c->encoder.codec = Codec::kVideoCodecVP8;
+      // Enable simulcast.
+      c->encoder.simulcast_streams = {webrtc::ScalabilityMode::kL1T1,
+                                      webrtc::ScalabilityMode::kL1T1,
+                                      webrtc::ScalabilityMode::kL1T1};
+
+    });
+    s.RunFor(kRunTime);
+  }
+
+  // Using high error margin to avoid flakiness.
+  const int kExpectedCount =
+      static_cast<int>(kRunTime.seconds<double>() * kFrameRate * 0.5);
+
+  EXPECT_GE(frame_counts[0], kExpectedCount);
+  EXPECT_GE(frame_counts[1], kExpectedCount);
+  EXPECT_GE(frame_counts[2], kExpectedCount);
+}
+
+// Verifies that 20% packet loss on the forward link produces NACK-driven
+// retransmissions, visible in the sender's per-substream RTP stats.
+TEST(VideoStreamTest, SendsNacksOnLoss) {
+  Scenario s;
+  auto route =
+      s.CreateRoutes(s.CreateClient("caller", CallClientConfig()),
+                     {s.CreateSimulationNode([](NetworkSimulationConfig* c) {
+                       c->loss_rate = 0.2;
+                     })},
+                     s.CreateClient("callee", CallClientConfig()),
+                     {s.CreateSimulationNode(NetworkSimulationConfig())});
+  // NACK retransmissions are enabled by default.
+  auto video = s.CreateVideoStream(route->forward(), VideoStreamConfig());
+  s.RunFor(TimeDelta::Seconds(1));
+  int retransmit_packets = 0;
+  VideoSendStream::Stats stats;
+  // Stats are queried on the sending client's task queue via SendTask.
+  route->first()->SendTask([&]() { stats = video->send()->GetStats(); });
+  for (const auto& substream : stats.substreams) {
+    retransmit_packets += substream.second.rtp_stats.retransmitted.packets;
+  }
+  EXPECT_GT(retransmit_packets, 0);
+}
+
+// Verifies that enabling ULPFEC on a lossy, delayed link results in FEC
+// packets being sent (counted in the sender's RTP stats).
+TEST(VideoStreamTest, SendsFecWithUlpFec) {
+  Scenario s;
+  auto route =
+      s.CreateRoutes(s.CreateClient("caller", CallClientConfig()),
+                     {s.CreateSimulationNode([](NetworkSimulationConfig* c) {
+                       c->loss_rate = 0.1;
+                       c->delay = TimeDelta::Millis(100);
+                     })},
+                     s.CreateClient("callee", CallClientConfig()),
+                     {s.CreateSimulationNode(NetworkSimulationConfig())});
+  auto video = s.CreateVideoStream(route->forward(), [&](VideoStreamConfig* c) {
+    // We do not allow NACK+ULPFEC for generic codec, using VP8.
+    c->encoder.codec = VideoStreamConfig::Encoder::Codec::kVideoCodecVP8;
+    c->stream.use_ulpfec = true;
+  });
+  s.RunFor(TimeDelta::Seconds(5));
+  VideoSendStream::Stats video_stats;
+  // Stats are queried on the sending client's task queue via SendTask.
+  route->first()->SendTask([&]() { video_stats = video->send()->GetStats(); });
+  EXPECT_GT(video_stats.substreams.begin()->second.rtp_stats.fec.packets, 0u);
+}
+// Verifies that enabling FlexFEC on a lossy, delayed link results in FEC
+// packets being sent (counted in the sender's RTP stats).
+TEST(VideoStreamTest, SendsFecWithFlexFec) {
+  Scenario s;
+  auto route =
+      s.CreateRoutes(s.CreateClient("caller", CallClientConfig()),
+                     {s.CreateSimulationNode([](NetworkSimulationConfig* c) {
+                       c->loss_rate = 0.1;
+                       c->delay = TimeDelta::Millis(100);
+                     })},
+                     s.CreateClient("callee", CallClientConfig()),
+                     {s.CreateSimulationNode(NetworkSimulationConfig())});
+  auto video = s.CreateVideoStream(route->forward(), [&](VideoStreamConfig* c) {
+    c->stream.use_flexfec = true;
+  });
+  s.RunFor(TimeDelta::Seconds(5));
+  VideoSendStream::Stats video_stats;
+  // Stats are queried on the sending client's task queue via SendTask.
+  route->first()->SendTask([&]() { video_stats = video->send()->GetStats(); });
+  EXPECT_GT(video_stats.substreams.begin()->second.rtp_stats.fec.packets, 0u);
+}
+
+// Verifies that a two-layer VP9 SVC stream downswitches from VGA to QVGA when
+// competing TCP-like cross traffic reduces available bandwidth, and recovers
+// to VGA once the cross traffic stops.
+TEST(VideoStreamTest, ResolutionAdaptsToAvailableBandwidth) {
+  // Declared before scenario to avoid use after free.
+  std::atomic<size_t> num_qvga_frames_(0);
+  std::atomic<size_t> num_vga_frames_(0);
+
+  Scenario s;
+  // Link has enough capacity for VGA.
+  NetworkSimulationConfig net_conf;
+  net_conf.bandwidth = DataRate::KilobitsPerSec(800);
+  net_conf.delay = TimeDelta::Millis(50);
+  auto* client = s.CreateClient("send", [&](CallClientConfig* c) {
+    c->transport.rates.start_rate = DataRate::KilobitsPerSec(800);
+  });
+  auto send_net = {s.CreateSimulationNode(net_conf)};
+  auto ret_net = {s.CreateSimulationNode(net_conf)};
+  auto* route = s.CreateRoutes(
+      client, send_net, s.CreateClient("return", CallClientConfig()), ret_net);
+
+  s.CreateVideoStream(route->forward(), [&](VideoStreamConfig* c) {
+    // Classify every decoded frame by width; any resolution other than
+    // VGA (640) or QVGA (320) fails the test.
+    c->hooks.frame_pair_handlers = {[&](const VideoFramePair& info) {
+      if (info.decoded->width() == 640) {
+        ++num_vga_frames_;
+      } else if (info.decoded->width() == 320) {
+        ++num_qvga_frames_;
+      } else {
+        ADD_FAILURE() << "Unexpected resolution: " << info.decoded->width();
+      }
+    }};
+    c->source.framerate = 30;
+    // The resolution must be high enough to allow smaller layers to be
+    // created.
+    c->source.generator.width = 640;
+    c->source.generator.height = 480;
+    c->encoder.implementation = CodecImpl::kSoftware;
+    c->encoder.codec = Codec::kVideoCodecVP9;
+    // Enable SVC.
+    c->encoder.simulcast_streams = {webrtc::ScalabilityMode::kL2T1};
+  });
+
+  // Run for a few seconds, until streams have stabilized,
+  // check that we are sending VGA.
+  s.RunFor(TimeDelta::Seconds(5));
+  EXPECT_GT(num_vga_frames_, 0u);
+
+  // Trigger cross traffic, run until we have seen 3 consecutive
+  // seconds with no VGA frames due to reduced available bandwidth.
+  auto cross_traffic = s.net()->StartCrossTraffic(CreateFakeTcpCrossTraffic(
+      s.net()->CreateRoute(send_net), s.net()->CreateRoute(ret_net),
+      FakeTcpConfig()));
+
+  // Iteration cap (ASSERT_LE below) bounds the test if adaptation never kicks
+  // in; counters are reset each one-second window.
+  int num_seconds_without_vga = 0;
+  int num_iterations = 0;
+  do {
+    ASSERT_LE(++num_iterations, 100);
+    num_qvga_frames_ = 0;
+    num_vga_frames_ = 0;
+    s.RunFor(TimeDelta::Seconds(1));
+    if (num_qvga_frames_ > 0 && num_vga_frames_ == 0) {
+      ++num_seconds_without_vga;
+    } else {
+      num_seconds_without_vga = 0;
+    }
+  } while (num_seconds_without_vga < 3);
+
+  // Stop cross traffic, make sure we recover and get VGA frames again.
+  s.net()->StopCrossTraffic(cross_traffic);
+  num_qvga_frames_ = 0;
+  num_vga_frames_ = 0;
+
+  s.RunFor(TimeDelta::Seconds(40));
+  EXPECT_GT(num_qvga_frames_, 0u);
+  EXPECT_GT(num_vga_frames_, 0u);
+}
+
+// Verifies that video is suspended when the estimated link rate drops below
+// the encoder's configured minimum bitrate, and resumes when capacity returns.
+TEST(VideoStreamTest, SuspendsBelowMinBitrate) {
+  const DataRate kMinVideoBitrate = DataRate::KilobitsPerSec(30);
+
+  // Declared before scenario to avoid use after free.
+  std::atomic<Timestamp> last_frame_timestamp(Timestamp::MinusInfinity());
+
+  Scenario s;
+  NetworkSimulationConfig net_config;
+  net_config.bandwidth = kMinVideoBitrate * 4;
+  net_config.delay = TimeDelta::Millis(10);
+  auto* client = s.CreateClient("send", [&](CallClientConfig* c) {
+    // Min transmit rate needs to be lower than kMinVideoBitrate for this test
+    // to make sense.
+    c->transport.rates.min_rate = kMinVideoBitrate / 2;
+    c->transport.rates.start_rate = kMinVideoBitrate;
+    c->transport.rates.max_rate = kMinVideoBitrate * 2;
+  });
+  // Mutable node so the bandwidth can be changed mid-test via UpdateConfig.
+  auto send_net = s.CreateMutableSimulationNode(
+      [&](NetworkSimulationConfig* c) { *c = net_config; });
+  auto ret_net = {s.CreateSimulationNode(net_config)};
+  auto* route =
+      s.CreateRoutes(client, {send_net->node()},
+                     s.CreateClient("return", CallClientConfig()), ret_net);
+
+  s.CreateVideoStream(route->forward(), [&](VideoStreamConfig* c) {
+    // Record the capture time of every new (non-repeated) delivered frame.
+    c->hooks.frame_pair_handlers = {[&](const VideoFramePair& pair) {
+      if (pair.repeated == 0) {
+        last_frame_timestamp = pair.capture_time;
+      }
+    }};
+    c->source.framerate = 30;
+    c->source.generator.width = 320;
+    c->source.generator.height = 180;
+    c->encoder.implementation = CodecImpl::kFake;
+    c->encoder.codec = Codec::kVideoCodecVP8;
+    c->encoder.min_data_rate = kMinVideoBitrate;
+    c->encoder.suspend_below_min_bitrate = true;
+    c->stream.pad_to_rate = kMinVideoBitrate;
+  });
+
+  // Run for a few seconds, check we have received at least one frame.
+  s.RunFor(TimeDelta::Seconds(2));
+  EXPECT_TRUE(last_frame_timestamp.load().IsFinite());
+
+  // Degrade network to below min bitrate.
+  send_net->UpdateConfig([&](NetworkSimulationConfig* c) {
+    c->bandwidth = kMinVideoBitrate * 0.9;
+  });
+
+  // Run for 20s, verify that no frames arrive that were captured after the
+  // first five seconds, allowing some margin for BWE backoff to trigger and
+  // packets already in the pipeline to potentially arrive.
+  s.RunFor(TimeDelta::Seconds(20));
+  EXPECT_GT(s.Now() - last_frame_timestamp, TimeDelta::Seconds(15));
+
+  // Relax the network constraints and run for a while more, verify that we
+  // start receiving frames again.
+  send_net->UpdateConfig(
+      [&](NetworkSimulationConfig* c) { c->bandwidth = kMinVideoBitrate * 4; });
+  last_frame_timestamp = Timestamp::MinusInfinity();
+  s.RunFor(TimeDelta::Seconds(15));
+  EXPECT_TRUE(last_frame_timestamp.load().IsFinite());
+}
+
+} // namespace test
+} // namespace webrtc