Diffstat (limited to 'third_party/libwebrtc/video/end_to_end_tests')
-rw-r--r--  third_party/libwebrtc/video/end_to_end_tests/bandwidth_tests.cc  402
-rw-r--r--  third_party/libwebrtc/video/end_to_end_tests/call_operation_tests.cc  195
-rw-r--r--  third_party/libwebrtc/video/end_to_end_tests/codec_tests.cc  288
-rw-r--r--  third_party/libwebrtc/video/end_to_end_tests/config_tests.cc  113
-rw-r--r--  third_party/libwebrtc/video/end_to_end_tests/extended_reports_tests.cc  264
-rw-r--r--  third_party/libwebrtc/video/end_to_end_tests/fec_tests.cc  502
-rw-r--r--  third_party/libwebrtc/video/end_to_end_tests/frame_encryption_tests.cc  91
-rw-r--r--  third_party/libwebrtc/video/end_to_end_tests/histogram_tests.cc  317
-rw-r--r--  third_party/libwebrtc/video/end_to_end_tests/multi_codec_receive_tests.cc  291
-rw-r--r--  third_party/libwebrtc/video/end_to_end_tests/multi_stream_tester.cc  180
-rw-r--r--  third_party/libwebrtc/video/end_to_end_tests/multi_stream_tester.h  64
-rw-r--r--  third_party/libwebrtc/video/end_to_end_tests/multi_stream_tests.cc  92
-rw-r--r--  third_party/libwebrtc/video/end_to_end_tests/network_state_tests.cc  428
-rw-r--r--  third_party/libwebrtc/video/end_to_end_tests/resolution_bitrate_limits_tests.cc  481
-rw-r--r--  third_party/libwebrtc/video/end_to_end_tests/retransmission_tests.cc  513
-rw-r--r--  third_party/libwebrtc/video/end_to_end_tests/rtp_rtcp_tests.cc  551
-rw-r--r--  third_party/libwebrtc/video/end_to_end_tests/ssrc_tests.cc  325
-rw-r--r--  third_party/libwebrtc/video/end_to_end_tests/stats_tests.cc  733
-rw-r--r--  third_party/libwebrtc/video/end_to_end_tests/transport_feedback_tests.cc  493
19 files changed, 6323 insertions, 0 deletions
diff --git a/third_party/libwebrtc/video/end_to_end_tests/bandwidth_tests.cc b/third_party/libwebrtc/video/end_to_end_tests/bandwidth_tests.cc
new file mode 100644
index 0000000000..d6610a8ec2
--- /dev/null
+++ b/third_party/libwebrtc/video/end_to_end_tests/bandwidth_tests.cc
@@ -0,0 +1,402 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+
+#include "api/task_queue/task_queue_base.h"
+#include "api/test/simulated_network.h"
+#include "api/units/time_delta.h"
+#include "api/video/builtin_video_bitrate_allocator_factory.h"
+#include "api/video/video_bitrate_allocation.h"
+#include "call/fake_network_pipe.h"
+#include "call/simulated_network.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
+#include "rtc_base/rate_limiter.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/task_queue_for_test.h"
+#include "system_wrappers/include/sleep.h"
+#include "test/call_test.h"
+#include "test/fake_encoder.h"
+#include "test/field_trial.h"
+#include "test/gtest.h"
+#include "test/rtcp_packet_parser.h"
+#include "test/rtp_rtcp_observer.h"
+#include "test/video_encoder_proxy_factory.h"
+
+namespace webrtc {
+namespace {
+enum : int { // The first valid value is 1.
+ kAbsSendTimeExtensionId = 1,
+ kTransportSequenceNumberId,
+};
+} // namespace
+
+class BandwidthEndToEndTest : public test::CallTest {
+ public:
+ BandwidthEndToEndTest() = default;
+};
+
+TEST_F(BandwidthEndToEndTest, ReceiveStreamSendsRemb) {
+ class RembObserver : public test::EndToEndTest {
+ public:
+ RembObserver() : EndToEndTest(kDefaultTimeout) {}
+
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStreamInterface::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
+ send_config->rtp.extensions.clear();
+ send_config->rtp.extensions.push_back(
+ RtpExtension(RtpExtension::kAbsSendTimeUri, kAbsSendTimeExtensionId));
+ }
+
+ Action OnReceiveRtcp(const uint8_t* packet, size_t length) override {
+ test::RtcpPacketParser parser;
+ EXPECT_TRUE(parser.Parse(packet, length));
+
+ if (parser.remb()->num_packets() > 0) {
+ EXPECT_EQ(kReceiverLocalVideoSsrc, parser.remb()->sender_ssrc());
+ EXPECT_LT(0U, parser.remb()->bitrate_bps());
+ EXPECT_EQ(1U, parser.remb()->ssrcs().size());
+ EXPECT_EQ(kVideoSendSsrcs[0], parser.remb()->ssrcs()[0]);
+ observation_complete_.Set();
+ }
+
+ return SEND_PACKET;
+ }
+ void PerformTest() override {
+ EXPECT_TRUE(Wait()) << "Timed out while waiting for a "
+ "receiver RTCP REMB packet to be "
+ "sent.";
+ }
+ } test;
+
+ RunBaseTest(&test);
+}
+
+class BandwidthStatsTest : public test::EndToEndTest {
+ public:
+ BandwidthStatsTest(bool send_side_bwe, TaskQueueBase* task_queue)
+ : EndToEndTest(test::CallTest::kDefaultTimeout),
+ sender_call_(nullptr),
+ receiver_call_(nullptr),
+ has_seen_pacer_delay_(false),
+ send_side_bwe_(send_side_bwe),
+ task_queue_(task_queue) {}
+
+ ~BandwidthStatsTest() override {
+    // Block until all already posted tasks have run, to avoid races when such
+    // a task accesses `this`.
+ SendTask(task_queue_, [] {});
+ }
+
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStreamInterface::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
+ send_config->rtp.extensions.clear();
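+    // Receive-side BWE relies on the abs-send-time header extension (and
+    // REMB), while send-side BWE uses transport-wide sequence numbers with
+    // transport feedback, so register the matching extension for each mode.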
+ if (!send_side_bwe_) {
+ send_config->rtp.extensions.push_back(
+ RtpExtension(RtpExtension::kAbsSendTimeUri, kAbsSendTimeExtensionId));
+ } else {
+ send_config->rtp.extensions.push_back(
+ RtpExtension(RtpExtension::kTransportSequenceNumberUri,
+ kTransportSequenceNumberId));
+ }
+
+ // Force a too high encoder bitrate to make sure we get pacer delay.
+ encoder_config->number_of_streams = 1;
+ encoder_config->max_bitrate_bps = kMaxBitrateBps * 2;
+ encoder_config->simulcast_layers[0].min_bitrate_bps = kMaxBitrateBps * 2;
+ encoder_config->simulcast_layers[0].target_bitrate_bps = kMaxBitrateBps * 2;
+ encoder_config->simulcast_layers[0].max_bitrate_bps = kMaxBitrateBps * 2;
+ }
+
+ void ModifySenderBitrateConfig(BitrateConstraints* bitrate_config) override {
+ bitrate_config->max_bitrate_bps = kMaxBitrateBps;
+ }
+
+ // Called on the pacer thread.
+ Action OnSendRtp(const uint8_t* packet, size_t length) override {
+ // Stats need to be fetched on the thread where the caller objects were
+ // constructed.
+ task_queue_->PostTask([this]() {
+ if (!sender_call_ || !receiver_call_) {
+ return;
+ }
+
+ Call::Stats sender_stats = sender_call_->GetStats();
+ if (!has_seen_pacer_delay_) {
+ has_seen_pacer_delay_ = sender_stats.pacer_delay_ms > 0;
+ }
+
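+      // Complete once the sender reports a bandwidth estimate and pacer delay
+      // has been seen; with receive-side BWE, additionally require that the
+      // receiver reports an estimate.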
+ if (sender_stats.send_bandwidth_bps > 0 && has_seen_pacer_delay_) {
+ Call::Stats receiver_stats = receiver_call_->GetStats();
+ if (send_side_bwe_ || receiver_stats.recv_bandwidth_bps > 0) {
+ observation_complete_.Set();
+ }
+ }
+ });
+
+ return SEND_PACKET;
+ }
+
+ void OnCallsCreated(Call* sender_call, Call* receiver_call) override {
+ sender_call_ = sender_call;
+ receiver_call_ = receiver_call;
+ }
+
+ void OnStreamsStopped() override {
+ sender_call_ = nullptr;
+ receiver_call_ = nullptr;
+ }
+
+ void PerformTest() override {
+ EXPECT_TRUE(Wait()) << "Timed out while waiting for "
+ "non-zero bandwidth stats.";
+ }
+
+ private:
+ static const int kMaxBitrateBps = 3000000;
+ Call* sender_call_;
+ Call* receiver_call_;
+ bool has_seen_pacer_delay_;
+ const bool send_side_bwe_;
+ TaskQueueBase* const task_queue_;
+};
+
+TEST_F(BandwidthEndToEndTest, VerifySendSideBweStats) {
+ BandwidthStatsTest test(true, task_queue());
+ RunBaseTest(&test);
+}
+
+TEST_F(BandwidthEndToEndTest, VerifyRecvSideBweStats) {
+ BandwidthStatsTest test(false, task_queue());
+ RunBaseTest(&test);
+}
+
+// Verifies that it's possible to limit the send BWE by sending a REMB.
+// This is verified by allowing the send BWE to ramp up to >1000 kbps, then
+// having the test generate a REMB of 500 kbps and verifying that the send BWE
+// is reduced to exactly 500 kbps. Then a REMB of 1000 kbps is generated and
+// the test verifies that the send BWE ramps back up to exactly 1000 kbps.
+TEST_F(BandwidthEndToEndTest, RembWithSendSideBwe) {
+ class BweObserver : public test::EndToEndTest {
+ public:
+ explicit BweObserver(TaskQueueBase* task_queue)
+ : EndToEndTest(kDefaultTimeout),
+ sender_call_(nullptr),
+ clock_(Clock::GetRealTimeClock()),
+ sender_ssrc_(0),
+ remb_bitrate_bps_(1000000),
+ state_(kWaitForFirstRampUp),
+ retransmission_rate_limiter_(clock_, 1000),
+ task_queue_(task_queue) {}
+
+ void OnStreamsStopped() override { rtp_rtcp_ = nullptr; }
+
+ void ModifySenderBitrateConfig(
+ BitrateConstraints* bitrate_config) override {
+ // Set a high start bitrate to reduce the test completion time.
+ bitrate_config->start_bitrate_bps = remb_bitrate_bps_;
+ }
+
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStreamInterface::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
+ ASSERT_EQ(1u, send_config->rtp.ssrcs.size());
+ sender_ssrc_ = send_config->rtp.ssrcs[0];
+
+ encoder_config->max_bitrate_bps = 2000000;
+
+ ASSERT_EQ(1u, receive_configs->size());
+ remb_sender_local_ssrc_ = (*receive_configs)[0].rtp.local_ssrc;
+ remb_sender_remote_ssrc_ = (*receive_configs)[0].rtp.remote_ssrc;
+ }
+
+ void OnCallsCreated(Call* sender_call, Call* receiver_call) override {
+ RTC_DCHECK(sender_call);
+ sender_call_ = sender_call;
+ task_queue_->PostTask([this]() { PollStats(); });
+ }
+
+ void OnTransportCreated(
+ test::PacketTransport* /*to_receiver*/,
+ SimulatedNetworkInterface* /*sender_network*/,
+ test::PacketTransport* to_sender,
+ SimulatedNetworkInterface* /*receiver_network*/) override {
+ RtpRtcpInterface::Configuration config;
+ config.receiver_only = true;
+ config.clock = clock_;
+ config.outgoing_transport = to_sender;
+ config.retransmission_rate_limiter = &retransmission_rate_limiter_;
+ config.local_media_ssrc = remb_sender_local_ssrc_;
+
+ rtp_rtcp_ = ModuleRtpRtcpImpl2::Create(config);
+ rtp_rtcp_->SetRemoteSSRC(remb_sender_remote_ssrc_);
+ rtp_rtcp_->SetRTCPStatus(RtcpMode::kReducedSize);
+ }
+
+ void PollStats() {
+ Call::Stats stats = sender_call_->GetStats();
+ switch (state_) {
+ case kWaitForFirstRampUp:
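+          // Once the estimate has ramped up to the initial REMB value, halve
+          // the REMB and expect the estimate to drop to match it.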
+ if (stats.send_bandwidth_bps >= remb_bitrate_bps_) {
+ state_ = kWaitForRemb;
+ remb_bitrate_bps_ /= 2;
+ rtp_rtcp_->SetRemb(
+ remb_bitrate_bps_,
+ std::vector<uint32_t>(&sender_ssrc_, &sender_ssrc_ + 1));
+ rtp_rtcp_->SendRTCP(kRtcpRr);
+ }
+ break;
+
+ case kWaitForRemb:
+ if (stats.send_bandwidth_bps == remb_bitrate_bps_) {
+ state_ = kWaitForSecondRampUp;
+ remb_bitrate_bps_ *= 2;
+ rtp_rtcp_->SetRemb(
+ remb_bitrate_bps_,
+ std::vector<uint32_t>(&sender_ssrc_, &sender_ssrc_ + 1));
+ rtp_rtcp_->SendRTCP(kRtcpRr);
+ }
+ break;
+
+ case kWaitForSecondRampUp:
+ if (stats.send_bandwidth_bps == remb_bitrate_bps_) {
+ observation_complete_.Set();
+ return;
+ }
+ break;
+ }
+
+ task_queue_->PostDelayedTask([this] { PollStats(); },
+ TimeDelta::Seconds(1));
+ }
+
+ void PerformTest() override {
+ EXPECT_TRUE(Wait())
+ << "Timed out while waiting for bitrate to change according to REMB.";
+ }
+
+ private:
+ enum TestState { kWaitForFirstRampUp, kWaitForRemb, kWaitForSecondRampUp };
+
+ Call* sender_call_;
+ Clock* const clock_;
+ uint32_t sender_ssrc_;
+ uint32_t remb_sender_local_ssrc_ = 0;
+ uint32_t remb_sender_remote_ssrc_ = 0;
+ int remb_bitrate_bps_;
+ std::unique_ptr<ModuleRtpRtcpImpl2> rtp_rtcp_;
+ TestState state_;
+ RateLimiter retransmission_rate_limiter_;
+ TaskQueueBase* const task_queue_;
+ } test(task_queue());
+
+ RunBaseTest(&test);
+}
+
+TEST_F(BandwidthEndToEndTest, ReportsSetEncoderRates) {
+  // If these field trials are on, we get lower bitrates than expected by this
+  // test, due to the packetization overhead and encoder pushback.
+ webrtc::test::ScopedFieldTrials field_trials(
+ std::string(field_trial::GetFieldTrialString()) +
+ "WebRTC-VideoRateControl/bitrate_adjuster:false/");
+ class EncoderRateStatsTest : public test::EndToEndTest,
+ public test::FakeEncoder {
+ public:
+ explicit EncoderRateStatsTest(TaskQueueBase* task_queue)
+ : EndToEndTest(kDefaultTimeout),
+ FakeEncoder(Clock::GetRealTimeClock()),
+ task_queue_(task_queue),
+ send_stream_(nullptr),
+ encoder_factory_(this),
+ bitrate_allocator_factory_(
+ CreateBuiltinVideoBitrateAllocatorFactory()),
+ bitrate_kbps_(0) {}
+
+ void OnVideoStreamsCreated(VideoSendStream* send_stream,
+ const std::vector<VideoReceiveStreamInterface*>&
+ receive_streams) override {
+ send_stream_ = send_stream;
+ }
+
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStreamInterface::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
+ send_config->encoder_settings.encoder_factory = &encoder_factory_;
+ send_config->encoder_settings.bitrate_allocator_factory =
+ bitrate_allocator_factory_.get();
+ RTC_DCHECK_EQ(1, encoder_config->number_of_streams);
+ }
+
+ void SetRates(const RateControlParameters& parameters) override {
+ // Make sure not to trigger on any default zero bitrates.
+ if (parameters.bitrate.get_sum_bps() == 0)
+ return;
+ MutexLock lock(&mutex_);
+ bitrate_kbps_ = parameters.bitrate.get_sum_kbps();
+ observation_complete_.Set();
+ }
+
+ void PerformTest() override {
+ ASSERT_TRUE(Wait())
+ << "Timed out while waiting for encoder SetRates() call.";
+
+ SendTask(task_queue_, [this]() {
+ WaitForEncoderTargetBitrateMatchStats();
+ send_stream_->Stop();
+ WaitForStatsReportZeroTargetBitrate();
+ send_stream_->Start();
+ WaitForEncoderTargetBitrateMatchStats();
+ });
+ }
+
+ void WaitForEncoderTargetBitrateMatchStats() {
+ for (int i = 0; i < kDefaultTimeout.ms(); ++i) {
+ VideoSendStream::Stats stats = send_stream_->GetStats();
+ {
+ MutexLock lock(&mutex_);
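+        // Round the reported bps value to the nearest kbps before comparing
+        // it against the rate last passed to the encoder in SetRates().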
+ if ((stats.target_media_bitrate_bps + 500) / 1000 ==
+ static_cast<int>(bitrate_kbps_)) {
+ return;
+ }
+ }
+ SleepMs(1);
+ }
+ FAIL()
+ << "Timed out waiting for stats reporting the currently set bitrate.";
+ }
+
+ void WaitForStatsReportZeroTargetBitrate() {
+ for (int i = 0; i < kDefaultTimeout.ms(); ++i) {
+ if (send_stream_->GetStats().target_media_bitrate_bps == 0) {
+ return;
+ }
+ SleepMs(1);
+ }
+ FAIL() << "Timed out waiting for stats reporting zero bitrate.";
+ }
+
+ private:
+ TaskQueueBase* const task_queue_;
+ Mutex mutex_;
+ VideoSendStream* send_stream_;
+ test::VideoEncoderProxyFactory encoder_factory_;
+ std::unique_ptr<VideoBitrateAllocatorFactory> bitrate_allocator_factory_;
+ uint32_t bitrate_kbps_ RTC_GUARDED_BY(mutex_);
+ } test(task_queue());
+
+ RunBaseTest(&test);
+}
+} // namespace webrtc
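The tests in the remaining files largely follow the same harness as bandwidth_tests.cc above: a test::EndToEndTest subclass inspects RTP/RTCP traffic through hooks such as OnSendRtp()/OnReceiveRtcp(), sets observation_complete_ once the behaviour under test has been seen, and is driven from a test::CallTest fixture via RunBaseTest(). As a rough, simplified sketch (not part of this change set, with a hypothetical MinimalObserver name), the pattern looks like this:

class MinimalObserver : public test::EndToEndTest {
 public:
  MinimalObserver() : EndToEndTest(test::CallTest::kDefaultTimeout) {}

 private:
  // Inspect every outgoing RTP packet and let it through unchanged.
  Action OnSendRtp(const uint8_t* packet, size_t length) override {
    RtpPacket rtp_packet;
    EXPECT_TRUE(rtp_packet.Parse(packet, length));
    if (rtp_packet.Ssrc() == test::CallTest::kVideoSendSsrcs[0])
      observation_complete_.Set();  // The condition under test was observed.
    return SEND_PACKET;
  }

  // Block until observation_complete_ is set or the timeout expires.
  void PerformTest() override {
    EXPECT_TRUE(Wait()) << "Timed out waiting for media on the primary SSRC.";
  }
};

// Typical use from a TEST_F body in a test::CallTest fixture:
//   MinimalObserver observer;
//   RunBaseTest(&observer);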
diff --git a/third_party/libwebrtc/video/end_to_end_tests/call_operation_tests.cc b/third_party/libwebrtc/video/end_to_end_tests/call_operation_tests.cc
new file mode 100644
index 0000000000..f5b32388b1
--- /dev/null
+++ b/third_party/libwebrtc/video/end_to_end_tests/call_operation_tests.cc
@@ -0,0 +1,195 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+
+#include "api/test/create_frame_generator.h"
+#include "api/test/frame_generator_interface.h"
+#include "api/test/simulated_network.h"
+#include "call/fake_network_pipe.h"
+#include "call/simulated_network.h"
+#include "rtc_base/task_queue_for_test.h"
+#include "system_wrappers/include/sleep.h"
+#include "test/call_test.h"
+#include "test/field_trial.h"
+#include "test/frame_forwarder.h"
+#include "test/gtest.h"
+#include "test/null_transport.h"
+
+namespace webrtc {
+
+class CallOperationEndToEndTest : public test::CallTest {};
+
+TEST_F(CallOperationEndToEndTest, ReceiverCanBeStartedTwice) {
+ CreateCalls();
+
+ test::NullTransport transport;
+ CreateSendConfig(1, 0, 0, &transport);
+ CreateMatchingReceiveConfigs(&transport);
+
+ CreateVideoStreams();
+
+ video_receive_streams_[0]->Start();
+ video_receive_streams_[0]->Start();
+
+ DestroyStreams();
+}
+
+TEST_F(CallOperationEndToEndTest, ReceiverCanBeStoppedTwice) {
+ CreateCalls();
+
+ test::NullTransport transport;
+ CreateSendConfig(1, 0, 0, &transport);
+ CreateMatchingReceiveConfigs(&transport);
+
+ CreateVideoStreams();
+
+ video_receive_streams_[0]->Stop();
+ video_receive_streams_[0]->Stop();
+
+ DestroyStreams();
+}
+
+TEST_F(CallOperationEndToEndTest, ReceiverCanBeStoppedAndRestarted) {
+ CreateCalls();
+
+ test::NullTransport transport;
+ CreateSendConfig(1, 0, 0, &transport);
+ CreateMatchingReceiveConfigs(&transport);
+
+ CreateVideoStreams();
+
+ video_receive_streams_[0]->Stop();
+ video_receive_streams_[0]->Start();
+ video_receive_streams_[0]->Stop();
+
+ DestroyStreams();
+}
+
+TEST_F(CallOperationEndToEndTest, RendersSingleDelayedFrame) {
+ static const int kWidth = 320;
+ static const int kHeight = 240;
+ // This constant is chosen to be higher than the timeout in the video_render
+ // module. This makes sure that frames aren't dropped if there are no other
+ // frames in the queue.
+ static const int kRenderDelayMs = 1000;
+
+ class Renderer : public rtc::VideoSinkInterface<VideoFrame> {
+ public:
+ void OnFrame(const VideoFrame& video_frame) override {
+ SleepMs(kRenderDelayMs);
+ event_.Set();
+ }
+
+ bool Wait() { return event_.Wait(kDefaultTimeout); }
+
+ rtc::Event event_;
+ } renderer;
+
+ test::FrameForwarder frame_forwarder;
+
+ SendTask(
+ task_queue(), [this, &renderer, &frame_forwarder]() {
+ CreateCalls();
+ CreateSendTransport(BuiltInNetworkBehaviorConfig(),
+ /*observer=*/nullptr);
+
+ CreateReceiveTransport(BuiltInNetworkBehaviorConfig(),
+ /*observer=*/nullptr);
+ CreateSendConfig(1, 0, 0);
+ CreateMatchingReceiveConfigs();
+
+ video_receive_configs_[0].renderer = &renderer;
+
+ CreateVideoStreams();
+ Start();
+
+        // Create frames that are smaller than the send width/height; this is
+        // done to check that the callbacks are done after processing video.
+ std::unique_ptr<test::FrameGeneratorInterface> frame_generator(
+ test::CreateSquareFrameGenerator(kWidth, kHeight, absl::nullopt,
+ absl::nullopt));
+ GetVideoSendStream()->SetSource(
+ &frame_forwarder, DegradationPreference::MAINTAIN_FRAMERATE);
+
+ test::FrameGeneratorInterface::VideoFrameData frame_data =
+ frame_generator->NextFrame();
+ VideoFrame frame = VideoFrame::Builder()
+ .set_video_frame_buffer(frame_data.buffer)
+ .set_update_rect(frame_data.update_rect)
+ .build();
+ frame_forwarder.IncomingCapturedFrame(frame);
+ });
+
+ EXPECT_TRUE(renderer.Wait())
+ << "Timed out while waiting for the frame to render.";
+
+ SendTask(task_queue(), [this]() {
+ Stop();
+ DestroyStreams();
+ DestroyCalls();
+ });
+}
+
+TEST_F(CallOperationEndToEndTest, TransmitsFirstFrame) {
+ class Renderer : public rtc::VideoSinkInterface<VideoFrame> {
+ public:
+ void OnFrame(const VideoFrame& video_frame) override { event_.Set(); }
+
+ bool Wait() { return event_.Wait(kDefaultTimeout); }
+
+ rtc::Event event_;
+ } renderer;
+
+ std::unique_ptr<test::FrameGeneratorInterface> frame_generator;
+ test::FrameForwarder frame_forwarder;
+
+ std::unique_ptr<test::DirectTransport> sender_transport;
+ std::unique_ptr<test::DirectTransport> receiver_transport;
+
+ SendTask(
+ task_queue(), [this, &renderer, &frame_generator, &frame_forwarder]() {
+ CreateCalls();
+ CreateSendTransport(BuiltInNetworkBehaviorConfig(),
+ /*observer=*/nullptr);
+ CreateReceiveTransport(BuiltInNetworkBehaviorConfig(),
+ /*observer=*/nullptr);
+
+ CreateSendConfig(1, 0, 0);
+ CreateMatchingReceiveConfigs();
+ video_receive_configs_[0].renderer = &renderer;
+
+ CreateVideoStreams();
+ Start();
+
+ frame_generator = test::CreateSquareFrameGenerator(
+ kDefaultWidth, kDefaultHeight, absl::nullopt, absl::nullopt);
+ GetVideoSendStream()->SetSource(
+ &frame_forwarder, DegradationPreference::MAINTAIN_FRAMERATE);
+ test::FrameGeneratorInterface::VideoFrameData frame_data =
+ frame_generator->NextFrame();
+ VideoFrame frame = VideoFrame::Builder()
+ .set_video_frame_buffer(frame_data.buffer)
+ .set_update_rect(frame_data.update_rect)
+ .build();
+ frame_forwarder.IncomingCapturedFrame(frame);
+ });
+
+ EXPECT_TRUE(renderer.Wait())
+ << "Timed out while waiting for the frame to render.";
+
+ SendTask(task_queue(), [this]() {
+ Stop();
+ DestroyStreams();
+ DestroyCalls();
+ });
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/video/end_to_end_tests/codec_tests.cc b/third_party/libwebrtc/video/end_to_end_tests/codec_tests.cc
new file mode 100644
index 0000000000..53ec9f5b17
--- /dev/null
+++ b/third_party/libwebrtc/video/end_to_end_tests/codec_tests.cc
@@ -0,0 +1,288 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+#include "absl/types/optional.h"
+#include "api/test/video/function_video_encoder_factory.h"
+#include "api/video/color_space.h"
+#include "api/video/video_rotation.h"
+#include "common_video/test/utilities.h"
+#include "media/engine/internal_decoder_factory.h"
+#include "media/engine/internal_encoder_factory.h"
+#include "modules/video_coding/codecs/h264/include/h264.h"
+#include "modules/video_coding/codecs/multiplex/include/multiplex_decoder_adapter.h"
+#include "modules/video_coding/codecs/multiplex/include/multiplex_encoder_adapter.h"
+#include "modules/video_coding/codecs/vp8/include/vp8.h"
+#include "modules/video_coding/codecs/vp9/include/vp9.h"
+#include "test/call_test.h"
+#include "test/encoder_settings.h"
+#include "test/field_trial.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace {
+enum : int { // The first valid value is 1.
+ kColorSpaceExtensionId = 1,
+ kVideoRotationExtensionId,
+};
+} // namespace
+
+class CodecEndToEndTest : public test::CallTest {
+ public:
+ CodecEndToEndTest() {
+ RegisterRtpExtension(
+ RtpExtension(RtpExtension::kColorSpaceUri, kColorSpaceExtensionId));
+ RegisterRtpExtension(RtpExtension(RtpExtension::kVideoRotationUri,
+ kVideoRotationExtensionId));
+ }
+};
+
+class CodecObserver : public test::EndToEndTest,
+ public rtc::VideoSinkInterface<VideoFrame> {
+ public:
+ CodecObserver(int no_frames_to_wait_for,
+ VideoRotation rotation_to_test,
+ absl::optional<ColorSpace> color_space_to_test,
+ const std::string& payload_name,
+ VideoEncoderFactory* encoder_factory,
+ VideoDecoderFactory* decoder_factory)
+ : EndToEndTest(4 * CodecEndToEndTest::kDefaultTimeout),
+ // TODO(hta): This timeout (120 seconds) is excessive.
+ // https://bugs.webrtc.org/6830
+ no_frames_to_wait_for_(no_frames_to_wait_for),
+ expected_rotation_(rotation_to_test),
+ expected_color_space_(color_space_to_test),
+ payload_name_(payload_name),
+ encoder_factory_(encoder_factory),
+ decoder_factory_(decoder_factory),
+ frame_counter_(0) {}
+
+ void PerformTest() override {
+ EXPECT_TRUE(Wait())
+ << "Timed out while waiting for enough frames to be decoded.";
+ }
+
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStreamInterface::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
+ encoder_config->codec_type = PayloadStringToCodecType(payload_name_);
+ send_config->encoder_settings.encoder_factory = encoder_factory_;
+ send_config->rtp.payload_name = payload_name_;
+ send_config->rtp.payload_type = test::CallTest::kVideoSendPayloadType;
+
+ (*receive_configs)[0].renderer = this;
+ (*receive_configs)[0].decoders.resize(1);
+ (*receive_configs)[0].decoders[0].payload_type =
+ send_config->rtp.payload_type;
+ (*receive_configs)[0].decoders[0].video_format =
+ SdpVideoFormat(send_config->rtp.payload_name);
+ (*receive_configs)[0].decoder_factory = decoder_factory_;
+ }
+
+ void OnFrame(const VideoFrame& video_frame) override {
+ EXPECT_EQ(expected_rotation_, video_frame.rotation());
+ // Test only if explicit color space has been specified since otherwise the
+ // color space is codec dependent.
+ if (expected_color_space_) {
+ EXPECT_EQ(expected_color_space_,
+ video_frame.color_space()
+ ? absl::make_optional(*video_frame.color_space())
+ : absl::nullopt);
+ }
+ if (++frame_counter_ == no_frames_to_wait_for_)
+ observation_complete_.Set();
+ }
+
+ void OnFrameGeneratorCapturerCreated(
+ test::FrameGeneratorCapturer* frame_generator_capturer) override {
+ frame_generator_capturer->SetFakeRotation(expected_rotation_);
+ frame_generator_capturer->SetFakeColorSpace(expected_color_space_);
+ }
+
+ private:
+ int no_frames_to_wait_for_;
+ VideoRotation expected_rotation_;
+ absl::optional<ColorSpace> expected_color_space_;
+ std::string payload_name_;
+ VideoEncoderFactory* encoder_factory_;
+ VideoDecoderFactory* decoder_factory_;
+ int frame_counter_;
+};
+
+TEST_F(CodecEndToEndTest, SendsAndReceivesVP8) {
+ test::FunctionVideoEncoderFactory encoder_factory(
+ []() { return VP8Encoder::Create(); });
+ test::FunctionVideoDecoderFactory decoder_factory(
+ []() { return VP8Decoder::Create(); });
+ CodecObserver test(5, kVideoRotation_0, absl::nullopt, "VP8",
+ &encoder_factory, &decoder_factory);
+ RunBaseTest(&test);
+}
+
+TEST_F(CodecEndToEndTest, SendsAndReceivesVP8Rotation90) {
+ test::FunctionVideoEncoderFactory encoder_factory(
+ []() { return VP8Encoder::Create(); });
+ test::FunctionVideoDecoderFactory decoder_factory(
+ []() { return VP8Decoder::Create(); });
+ CodecObserver test(5, kVideoRotation_90, absl::nullopt, "VP8",
+ &encoder_factory, &decoder_factory);
+ RunBaseTest(&test);
+}
+
+#if defined(RTC_ENABLE_VP9)
+TEST_F(CodecEndToEndTest, SendsAndReceivesVP9) {
+ test::FunctionVideoEncoderFactory encoder_factory(
+ []() { return VP9Encoder::Create(); });
+ test::FunctionVideoDecoderFactory decoder_factory(
+ []() { return VP9Decoder::Create(); });
+ CodecObserver test(500, kVideoRotation_0, absl::nullopt, "VP9",
+ &encoder_factory, &decoder_factory);
+ RunBaseTest(&test);
+}
+
+TEST_F(CodecEndToEndTest, SendsAndReceivesVP9VideoRotation90) {
+ test::FunctionVideoEncoderFactory encoder_factory(
+ []() { return VP9Encoder::Create(); });
+ test::FunctionVideoDecoderFactory decoder_factory(
+ []() { return VP9Decoder::Create(); });
+ CodecObserver test(5, kVideoRotation_90, absl::nullopt, "VP9",
+ &encoder_factory, &decoder_factory);
+ RunBaseTest(&test);
+}
+
+TEST_F(CodecEndToEndTest, SendsAndReceivesVP9ExplicitColorSpace) {
+ test::FunctionVideoEncoderFactory encoder_factory(
+ []() { return VP9Encoder::Create(); });
+ test::FunctionVideoDecoderFactory decoder_factory(
+ []() { return VP9Decoder::Create(); });
+ CodecObserver test(5, kVideoRotation_90,
+ CreateTestColorSpace(/*with_hdr_metadata=*/false), "VP9",
+ &encoder_factory, &decoder_factory);
+ RunBaseTest(&test);
+}
+
+TEST_F(CodecEndToEndTest,
+ SendsAndReceivesVP9ExplicitColorSpaceWithHdrMetadata) {
+ test::FunctionVideoEncoderFactory encoder_factory(
+ []() { return VP9Encoder::Create(); });
+ test::FunctionVideoDecoderFactory decoder_factory(
+ []() { return VP9Decoder::Create(); });
+ CodecObserver test(5, kVideoRotation_90,
+ CreateTestColorSpace(/*with_hdr_metadata=*/true), "VP9",
+ &encoder_factory, &decoder_factory);
+ RunBaseTest(&test);
+}
+
+// Multiplex tests use VP9 as the underlying implementation.
+TEST_F(CodecEndToEndTest, SendsAndReceivesMultiplex) {
+ InternalEncoderFactory internal_encoder_factory;
+ InternalDecoderFactory internal_decoder_factory;
+ test::FunctionVideoEncoderFactory encoder_factory(
+ [&internal_encoder_factory]() {
+ return std::make_unique<MultiplexEncoderAdapter>(
+ &internal_encoder_factory, SdpVideoFormat(cricket::kVp9CodecName));
+ });
+ test::FunctionVideoDecoderFactory decoder_factory(
+ [&internal_decoder_factory]() {
+ return std::make_unique<MultiplexDecoderAdapter>(
+ &internal_decoder_factory, SdpVideoFormat(cricket::kVp9CodecName));
+ });
+
+ CodecObserver test(5, kVideoRotation_0, absl::nullopt, "multiplex",
+ &encoder_factory, &decoder_factory);
+ RunBaseTest(&test);
+}
+
+TEST_F(CodecEndToEndTest, SendsAndReceivesMultiplexVideoRotation90) {
+ InternalEncoderFactory internal_encoder_factory;
+ InternalDecoderFactory internal_decoder_factory;
+ test::FunctionVideoEncoderFactory encoder_factory(
+ [&internal_encoder_factory]() {
+ return std::make_unique<MultiplexEncoderAdapter>(
+ &internal_encoder_factory, SdpVideoFormat(cricket::kVp9CodecName));
+ });
+ test::FunctionVideoDecoderFactory decoder_factory(
+ [&internal_decoder_factory]() {
+ return std::make_unique<MultiplexDecoderAdapter>(
+ &internal_decoder_factory, SdpVideoFormat(cricket::kVp9CodecName));
+ });
+ CodecObserver test(5, kVideoRotation_90, absl::nullopt, "multiplex",
+ &encoder_factory, &decoder_factory);
+ RunBaseTest(&test);
+}
+
+#endif // defined(RTC_ENABLE_VP9)
+
+#if defined(WEBRTC_USE_H264)
+class EndToEndTestH264 : public test::CallTest,
+ public ::testing::WithParamInterface<std::string> {
+ public:
+ EndToEndTestH264() : field_trial_(GetParam()) {
+ RegisterRtpExtension(RtpExtension(RtpExtension::kVideoRotationUri,
+ kVideoRotationExtensionId));
+ }
+
+ private:
+ test::ScopedFieldTrials field_trial_;
+};
+
+INSTANTIATE_TEST_SUITE_P(
+ SpsPpsIdrIsKeyframe,
+ EndToEndTestH264,
+ ::testing::Values("WebRTC-SpsPpsIdrIsH264Keyframe/Disabled/",
+ "WebRTC-SpsPpsIdrIsH264Keyframe/Enabled/"));
+
+TEST_P(EndToEndTestH264, SendsAndReceivesH264) {
+ test::FunctionVideoEncoderFactory encoder_factory(
+ []() { return H264Encoder::Create(cricket::VideoCodec("H264")); });
+ test::FunctionVideoDecoderFactory decoder_factory(
+ []() { return H264Decoder::Create(); });
+ CodecObserver test(500, kVideoRotation_0, absl::nullopt, "H264",
+ &encoder_factory, &decoder_factory);
+ RunBaseTest(&test);
+}
+
+TEST_P(EndToEndTestH264, SendsAndReceivesH264VideoRotation90) {
+ test::FunctionVideoEncoderFactory encoder_factory(
+ []() { return H264Encoder::Create(cricket::VideoCodec("H264")); });
+ test::FunctionVideoDecoderFactory decoder_factory(
+ []() { return H264Decoder::Create(); });
+ CodecObserver test(5, kVideoRotation_90, absl::nullopt, "H264",
+ &encoder_factory, &decoder_factory);
+ RunBaseTest(&test);
+}
+
+TEST_P(EndToEndTestH264, SendsAndReceivesH264PacketizationMode0) {
+ cricket::VideoCodec codec = cricket::VideoCodec("H264");
+ codec.SetParam(cricket::kH264FmtpPacketizationMode, "0");
+ test::FunctionVideoEncoderFactory encoder_factory(
+ [codec]() { return H264Encoder::Create(codec); });
+ test::FunctionVideoDecoderFactory decoder_factory(
+ []() { return H264Decoder::Create(); });
+ CodecObserver test(500, kVideoRotation_0, absl::nullopt, "H264",
+ &encoder_factory, &decoder_factory);
+ RunBaseTest(&test);
+}
+
+TEST_P(EndToEndTestH264, SendsAndReceivesH264PacketizationMode1) {
+ cricket::VideoCodec codec = cricket::VideoCodec("H264");
+ codec.SetParam(cricket::kH264FmtpPacketizationMode, "1");
+ test::FunctionVideoEncoderFactory encoder_factory(
+ [codec]() { return H264Encoder::Create(codec); });
+ test::FunctionVideoDecoderFactory decoder_factory(
+ []() { return H264Decoder::Create(); });
+ CodecObserver test(500, kVideoRotation_0, absl::nullopt, "H264",
+ &encoder_factory, &decoder_factory);
+ RunBaseTest(&test);
+}
+#endif // defined(WEBRTC_USE_H264)
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/video/end_to_end_tests/config_tests.cc b/third_party/libwebrtc/video/end_to_end_tests/config_tests.cc
new file mode 100644
index 0000000000..7e27448991
--- /dev/null
+++ b/third_party/libwebrtc/video/end_to_end_tests/config_tests.cc
@@ -0,0 +1,113 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <map>
+#include <vector>
+
+#include "api/crypto/crypto_options.h"
+#include "api/rtp_headers.h"
+#include "call/flexfec_receive_stream.h"
+#include "call/rtp_config.h"
+#include "call/video_receive_stream.h"
+#include "call/video_send_stream.h"
+#include "test/call_test.h"
+#include "test/gtest.h"
+#include "test/null_transport.h"
+
+namespace webrtc {
+
+class ConfigEndToEndTest : public test::CallTest {};
+
+namespace {
+void VerifyEmptyNackConfig(const NackConfig& config) {
+ EXPECT_EQ(0, config.rtp_history_ms)
+ << "Enabling NACK requires rtcp-fb: nack negotiation.";
+}
+
+void VerifyEmptyUlpfecConfig(const UlpfecConfig& config) {
+ EXPECT_EQ(-1, config.ulpfec_payload_type)
+ << "Enabling ULPFEC requires rtpmap: ulpfec negotiation.";
+ EXPECT_EQ(-1, config.red_payload_type)
+ << "Enabling ULPFEC requires rtpmap: red negotiation.";
+ EXPECT_EQ(-1, config.red_rtx_payload_type)
+ << "Enabling RTX in ULPFEC requires rtpmap: rtx negotiation.";
+}
+
+void VerifyEmptyFlexfecConfig(const RtpConfig::Flexfec& config) {
+ EXPECT_EQ(-1, config.payload_type)
+ << "Enabling FlexFEC requires rtpmap: flexfec negotiation.";
+ EXPECT_EQ(0U, config.ssrc)
+ << "Enabling FlexFEC requires ssrc-group: FEC-FR negotiation.";
+ EXPECT_TRUE(config.protected_media_ssrcs.empty())
+ << "Enabling FlexFEC requires ssrc-group: FEC-FR negotiation.";
+}
+} // namespace
+
+TEST_F(ConfigEndToEndTest, VerifyDefaultSendConfigParameters) {
+ VideoSendStream::Config default_send_config(nullptr);
+  EXPECT_FALSE(default_send_config.rtp.lntf.enabled)
+      << "Enabling LNTF requires rtcp-fb: goog-lntf negotiation.";
+  EXPECT_EQ(0, default_send_config.rtp.nack.rtp_history_ms)
+      << "Enabling NACK requires rtcp-fb: nack negotiation.";
+  EXPECT_TRUE(default_send_config.rtp.rtx.ssrcs.empty())
+      << "Enabling RTX requires rtpmap: rtx negotiation.";
+  EXPECT_TRUE(default_send_config.rtp.extensions.empty())
+      << "Enabling RTP extensions requires negotiation.";
+ EXPECT_EQ(nullptr, default_send_config.frame_encryptor)
+ << "Enabling Frame Encryption requires a frame encryptor to be attached";
+ EXPECT_FALSE(
+ default_send_config.crypto_options.sframe.require_frame_encryption)
+ << "Enabling Require Frame Encryption means an encryptor must be "
+ "attached";
+
+ VerifyEmptyNackConfig(default_send_config.rtp.nack);
+ VerifyEmptyUlpfecConfig(default_send_config.rtp.ulpfec);
+ VerifyEmptyFlexfecConfig(default_send_config.rtp.flexfec);
+}
+
+TEST_F(ConfigEndToEndTest, VerifyDefaultVideoReceiveConfigParameters) {
+ VideoReceiveStreamInterface::Config default_receive_config(nullptr);
+  EXPECT_EQ(RtcpMode::kCompound, default_receive_config.rtp.rtcp_mode)
+      << "Reduced-size RTCP requires rtcp-rsize to be negotiated.";
+  EXPECT_FALSE(default_receive_config.rtp.lntf.enabled)
+      << "Enabling LNTF requires rtcp-fb: goog-lntf negotiation.";
+ EXPECT_FALSE(
+ default_receive_config.rtp.rtcp_xr.receiver_reference_time_report)
+ << "RTCP XR settings require rtcp-xr to be negotiated.";
+ EXPECT_EQ(0U, default_receive_config.rtp.rtx_ssrc)
+ << "Enabling RTX requires ssrc-group: FID negotiation";
+ EXPECT_TRUE(default_receive_config.rtp.rtx_associated_payload_types.empty())
+ << "Enabling RTX requires rtpmap: rtx negotiation.";
+  EXPECT_TRUE(default_receive_config.rtp.extensions.empty())
+      << "Enabling RTP extensions requires negotiation.";
+ VerifyEmptyNackConfig(default_receive_config.rtp.nack);
+ EXPECT_EQ(-1, default_receive_config.rtp.ulpfec_payload_type)
+ << "Enabling ULPFEC requires rtpmap: ulpfec negotiation.";
+ EXPECT_EQ(-1, default_receive_config.rtp.red_payload_type)
+ << "Enabling ULPFEC requires rtpmap: red negotiation.";
+ EXPECT_EQ(nullptr, default_receive_config.frame_decryptor)
+ << "Enabling Frame Decryption requires a frame decryptor to be attached";
+ EXPECT_FALSE(
+ default_receive_config.crypto_options.sframe.require_frame_encryption)
+ << "Enabling Require Frame Encryption means a decryptor must be attached";
+}
+
+TEST_F(ConfigEndToEndTest, VerifyDefaultFlexfecReceiveConfigParameters) {
+ test::NullTransport rtcp_send_transport;
+ FlexfecReceiveStream::Config default_receive_config(&rtcp_send_transport);
+ EXPECT_EQ(-1, default_receive_config.payload_type)
+ << "Enabling FlexFEC requires rtpmap: flexfec negotiation.";
+ EXPECT_EQ(0U, default_receive_config.rtp.remote_ssrc)
+ << "Enabling FlexFEC requires ssrc-group: FEC-FR negotiation.";
+ EXPECT_TRUE(default_receive_config.protected_media_ssrcs.empty())
+ << "Enabling FlexFEC requires ssrc-group: FEC-FR negotiation.";
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/video/end_to_end_tests/extended_reports_tests.cc b/third_party/libwebrtc/video/end_to_end_tests/extended_reports_tests.cc
new file mode 100644
index 0000000000..2897212e0b
--- /dev/null
+++ b/third_party/libwebrtc/video/end_to_end_tests/extended_reports_tests.cc
@@ -0,0 +1,264 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <memory>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/rtp_headers.h"
+#include "api/task_queue/task_queue_base.h"
+#include "api/test/simulated_network.h"
+#include "api/video_codecs/sdp_video_format.h"
+#include "call/call.h"
+#include "call/fake_network_pipe.h"
+#include "call/rtp_config.h"
+#include "call/simulated_network.h"
+#include "call/simulated_packet_receiver.h"
+#include "call/video_receive_stream.h"
+#include "call/video_send_stream.h"
+#include "modules/rtp_rtcp/source/rtcp_packet/dlrr.h"
+#include "modules/rtp_rtcp/source/rtcp_packet/target_bitrate.h"
+#include "rtc_base/event.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/thread_annotations.h"
+#include "system_wrappers/include/clock.h"
+#include "test/call_test.h"
+#include "test/field_trial.h"
+#include "test/gtest.h"
+#include "test/rtcp_packet_parser.h"
+#include "test/rtp_rtcp_observer.h"
+#include "video/config/video_encoder_config.h"
+
+namespace webrtc {
+namespace {
+enum : int { // The first valid value is 1.
+ kColorSpaceExtensionId = 1,
+ kTransportSequenceNumberExtensionId,
+};
+} // namespace
+
+class ExtendedReportsEndToEndTest : public test::CallTest {
+ public:
+ ExtendedReportsEndToEndTest() {
+ RegisterRtpExtension(RtpExtension(RtpExtension::kTransportSequenceNumberUri,
+ kTransportSequenceNumberExtensionId));
+ }
+};
+
+class RtcpXrObserver : public test::EndToEndTest {
+ public:
+ RtcpXrObserver(bool enable_rrtr,
+ bool expect_target_bitrate,
+ bool enable_zero_target_bitrate,
+ VideoEncoderConfig::ContentType content_type)
+ : EndToEndTest(test::CallTest::kDefaultTimeout),
+ enable_rrtr_(enable_rrtr),
+ expect_target_bitrate_(expect_target_bitrate),
+ enable_zero_target_bitrate_(enable_zero_target_bitrate),
+ content_type_(content_type),
+ sent_rtcp_sr_(0),
+ sent_rtcp_rr_(0),
+ sent_rtcp_rrtr_(0),
+ sent_rtcp_target_bitrate_(false),
+ sent_zero_rtcp_target_bitrate_(false),
+ sent_rtcp_dlrr_(0),
+ send_simulated_network_(nullptr) {
+ forward_transport_config_.link_capacity_kbps = 500;
+ forward_transport_config_.queue_delay_ms = 0;
+ forward_transport_config_.loss_percent = 0;
+ }
+
+ private:
+ // Receive stream should send RR packets (and RRTR packets if enabled).
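+  // An RRTR (receiver reference time report, RFC 3611) lets the sender
+  // compute the round-trip time towards the receiver and answer with DLRR
+  // blocks.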
+ Action OnReceiveRtcp(const uint8_t* packet, size_t length) override {
+ MutexLock lock(&mutex_);
+ test::RtcpPacketParser parser;
+ EXPECT_TRUE(parser.Parse(packet, length));
+
+ sent_rtcp_rr_ += parser.receiver_report()->num_packets();
+ EXPECT_EQ(0, parser.sender_report()->num_packets());
+ EXPECT_GE(1, parser.xr()->num_packets());
+ if (parser.xr()->num_packets() > 0) {
+ if (parser.xr()->rrtr())
+ ++sent_rtcp_rrtr_;
+ EXPECT_FALSE(parser.xr()->dlrr());
+ }
+
+ return SEND_PACKET;
+ }
+ // Send stream should send SR packets (and DLRR packets if enabled).
+ Action OnSendRtcp(const uint8_t* packet, size_t length) override {
+ MutexLock lock(&mutex_);
+ test::RtcpPacketParser parser;
+ EXPECT_TRUE(parser.Parse(packet, length));
+
+ if (parser.sender_ssrc() == test::CallTest::kVideoSendSsrcs[1] &&
+ enable_zero_target_bitrate_) {
+ // Reduce bandwidth restriction to disable second stream after it was
+ // enabled for some time.
+ forward_transport_config_.link_capacity_kbps = 200;
+ send_simulated_network_->SetConfig(forward_transport_config_);
+ }
+
+ sent_rtcp_sr_ += parser.sender_report()->num_packets();
+ EXPECT_LE(parser.xr()->num_packets(), 1);
+ if (parser.xr()->num_packets() > 0) {
+ EXPECT_FALSE(parser.xr()->rrtr());
+ if (parser.xr()->dlrr())
+ ++sent_rtcp_dlrr_;
+ if (parser.xr()->target_bitrate()) {
+ sent_rtcp_target_bitrate_ = true;
+ auto target_bitrates =
+ parser.xr()->target_bitrate()->GetTargetBitrates();
+ if (target_bitrates.empty()) {
+ sent_zero_rtcp_target_bitrate_ = true;
+ }
+ for (const rtcp::TargetBitrate::BitrateItem& item : target_bitrates) {
+ if (item.target_bitrate_kbps == 0) {
+ sent_zero_rtcp_target_bitrate_ = true;
+ break;
+ }
+ }
+ }
+ }
+
+ if (sent_rtcp_sr_ > kNumRtcpReportPacketsToObserve &&
+ sent_rtcp_rr_ > kNumRtcpReportPacketsToObserve &&
+ (sent_rtcp_target_bitrate_ || !expect_target_bitrate_) &&
+ (sent_zero_rtcp_target_bitrate_ || !enable_zero_target_bitrate_)) {
+ if (enable_rrtr_) {
+ EXPECT_GT(sent_rtcp_rrtr_, 0);
+ EXPECT_GT(sent_rtcp_dlrr_, 0);
+ } else {
+ EXPECT_EQ(sent_rtcp_rrtr_, 0);
+ EXPECT_EQ(sent_rtcp_dlrr_, 0);
+ }
+ EXPECT_EQ(expect_target_bitrate_, sent_rtcp_target_bitrate_);
+ EXPECT_EQ(enable_zero_target_bitrate_, sent_zero_rtcp_target_bitrate_);
+ observation_complete_.Set();
+ }
+ return SEND_PACKET;
+ }
+
+ size_t GetNumVideoStreams() const override {
+ // When sending a zero target bitrate, we use two spatial layers so that
+ // we'll still have a layer with non-zero bitrate.
+ return enable_zero_target_bitrate_ ? 2 : 1;
+ }
+
+ BuiltInNetworkBehaviorConfig GetSendTransportConfig() const override {
+ return forward_transport_config_;
+ }
+
+ void OnTransportCreated(
+ test::PacketTransport* to_receiver,
+ SimulatedNetworkInterface* sender_network,
+ test::PacketTransport* to_sender,
+ SimulatedNetworkInterface* receiver_network) override {
+ send_simulated_network_ = sender_network;
+ }
+
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStreamInterface::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
+ if (enable_zero_target_bitrate_) {
+ // Configure VP8 to be able to use simulcast.
+ send_config->rtp.payload_name = "VP8";
+ encoder_config->codec_type = kVideoCodecVP8;
+ (*receive_configs)[0].decoders.resize(1);
+ (*receive_configs)[0].decoders[0].payload_type =
+ send_config->rtp.payload_type;
+ (*receive_configs)[0].decoders[0].video_format =
+ SdpVideoFormat(send_config->rtp.payload_name);
+ }
+ encoder_config->content_type = content_type_;
+ (*receive_configs)[0].rtp.rtcp_mode = RtcpMode::kReducedSize;
+ (*receive_configs)[0].rtp.rtcp_xr.receiver_reference_time_report =
+ enable_rrtr_;
+ }
+
+ void PerformTest() override {
+ EXPECT_TRUE(Wait())
+ << "Timed out while waiting for RTCP SR/RR packets to be sent.";
+ }
+
+ static const int kNumRtcpReportPacketsToObserve = 5;
+
+ Mutex mutex_;
+ const bool enable_rrtr_;
+ const bool expect_target_bitrate_;
+ const bool enable_zero_target_bitrate_;
+ const VideoEncoderConfig::ContentType content_type_;
+ int sent_rtcp_sr_;
+ int sent_rtcp_rr_ RTC_GUARDED_BY(&mutex_);
+ int sent_rtcp_rrtr_ RTC_GUARDED_BY(&mutex_);
+ bool sent_rtcp_target_bitrate_ RTC_GUARDED_BY(&mutex_);
+ bool sent_zero_rtcp_target_bitrate_ RTC_GUARDED_BY(&mutex_);
+ int sent_rtcp_dlrr_;
+ BuiltInNetworkBehaviorConfig forward_transport_config_;
+ SimulatedNetworkInterface* send_simulated_network_ = nullptr;
+};
+
+TEST_F(ExtendedReportsEndToEndTest,
+ TestExtendedReportsWithRrtrWithoutTargetBitrate) {
+ RtcpXrObserver test(/*enable_rrtr=*/true, /*expect_target_bitrate=*/false,
+ /*enable_zero_target_bitrate=*/false,
+ VideoEncoderConfig::ContentType::kRealtimeVideo);
+ RunBaseTest(&test);
+}
+
+TEST_F(ExtendedReportsEndToEndTest,
+ TestExtendedReportsWithoutRrtrWithoutTargetBitrate) {
+ RtcpXrObserver test(/*enable_rrtr=*/false, /*expect_target_bitrate=*/false,
+ /*enable_zero_target_bitrate=*/false,
+ VideoEncoderConfig::ContentType::kRealtimeVideo);
+ RunBaseTest(&test);
+}
+
+TEST_F(ExtendedReportsEndToEndTest,
+ TestExtendedReportsWithRrtrWithTargetBitrate) {
+ RtcpXrObserver test(/*enable_rrtr=*/true, /*expect_target_bitrate=*/true,
+ /*enable_zero_target_bitrate=*/false,
+ VideoEncoderConfig::ContentType::kScreen);
+ RunBaseTest(&test);
+}
+
+TEST_F(ExtendedReportsEndToEndTest,
+ TestExtendedReportsWithoutRrtrWithTargetBitrate) {
+ RtcpXrObserver test(/*enable_rrtr=*/false, /*expect_target_bitrate=*/true,
+ /*enable_zero_target_bitrate=*/false,
+ VideoEncoderConfig::ContentType::kScreen);
+ RunBaseTest(&test);
+}
+
+TEST_F(ExtendedReportsEndToEndTest,
+ TestExtendedReportsWithoutRrtrWithTargetBitrateExplicitlySet) {
+ test::ScopedKeyValueConfig field_trials(
+ field_trials_, "WebRTC-Target-Bitrate-Rtcp/Enabled/");
+ RtcpXrObserver test(/*enable_rrtr=*/false, /*expect_target_bitrate=*/true,
+ /*enable_zero_target_bitrate=*/false,
+ VideoEncoderConfig::ContentType::kRealtimeVideo);
+ RunBaseTest(&test);
+}
+
+TEST_F(ExtendedReportsEndToEndTest,
+ TestExtendedReportsCanSignalZeroTargetBitrate) {
+ RtcpXrObserver test(/*enable_rrtr=*/false, /*expect_target_bitrate=*/true,
+ /*enable_zero_target_bitrate=*/true,
+ VideoEncoderConfig::ContentType::kScreen);
+ RunBaseTest(&test);
+}
+} // namespace webrtc
diff --git a/third_party/libwebrtc/video/end_to_end_tests/fec_tests.cc b/third_party/libwebrtc/video/end_to_end_tests/fec_tests.cc
new file mode 100644
index 0000000000..bf3ad0b22d
--- /dev/null
+++ b/third_party/libwebrtc/video/end_to_end_tests/fec_tests.cc
@@ -0,0 +1,502 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+
+#include "api/task_queue/task_queue_base.h"
+#include "api/test/simulated_network.h"
+#include "api/test/video/function_video_encoder_factory.h"
+#include "call/fake_network_pipe.h"
+#include "call/simulated_network.h"
+#include "media/engine/internal_decoder_factory.h"
+#include "modules/include/module_common_types_public.h"
+#include "modules/rtp_rtcp/source/byte_io.h"
+#include "modules/rtp_rtcp/source/rtp_packet.h"
+#include "modules/video_coding/codecs/vp8/include/vp8.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "test/call_test.h"
+#include "test/field_trial.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+#include "test/rtcp_packet_parser.h"
+
+using ::testing::Contains;
+using ::testing::Not;
+
+namespace webrtc {
+namespace {
+enum : int { // The first valid value is 1.
+ kTransportSequenceNumberExtensionId = 1,
+ kVideoRotationExtensionId,
+};
+} // namespace
+
+class FecEndToEndTest : public test::CallTest {
+ public:
+ FecEndToEndTest() {
+ RegisterRtpExtension(RtpExtension(RtpExtension::kTransportSequenceNumberUri,
+ kTransportSequenceNumberExtensionId));
+ RegisterRtpExtension(RtpExtension(RtpExtension::kVideoRotationUri,
+ kVideoRotationExtensionId));
+ }
+};
+
+TEST_F(FecEndToEndTest, ReceivesUlpfec) {
+ class UlpfecRenderObserver : public test::EndToEndTest,
+ public rtc::VideoSinkInterface<VideoFrame> {
+ public:
+ UlpfecRenderObserver()
+ : EndToEndTest(kDefaultTimeout),
+ encoder_factory_([]() { return VP8Encoder::Create(); }),
+ random_(0xcafef00d1),
+ num_packets_sent_(0) {}
+
+ private:
+ Action OnSendRtp(const uint8_t* packet, size_t length) override {
+ MutexLock lock(&mutex_);
+ RtpPacket rtp_packet;
+ EXPECT_TRUE(rtp_packet.Parse(packet, length));
+
+ EXPECT_TRUE(rtp_packet.PayloadType() == kVideoSendPayloadType ||
+ rtp_packet.PayloadType() == kRedPayloadType)
+ << "Unknown payload type received.";
+ EXPECT_EQ(kVideoSendSsrcs[0], rtp_packet.Ssrc())
+ << "Unknown SSRC received.";
+
+ // Parse RED header.
+ int encapsulated_payload_type = -1;
+ if (rtp_packet.PayloadType() == kRedPayloadType) {
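+        // With a single RED block (F bit cleared), the first payload octet is
+        // the encapsulated payload type (RFC 2198).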
+ encapsulated_payload_type = rtp_packet.payload()[0];
+
+ EXPECT_TRUE(encapsulated_payload_type == kVideoSendPayloadType ||
+ encapsulated_payload_type == kUlpfecPayloadType)
+ << "Unknown encapsulated payload type received.";
+ }
+
+ // To minimize test flakiness, always let ULPFEC packets through.
+ if (encapsulated_payload_type == kUlpfecPayloadType) {
+ return SEND_PACKET;
+ }
+
+ // Simulate 5% video packet loss after rampup period. Record the
+ // corresponding timestamps that were dropped.
+ if (num_packets_sent_++ > 100 && random_.Rand(1, 100) <= 5) {
+ if (encapsulated_payload_type == kVideoSendPayloadType) {
+ dropped_sequence_numbers_.insert(rtp_packet.SequenceNumber());
+ dropped_timestamps_.insert(rtp_packet.Timestamp());
+ }
+ return DROP_PACKET;
+ }
+
+ return SEND_PACKET;
+ }
+
+ void OnFrame(const VideoFrame& video_frame) override {
+ MutexLock lock(&mutex_);
+ // Rendering frame with timestamp of packet that was dropped -> FEC
+ // protection worked.
+ auto it = dropped_timestamps_.find(video_frame.timestamp());
+ if (it != dropped_timestamps_.end()) {
+ observation_complete_.Set();
+ }
+ }
+
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStreamInterface::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
+ // Use VP8 instead of FAKE, since the latter does not have PictureID
+ // in the packetization headers.
+ send_config->encoder_settings.encoder_factory = &encoder_factory_;
+ send_config->rtp.payload_name = "VP8";
+ send_config->rtp.payload_type = kVideoSendPayloadType;
+ encoder_config->codec_type = kVideoCodecVP8;
+ VideoReceiveStreamInterface::Decoder decoder =
+ test::CreateMatchingDecoder(*send_config);
+ (*receive_configs)[0].decoder_factory = &decoder_factory_;
+ (*receive_configs)[0].decoders.clear();
+ (*receive_configs)[0].decoders.push_back(decoder);
+
+ // Enable ULPFEC over RED.
+ send_config->rtp.ulpfec.red_payload_type = kRedPayloadType;
+ send_config->rtp.ulpfec.ulpfec_payload_type = kUlpfecPayloadType;
+ (*receive_configs)[0].rtp.red_payload_type = kRedPayloadType;
+ (*receive_configs)[0].rtp.ulpfec_payload_type = kUlpfecPayloadType;
+
+ (*receive_configs)[0].renderer = this;
+ }
+
+ void PerformTest() override {
+ EXPECT_TRUE(Wait())
+ << "Timed out waiting for dropped frames to be rendered.";
+ }
+
+ Mutex mutex_;
+ std::unique_ptr<VideoEncoder> encoder_;
+ test::FunctionVideoEncoderFactory encoder_factory_;
+ InternalDecoderFactory decoder_factory_;
+ std::set<uint32_t> dropped_sequence_numbers_ RTC_GUARDED_BY(mutex_);
+ // Several packets can have the same timestamp.
+ std::multiset<uint32_t> dropped_timestamps_ RTC_GUARDED_BY(mutex_);
+ Random random_;
+ int num_packets_sent_ RTC_GUARDED_BY(mutex_);
+ } test;
+
+ RunBaseTest(&test);
+}
+
+class FlexfecRenderObserver : public test::EndToEndTest,
+ public rtc::VideoSinkInterface<VideoFrame> {
+ public:
+ static constexpr uint32_t kVideoLocalSsrc = 123;
+ static constexpr uint32_t kFlexfecLocalSsrc = 456;
+
+ explicit FlexfecRenderObserver(bool enable_nack, bool expect_flexfec_rtcp)
+ : test::EndToEndTest(test::CallTest::kLongTimeout),
+ enable_nack_(enable_nack),
+ expect_flexfec_rtcp_(expect_flexfec_rtcp),
+ received_flexfec_rtcp_(false),
+ random_(0xcafef00d1),
+ num_packets_sent_(0) {}
+
+ size_t GetNumFlexfecStreams() const override { return 1; }
+
+ private:
+ Action OnSendRtp(const uint8_t* packet, size_t length) override {
+ MutexLock lock(&mutex_);
+ RtpPacket rtp_packet;
+ EXPECT_TRUE(rtp_packet.Parse(packet, length));
+
+ EXPECT_TRUE(
+ rtp_packet.PayloadType() == test::CallTest::kFakeVideoSendPayloadType ||
+ rtp_packet.PayloadType() == test::CallTest::kFlexfecPayloadType ||
+ (enable_nack_ &&
+ rtp_packet.PayloadType() == test::CallTest::kSendRtxPayloadType))
+ << "Unknown payload type received.";
+ EXPECT_TRUE(
+ rtp_packet.Ssrc() == test::CallTest::kVideoSendSsrcs[0] ||
+ rtp_packet.Ssrc() == test::CallTest::kFlexfecSendSsrc ||
+ (enable_nack_ && rtp_packet.Ssrc() == test::CallTest::kSendRtxSsrcs[0]))
+ << "Unknown SSRC received.";
+
+ // To reduce test flakiness, always let FlexFEC packets through.
+ if (rtp_packet.PayloadType() == test::CallTest::kFlexfecPayloadType) {
+ EXPECT_EQ(test::CallTest::kFlexfecSendSsrc, rtp_packet.Ssrc());
+
+ return SEND_PACKET;
+ }
+
+ // To reduce test flakiness, always let RTX packets through.
+ if (rtp_packet.PayloadType() == test::CallTest::kSendRtxPayloadType) {
+ EXPECT_EQ(test::CallTest::kSendRtxSsrcs[0], rtp_packet.Ssrc());
+
+ if (rtp_packet.payload_size() == 0) {
+ // Pure padding packet.
+ return SEND_PACKET;
+ }
+
+ // Parse RTX header.
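+        // Per RFC 4588, the first two bytes of the RTX payload carry the
+        // original sequence number of the retransmitted packet.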
+ uint16_t original_sequence_number =
+ ByteReader<uint16_t>::ReadBigEndian(rtp_packet.payload().data());
+
+ // From the perspective of FEC, a retransmitted packet is no longer
+ // dropped, so remove it from list of dropped packets.
+ auto seq_num_it =
+ dropped_sequence_numbers_.find(original_sequence_number);
+ if (seq_num_it != dropped_sequence_numbers_.end()) {
+ dropped_sequence_numbers_.erase(seq_num_it);
+ auto ts_it = dropped_timestamps_.find(rtp_packet.Timestamp());
+ EXPECT_NE(ts_it, dropped_timestamps_.end());
+ dropped_timestamps_.erase(ts_it);
+ }
+
+ return SEND_PACKET;
+ }
+
+ // Simulate 5% video packet loss after rampup period. Record the
+ // corresponding timestamps that were dropped.
+ if (num_packets_sent_++ > 100 && random_.Rand(1, 100) <= 5) {
+ EXPECT_EQ(test::CallTest::kFakeVideoSendPayloadType,
+ rtp_packet.PayloadType());
+ EXPECT_EQ(test::CallTest::kVideoSendSsrcs[0], rtp_packet.Ssrc());
+
+ dropped_sequence_numbers_.insert(rtp_packet.SequenceNumber());
+ dropped_timestamps_.insert(rtp_packet.Timestamp());
+
+ return DROP_PACKET;
+ }
+
+ return SEND_PACKET;
+ }
+
+ Action OnReceiveRtcp(const uint8_t* data, size_t length) override {
+ test::RtcpPacketParser parser;
+
+ parser.Parse(data, length);
+ if (parser.sender_ssrc() == kFlexfecLocalSsrc) {
+ EXPECT_EQ(1, parser.receiver_report()->num_packets());
+ const std::vector<rtcp::ReportBlock>& report_blocks =
+ parser.receiver_report()->report_blocks();
+ if (!report_blocks.empty()) {
+ EXPECT_EQ(1U, report_blocks.size());
+ EXPECT_EQ(test::CallTest::kFlexfecSendSsrc,
+ report_blocks[0].source_ssrc());
+ MutexLock lock(&mutex_);
+ received_flexfec_rtcp_ = true;
+ }
+ }
+
+ return SEND_PACKET;
+ }
+
+ BuiltInNetworkBehaviorConfig GetSendTransportConfig() const override {
+ // At low RTT (< kLowRttNackMs) -> NACK only, no FEC.
+ const int kNetworkDelayMs = 100;
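+    // 100 ms of queuing delay keeps the RTT above that threshold, so the
+    // hybrid NACK/FEC mode actually produces FEC packets.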
+ BuiltInNetworkBehaviorConfig config;
+ config.queue_delay_ms = kNetworkDelayMs;
+ return config;
+ }
+
+ void OnFrame(const VideoFrame& video_frame) override {
+ EXPECT_EQ(kVideoRotation_90, video_frame.rotation());
+
+ MutexLock lock(&mutex_);
+ // Rendering frame with timestamp of packet that was dropped -> FEC
+ // protection worked.
+ auto it = dropped_timestamps_.find(video_frame.timestamp());
+ if (it != dropped_timestamps_.end()) {
+ if (!expect_flexfec_rtcp_ || received_flexfec_rtcp_) {
+ observation_complete_.Set();
+ }
+ }
+ }
+
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStreamInterface::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
+ (*receive_configs)[0].rtp.local_ssrc = kVideoLocalSsrc;
+ (*receive_configs)[0].renderer = this;
+
+ if (enable_nack_) {
+ send_config->rtp.nack.rtp_history_ms = test::CallTest::kNackRtpHistoryMs;
+ send_config->rtp.rtx.ssrcs.push_back(test::CallTest::kSendRtxSsrcs[0]);
+ send_config->rtp.rtx.payload_type = test::CallTest::kSendRtxPayloadType;
+
+ (*receive_configs)[0].rtp.nack.rtp_history_ms =
+ test::CallTest::kNackRtpHistoryMs;
+ (*receive_configs)[0].rtp.rtx_ssrc = test::CallTest::kSendRtxSsrcs[0];
+ (*receive_configs)[0]
+ .rtp
+ .rtx_associated_payload_types[test::CallTest::kSendRtxPayloadType] =
+ test::CallTest::kVideoSendPayloadType;
+ }
+ }
+
+ void OnFrameGeneratorCapturerCreated(
+ test::FrameGeneratorCapturer* frame_generator_capturer) override {
+ frame_generator_capturer->SetFakeRotation(kVideoRotation_90);
+ }
+
+ void ModifyFlexfecConfigs(
+ std::vector<FlexfecReceiveStream::Config>* receive_configs) override {
+ (*receive_configs)[0].rtp.local_ssrc = kFlexfecLocalSsrc;
+ }
+
+ void PerformTest() override {
+ EXPECT_TRUE(Wait())
+ << "Timed out waiting for dropped frames to be rendered.";
+ }
+
+ Mutex mutex_;
+ std::set<uint32_t> dropped_sequence_numbers_ RTC_GUARDED_BY(mutex_);
+ // Several packets can have the same timestamp.
+ std::multiset<uint32_t> dropped_timestamps_ RTC_GUARDED_BY(mutex_);
+ const bool enable_nack_;
+ const bool expect_flexfec_rtcp_;
+ bool received_flexfec_rtcp_ RTC_GUARDED_BY(mutex_);
+ Random random_;
+ int num_packets_sent_;
+};
+
+TEST_F(FecEndToEndTest, RecoversWithFlexfec) {
+  FlexfecRenderObserver test(/*enable_nack=*/false,
+                             /*expect_flexfec_rtcp=*/false);
+ RunBaseTest(&test);
+}
+
+TEST_F(FecEndToEndTest, RecoversWithFlexfecAndNack) {
+  FlexfecRenderObserver test(/*enable_nack=*/true,
+                             /*expect_flexfec_rtcp=*/false);
+ RunBaseTest(&test);
+}
+
+TEST_F(FecEndToEndTest, RecoversWithFlexfecAndSendsCorrespondingRtcp) {
+  FlexfecRenderObserver test(/*enable_nack=*/false,
+                             /*expect_flexfec_rtcp=*/true);
+ RunBaseTest(&test);
+}
+
+TEST_F(FecEndToEndTest, ReceivedUlpfecPacketsNotNacked) {
+ class UlpfecNackObserver : public test::EndToEndTest {
+ public:
+ UlpfecNackObserver()
+ : EndToEndTest(kDefaultTimeout),
+ state_(kFirstPacket),
+ ulpfec_sequence_number_(0),
+ has_last_sequence_number_(false),
+ last_sequence_number_(0),
+ encoder_factory_([]() { return VP8Encoder::Create(); }) {}
+
+ private:
+ Action OnSendRtp(const uint8_t* packet, size_t length) override {
+ MutexLock lock_(&mutex_);
+ RtpPacket rtp_packet;
+ EXPECT_TRUE(rtp_packet.Parse(packet, length));
+
+ int encapsulated_payload_type = -1;
+ if (rtp_packet.PayloadType() == kRedPayloadType) {
+ encapsulated_payload_type = rtp_packet.payload()[0];
+ if (encapsulated_payload_type != kFakeVideoSendPayloadType)
+ EXPECT_EQ(kUlpfecPayloadType, encapsulated_payload_type);
+ } else {
+ EXPECT_EQ(kFakeVideoSendPayloadType, rtp_packet.PayloadType());
+ }
+
+ if (has_last_sequence_number_ &&
+ !IsNewerSequenceNumber(rtp_packet.SequenceNumber(),
+ last_sequence_number_)) {
+ // Drop retransmitted packets.
+ return DROP_PACKET;
+ }
+ last_sequence_number_ = rtp_packet.SequenceNumber();
+ has_last_sequence_number_ = true;
+
+ bool ulpfec_packet = encapsulated_payload_type == kUlpfecPayloadType;
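+      // State machine: drop every other media packet until the first ULPFEC
+      // packet is seen, then drop all media packets until the next ULPFEC
+      // packet and remember its sequence number. After that drop one media
+      // packet, pass one, and keep dropping media packets while verifying
+      // that the ULPFEC packet never shows up in a NACK.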
+ switch (state_) {
+ case kFirstPacket:
+ state_ = kDropEveryOtherPacketUntilUlpfec;
+ break;
+ case kDropEveryOtherPacketUntilUlpfec:
+ if (ulpfec_packet) {
+ state_ = kDropAllMediaPacketsUntilUlpfec;
+ } else if (rtp_packet.SequenceNumber() % 2 == 0) {
+ return DROP_PACKET;
+ }
+ break;
+ case kDropAllMediaPacketsUntilUlpfec:
+ if (!ulpfec_packet)
+ return DROP_PACKET;
+ ulpfec_sequence_number_ = rtp_packet.SequenceNumber();
+ state_ = kDropOneMediaPacket;
+ break;
+ case kDropOneMediaPacket:
+ if (ulpfec_packet)
+ return DROP_PACKET;
+ state_ = kPassOneMediaPacket;
+ return DROP_PACKET;
+ case kPassOneMediaPacket:
+ if (ulpfec_packet)
+ return DROP_PACKET;
+          // Pass one media packet after the packet dropped following the
+          // last FEC packet; otherwise the receiver might never see a
+          // sequence number newer than `ulpfec_sequence_number_`.
+ state_ = kVerifyUlpfecPacketNotInNackList;
+ break;
+ case kVerifyUlpfecPacketNotInNackList:
+ // Continue to drop packets. Make sure no frame can be decoded.
+ if (ulpfec_packet || rtp_packet.SequenceNumber() % 2 == 0)
+ return DROP_PACKET;
+ break;
+ }
+ return SEND_PACKET;
+ }
+
+ Action OnReceiveRtcp(const uint8_t* packet, size_t length) override {
+ MutexLock lock_(&mutex_);
+ if (state_ == kVerifyUlpfecPacketNotInNackList) {
+ test::RtcpPacketParser rtcp_parser;
+ rtcp_parser.Parse(packet, length);
+ const std::vector<uint16_t>& nacks = rtcp_parser.nack()->packet_ids();
+ EXPECT_THAT(nacks, Not(Contains(ulpfec_sequence_number_)))
+ << "Got nack for ULPFEC packet";
+ if (!nacks.empty() &&
+ IsNewerSequenceNumber(nacks.back(), ulpfec_sequence_number_)) {
+ observation_complete_.Set();
+ }
+ }
+ return SEND_PACKET;
+ }
+
+ BuiltInNetworkBehaviorConfig GetSendTransportConfig() const override {
+      // At low RTT (< kLowRttNackMs) only NACK is used and no FEC is sent,
+      // so configure some network delay to keep hybrid NACK/FEC protection
+      // active.
+ const int kNetworkDelayMs = 50;
+ BuiltInNetworkBehaviorConfig config;
+ config.queue_delay_ms = kNetworkDelayMs;
+ return config;
+ }
+
+ // TODO(holmer): Investigate why we don't send FEC packets when the bitrate
+ // is 10 kbps.
+ void ModifySenderBitrateConfig(
+ BitrateConstraints* bitrate_config) override {
+ const int kMinBitrateBps = 30000;
+ bitrate_config->min_bitrate_bps = kMinBitrateBps;
+ }
+
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStreamInterface::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
+ // Configure hybrid NACK/FEC.
+ send_config->rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
+ send_config->rtp.ulpfec.red_payload_type = kRedPayloadType;
+ send_config->rtp.ulpfec.ulpfec_payload_type = kUlpfecPayloadType;
+ // Set codec to VP8, otherwise NACK/FEC hybrid will be disabled.
+ send_config->encoder_settings.encoder_factory = &encoder_factory_;
+ send_config->rtp.payload_name = "VP8";
+ send_config->rtp.payload_type = kFakeVideoSendPayloadType;
+ encoder_config->codec_type = kVideoCodecVP8;
+
+ (*receive_configs)[0].rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
+ (*receive_configs)[0].rtp.red_payload_type = kRedPayloadType;
+ (*receive_configs)[0].rtp.ulpfec_payload_type = kUlpfecPayloadType;
+
+ (*receive_configs)[0].decoders.resize(1);
+ (*receive_configs)[0].decoders[0].payload_type =
+ send_config->rtp.payload_type;
+ (*receive_configs)[0].decoders[0].video_format =
+ SdpVideoFormat(send_config->rtp.payload_name);
+ (*receive_configs)[0].decoder_factory = &decoder_factory_;
+ }
+
+ void PerformTest() override {
+ EXPECT_TRUE(Wait())
+ << "Timed out while waiting for FEC packets to be received.";
+ }
+
+ enum {
+ kFirstPacket,
+ kDropEveryOtherPacketUntilUlpfec,
+ kDropAllMediaPacketsUntilUlpfec,
+ kDropOneMediaPacket,
+ kPassOneMediaPacket,
+ kVerifyUlpfecPacketNotInNackList,
+ } state_;
+
+ Mutex mutex_;
+ uint16_t ulpfec_sequence_number_ RTC_GUARDED_BY(&mutex_);
+ bool has_last_sequence_number_;
+ uint16_t last_sequence_number_;
+ test::FunctionVideoEncoderFactory encoder_factory_;
+ InternalDecoderFactory decoder_factory_;
+ } test;
+
+ RunBaseTest(&test);
+}
+} // namespace webrtc
diff --git a/third_party/libwebrtc/video/end_to_end_tests/frame_encryption_tests.cc b/third_party/libwebrtc/video/end_to_end_tests/frame_encryption_tests.cc
new file mode 100644
index 0000000000..6a1b16927c
--- /dev/null
+++ b/third_party/libwebrtc/video/end_to_end_tests/frame_encryption_tests.cc
@@ -0,0 +1,91 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/test/fake_frame_decryptor.h"
+#include "api/test/fake_frame_encryptor.h"
+#include "media/engine/internal_decoder_factory.h"
+#include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h"
+#include "modules/video_coding/codecs/vp8/include/vp8.h"
+#include "test/call_test.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace {
+
+using FrameEncryptionEndToEndTest = test::CallTest;
+
+enum : int { // The first valid value is 1.
+ kGenericDescriptorExtensionId = 1,
+};
+
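+// Sends video through a FakeFrameEncryptor and verifies that at least one
+// decrypted frame is rendered on the receiving side.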
+class DecryptedFrameObserver : public test::EndToEndTest,
+ public rtc::VideoSinkInterface<VideoFrame> {
+ public:
+ DecryptedFrameObserver()
+ : EndToEndTest(test::CallTest::kDefaultTimeout),
+ encoder_factory_([] { return VP8Encoder::Create(); }) {}
+
+ private:
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStreamInterface::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
+    // Use a real VP8 encoder instead of the default fake encoder.
+ send_config->encoder_settings.encoder_factory = &encoder_factory_;
+ send_config->rtp.payload_name = "VP8";
+ send_config->rtp.payload_type = test::CallTest::kVideoSendPayloadType;
+ send_config->frame_encryptor = new FakeFrameEncryptor();
+ send_config->crypto_options.sframe.require_frame_encryption = true;
+ encoder_config->codec_type = kVideoCodecVP8;
+ VideoReceiveStreamInterface::Decoder decoder =
+ test::CreateMatchingDecoder(*send_config);
+ for (auto& recv_config : *receive_configs) {
+ recv_config.decoder_factory = &decoder_factory_;
+ recv_config.decoders.clear();
+ recv_config.decoders.push_back(decoder);
+ recv_config.renderer = this;
+ recv_config.frame_decryptor = rtc::make_ref_counted<FakeFrameDecryptor>();
+ recv_config.crypto_options.sframe.require_frame_encryption = true;
+ }
+ }
+
+ void OnFrame(const VideoFrame& video_frame) override {
+ observation_complete_.Set();
+ }
+
+ void PerformTest() override {
+ EXPECT_TRUE(Wait())
+ << "Timed out waiting for decrypted frames to be rendered.";
+ }
+
+ std::unique_ptr<VideoEncoder> encoder_;
+ test::FunctionVideoEncoderFactory encoder_factory_;
+ InternalDecoderFactory decoder_factory_;
+};
+
+// Validates that payloads cannot be sent without a frame encryptor and frame
+// decryptor attached.
+TEST_F(FrameEncryptionEndToEndTest,
+ WithGenericFrameDescriptorRequireFrameEncryptionEnforced) {
+ RegisterRtpExtension(RtpExtension(RtpExtension::kGenericFrameDescriptorUri00,
+ kGenericDescriptorExtensionId));
+ DecryptedFrameObserver test;
+ RunBaseTest(&test);
+}
+
+TEST_F(FrameEncryptionEndToEndTest,
+ WithDependencyDescriptorRequireFrameEncryptionEnforced) {
+ RegisterRtpExtension(RtpExtension(RtpExtension::kDependencyDescriptorUri,
+ kGenericDescriptorExtensionId));
+ DecryptedFrameObserver test;
+ RunBaseTest(&test);
+}
+} // namespace
+} // namespace webrtc
diff --git a/third_party/libwebrtc/video/end_to_end_tests/histogram_tests.cc b/third_party/libwebrtc/video/end_to_end_tests/histogram_tests.cc
new file mode 100644
index 0000000000..03e32ffba8
--- /dev/null
+++ b/third_party/libwebrtc/video/end_to_end_tests/histogram_tests.cc
@@ -0,0 +1,317 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "absl/types/optional.h"
+#include "api/test/video/function_video_encoder_factory.h"
+#include "modules/video_coding/codecs/vp8/include/vp8.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "system_wrappers/include/metrics.h"
+#include "test/call_test.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace {
+enum : int { // The first valid value is 1.
+ kTransportSequenceNumberExtensionId = 1,
+ kVideoContentTypeExtensionId,
+};
+} // namespace
+
+class HistogramTest : public test::CallTest {
+ public:
+ HistogramTest() {
+ RegisterRtpExtension(RtpExtension(RtpExtension::kTransportSequenceNumberUri,
+ kTransportSequenceNumberExtensionId));
+ RegisterRtpExtension(RtpExtension(RtpExtension::kVideoContentTypeUri,
+ kVideoContentTypeExtensionId));
+ }
+
+ protected:
+ void VerifyHistogramStats(bool use_rtx, bool use_fec, bool screenshare);
+};
+
+void HistogramTest::VerifyHistogramStats(bool use_rtx,
+ bool use_fec,
+ bool screenshare) {
+ class FrameObserver : public test::EndToEndTest,
+ public rtc::VideoSinkInterface<VideoFrame> {
+ public:
+ FrameObserver(bool use_rtx, bool use_fec, bool screenshare)
+ : EndToEndTest(kLongTimeout),
+ use_rtx_(use_rtx),
+ use_fec_(use_fec),
+ screenshare_(screenshare),
+ // This test uses NACK, so to send FEC we can't use a fake encoder.
+ encoder_factory_([]() { return VP8Encoder::Create(); }),
+ num_frames_received_(0) {}
+
+ private:
+ void OnFrame(const VideoFrame& video_frame) override {
+ // The RTT is needed to estimate `ntp_time_ms` which is used by
+ // end-to-end delay stats. Therefore, start counting received frames once
+ // `ntp_time_ms` is valid.
+ if (video_frame.ntp_time_ms() > 0 &&
+ Clock::GetRealTimeClock()->CurrentNtpInMilliseconds() >=
+ video_frame.ntp_time_ms()) {
+ MutexLock lock(&mutex_);
+ ++num_frames_received_;
+ }
+ }
+
+ Action OnSendRtp(const uint8_t* packet, size_t length) override {
+ if (MinMetricRunTimePassed() && MinNumberOfFramesReceived())
+ observation_complete_.Set();
+
+ return SEND_PACKET;
+ }
+
+ bool MinMetricRunTimePassed() {
+ int64_t now_ms = Clock::GetRealTimeClock()->TimeInMilliseconds();
+ if (!start_runtime_ms_)
+ start_runtime_ms_ = now_ms;
+
+ int64_t elapsed_sec = (now_ms - *start_runtime_ms_) / 1000;
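+      // The factor two is presumably a safety margin to make sure the
+      // periodically updated histograms have been filled before teardown.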
+ return elapsed_sec > metrics::kMinRunTimeInSeconds * 2;
+ }
+
+ bool MinNumberOfFramesReceived() const {
+ const int kMinRequiredHistogramSamples = 200;
+ MutexLock lock(&mutex_);
+ return num_frames_received_ > kMinRequiredHistogramSamples;
+ }
+
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStreamInterface::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
+ // NACK
+ send_config->rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
+ (*receive_configs)[0].rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
+ (*receive_configs)[0].renderer = this;
+ // FEC
+ if (use_fec_) {
+ send_config->rtp.ulpfec.ulpfec_payload_type = kUlpfecPayloadType;
+ send_config->rtp.ulpfec.red_payload_type = kRedPayloadType;
+ send_config->encoder_settings.encoder_factory = &encoder_factory_;
+ send_config->rtp.payload_name = "VP8";
+ encoder_config->codec_type = kVideoCodecVP8;
+ (*receive_configs)[0].decoders[0].video_format = SdpVideoFormat("VP8");
+ (*receive_configs)[0].rtp.red_payload_type = kRedPayloadType;
+ (*receive_configs)[0].rtp.ulpfec_payload_type = kUlpfecPayloadType;
+ }
+ // RTX
+ if (use_rtx_) {
+ send_config->rtp.rtx.ssrcs.push_back(kSendRtxSsrcs[0]);
+ send_config->rtp.rtx.payload_type = kSendRtxPayloadType;
+ (*receive_configs)[0].rtp.rtx_ssrc = kSendRtxSsrcs[0];
+ (*receive_configs)[0]
+ .rtp.rtx_associated_payload_types[kSendRtxPayloadType] =
+ kFakeVideoSendPayloadType;
+ if (use_fec_) {
+ send_config->rtp.ulpfec.red_rtx_payload_type = kRtxRedPayloadType;
+ (*receive_configs)[0]
+ .rtp.rtx_associated_payload_types[kRtxRedPayloadType] =
+ kSendRtxPayloadType;
+ }
+ }
+ // RTT needed for RemoteNtpTimeEstimator for the receive stream.
+ (*receive_configs)[0].rtp.rtcp_xr.receiver_reference_time_report = true;
+ encoder_config->content_type =
+ screenshare_ ? VideoEncoderConfig::ContentType::kScreen
+ : VideoEncoderConfig::ContentType::kRealtimeVideo;
+ }
+
+ void PerformTest() override {
+ EXPECT_TRUE(Wait()) << "Timed out waiting for min frames to be received.";
+ }
+
+ mutable Mutex mutex_;
+ const bool use_rtx_;
+ const bool use_fec_;
+ const bool screenshare_;
+ test::FunctionVideoEncoderFactory encoder_factory_;
+ absl::optional<int64_t> start_runtime_ms_;
+ int num_frames_received_ RTC_GUARDED_BY(&mutex_);
+ } test(use_rtx, use_fec, screenshare);
+
+ metrics::Reset();
+ RunBaseTest(&test);
+
+ const std::string video_prefix =
+ screenshare ? "WebRTC.Video.Screenshare." : "WebRTC.Video.";
+  // The content type extension is disabled in the non-screenshare tests,
+  // therefore no slicing on simulcast ID should be present.
+ const std::string video_suffix = screenshare ? ".S0" : "";
+
+ // Verify that stats have been updated once.
+ EXPECT_METRIC_EQ(2, metrics::NumSamples("WebRTC.Call.LifetimeInSeconds"));
+ EXPECT_METRIC_EQ(1, metrics::NumSamples(
+ "WebRTC.Call.TimeReceivingVideoRtpPacketsInSeconds"));
+ EXPECT_METRIC_EQ(
+ 1, metrics::NumSamples("WebRTC.Call.VideoBitrateReceivedInKbps"));
+ EXPECT_METRIC_EQ(1,
+ metrics::NumSamples("WebRTC.Call.RtcpBitrateReceivedInBps"));
+ EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Call.BitrateReceivedInKbps"));
+ EXPECT_METRIC_EQ(
+ 1, metrics::NumSamples("WebRTC.Call.EstimatedSendBitrateInKbps"));
+ EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Call.PacerBitrateInKbps"));
+
+ EXPECT_METRIC_EQ(
+ 1, metrics::NumSamples("WebRTC.Video.SendStreamLifetimeInSeconds"));
+ EXPECT_METRIC_EQ(
+ 1, metrics::NumSamples("WebRTC.Video.ReceiveStreamLifetimeInSeconds"));
+
+ EXPECT_METRIC_EQ(
+ 1, metrics::NumSamples("WebRTC.Video.NackPacketsSentPerMinute"));
+ EXPECT_METRIC_EQ(
+ 1, metrics::NumSamples(video_prefix + "NackPacketsReceivedPerMinute"));
+ EXPECT_METRIC_EQ(1,
+ metrics::NumSamples("WebRTC.Video.FirPacketsSentPerMinute"));
+ EXPECT_METRIC_EQ(
+ 1, metrics::NumSamples(video_prefix + "FirPacketsReceivedPerMinute"));
+ EXPECT_METRIC_EQ(1,
+ metrics::NumSamples("WebRTC.Video.PliPacketsSentPerMinute"));
+ EXPECT_METRIC_EQ(
+ 1, metrics::NumSamples(video_prefix + "PliPacketsReceivedPerMinute"));
+
+ EXPECT_METRIC_EQ(
+ 1, metrics::NumSamples(video_prefix + "KeyFramesSentInPermille"));
+ EXPECT_METRIC_EQ(
+ 1, metrics::NumSamples("WebRTC.Video.KeyFramesReceivedInPermille"));
+
+ EXPECT_METRIC_EQ(
+ 1, metrics::NumSamples(video_prefix + "SentPacketsLostInPercent"));
+ EXPECT_METRIC_EQ(
+ 1, metrics::NumSamples("WebRTC.Video.ReceivedPacketsLostInPercent"));
+
+ EXPECT_METRIC_EQ(1, metrics::NumSamples(video_prefix + "InputWidthInPixels"));
+ EXPECT_METRIC_EQ(1,
+ metrics::NumSamples(video_prefix + "InputHeightInPixels"));
+ EXPECT_METRIC_EQ(1, metrics::NumSamples(video_prefix + "SentWidthInPixels"));
+ EXPECT_METRIC_EQ(1, metrics::NumSamples(video_prefix + "SentHeightInPixels"));
+ EXPECT_METRIC_EQ(1,
+ metrics::NumSamples(video_prefix + "ReceivedWidthInPixels"));
+ EXPECT_METRIC_EQ(
+ 1, metrics::NumSamples(video_prefix + "ReceivedHeightInPixels"));
+
+ EXPECT_METRIC_EQ(1, metrics::NumEvents(video_prefix + "InputWidthInPixels",
+ kDefaultWidth));
+ EXPECT_METRIC_EQ(1, metrics::NumEvents(video_prefix + "InputHeightInPixels",
+ kDefaultHeight));
+ EXPECT_METRIC_EQ(
+ 1, metrics::NumEvents(video_prefix + "SentWidthInPixels", kDefaultWidth));
+ EXPECT_METRIC_EQ(1, metrics::NumEvents(video_prefix + "SentHeightInPixels",
+ kDefaultHeight));
+ EXPECT_METRIC_EQ(1, metrics::NumEvents(video_prefix + "ReceivedWidthInPixels",
+ kDefaultWidth));
+ EXPECT_METRIC_EQ(1,
+ metrics::NumEvents(video_prefix + "ReceivedHeightInPixels",
+ kDefaultHeight));
+
+ EXPECT_METRIC_EQ(1,
+ metrics::NumSamples(video_prefix + "InputFramesPerSecond"));
+ EXPECT_METRIC_EQ(1,
+ metrics::NumSamples(video_prefix + "SentFramesPerSecond"));
+ EXPECT_METRIC_EQ(1,
+ metrics::NumSamples("WebRTC.Video.DecodedFramesPerSecond"));
+ EXPECT_METRIC_EQ(1,
+ metrics::NumSamples("WebRTC.Video.RenderFramesPerSecond"));
+ EXPECT_METRIC_EQ(1,
+ metrics::NumSamples("WebRTC.Video.DelayedFramesToRenderer"));
+
+ EXPECT_METRIC_EQ(1,
+ metrics::NumSamples("WebRTC.Video.JitterBufferDelayInMs"));
+ EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Video.TargetDelayInMs"));
+ EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Video.CurrentDelayInMs"));
+ EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Video.OnewayDelayInMs"));
+
+ EXPECT_METRIC_EQ(1, metrics::NumSamples(video_prefix + "EndToEndDelayInMs" +
+ video_suffix));
+ EXPECT_METRIC_EQ(1,
+ metrics::NumSamples(video_prefix + "EndToEndDelayMaxInMs" +
+ video_suffix));
+ EXPECT_METRIC_EQ(1, metrics::NumSamples(video_prefix + "InterframeDelayInMs" +
+ video_suffix));
+ EXPECT_METRIC_EQ(1,
+ metrics::NumSamples(video_prefix + "InterframeDelayMaxInMs" +
+ video_suffix));
+
+ EXPECT_METRIC_EQ(
+ 1, metrics::NumSamples("WebRTC.Video.RenderSqrtPixelsPerSecond"));
+
+ EXPECT_METRIC_EQ(1, metrics::NumSamples(video_prefix + "EncodeTimeInMs"));
+ EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Video.DecodeTimeInMs"));
+
+ EXPECT_METRIC_EQ(1,
+ metrics::NumSamples(video_prefix + "NumberOfPauseEvents"));
+ EXPECT_METRIC_EQ(1,
+ metrics::NumSamples(video_prefix + "PausedTimeInPercent"));
+
+ EXPECT_METRIC_EQ(1, metrics::NumSamples(video_prefix + "BitrateSentInKbps"));
+ EXPECT_METRIC_EQ(1,
+ metrics::NumSamples("WebRTC.Video.BitrateReceivedInKbps"));
+ EXPECT_METRIC_EQ(
+ 1, metrics::NumSamples(video_prefix + "MediaBitrateSentInKbps"));
+ EXPECT_METRIC_EQ(
+ 1, metrics::NumSamples("WebRTC.Video.MediaBitrateReceivedInKbps"));
+ EXPECT_METRIC_EQ(
+ 1, metrics::NumSamples(video_prefix + "PaddingBitrateSentInKbps"));
+ EXPECT_METRIC_EQ(
+ 1, metrics::NumSamples("WebRTC.Video.PaddingBitrateReceivedInKbps"));
+ EXPECT_METRIC_EQ(
+ 1, metrics::NumSamples(video_prefix + "RetransmittedBitrateSentInKbps"));
+ EXPECT_METRIC_EQ(1, metrics::NumSamples(
+ "WebRTC.Video.RetransmittedBitrateReceivedInKbps"));
+
+ EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Video.SendDelayInMs"));
+ EXPECT_METRIC_EQ(1, metrics::NumSamples(video_prefix + "SendSideDelayInMs"));
+ EXPECT_METRIC_EQ(1,
+ metrics::NumSamples(video_prefix + "SendSideDelayMaxInMs"));
+
+ int num_rtx_samples = use_rtx ? 1 : 0;
+ EXPECT_METRIC_EQ(num_rtx_samples,
+ metrics::NumSamples("WebRTC.Video.RtxBitrateSentInKbps"));
+ EXPECT_METRIC_EQ(
+ num_rtx_samples,
+ metrics::NumSamples("WebRTC.Video.RtxBitrateReceivedInKbps"));
+
+ int num_red_samples = use_fec ? 1 : 0;
+ EXPECT_METRIC_EQ(num_red_samples,
+ metrics::NumSamples("WebRTC.Video.FecBitrateSentInKbps"));
+ EXPECT_METRIC_EQ(
+ num_red_samples,
+ metrics::NumSamples("WebRTC.Video.FecBitrateReceivedInKbps"));
+ EXPECT_METRIC_EQ(
+ num_red_samples,
+ metrics::NumSamples("WebRTC.Video.ReceivedFecPacketsInPercent"));
+}
+
+TEST_F(HistogramTest, VerifyStatsWithRtx) {
+ const bool kEnabledRtx = true;
+ const bool kEnabledRed = false;
+ const bool kScreenshare = false;
+ VerifyHistogramStats(kEnabledRtx, kEnabledRed, kScreenshare);
+}
+
+TEST_F(HistogramTest, VerifyStatsWithRed) {
+ const bool kEnabledRtx = false;
+ const bool kEnabledRed = true;
+ const bool kScreenshare = false;
+ VerifyHistogramStats(kEnabledRtx, kEnabledRed, kScreenshare);
+}
+
+TEST_F(HistogramTest, VerifyStatsWithScreenshare) {
+ const bool kEnabledRtx = false;
+ const bool kEnabledRed = false;
+ const bool kScreenshare = true;
+ VerifyHistogramStats(kEnabledRtx, kEnabledRed, kScreenshare);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/video/end_to_end_tests/multi_codec_receive_tests.cc b/third_party/libwebrtc/video/end_to_end_tests/multi_codec_receive_tests.cc
new file mode 100644
index 0000000000..d8ac606bfd
--- /dev/null
+++ b/third_party/libwebrtc/video/end_to_end_tests/multi_codec_receive_tests.cc
@@ -0,0 +1,291 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+
+#include "api/test/simulated_network.h"
+#include "api/test/video/function_video_encoder_factory.h"
+#include "call/fake_network_pipe.h"
+#include "call/simulated_network.h"
+#include "modules/include/module_common_types_public.h"
+#include "modules/rtp_rtcp/source/rtp_packet.h"
+#include "modules/video_coding/codecs/h264/include/h264.h"
+#include "modules/video_coding/codecs/vp8/include/vp8.h"
+#include "modules/video_coding/codecs/vp9/include/vp9.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/task_queue_for_test.h"
+#include "test/call_test.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+using ::testing::Contains;
+
+namespace webrtc {
+namespace {
+constexpr int kWidth = 1280;
+constexpr int kHeight = 720;
+constexpr int kFps = 30;
+constexpr int kFramesToObserve = 10;
+
+uint8_t PayloadNameToPayloadType(const std::string& payload_name) {
+ if (payload_name == "VP8") {
+ return test::CallTest::kPayloadTypeVP8;
+ } else if (payload_name == "VP9") {
+ return test::CallTest::kPayloadTypeVP9;
+ } else if (payload_name == "H264") {
+ return test::CallTest::kPayloadTypeH264;
+ } else {
+ RTC_DCHECK_NOTREACHED();
+ return 0;
+ }
+}
+
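+// Removes all timestamps older than or equal to `timestamp` from `timestamps`
+// and returns the number of removed entries.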
+int RemoveOlderOrEqual(uint32_t timestamp, std::vector<uint32_t>* timestamps) {
+ int num_removed = 0;
+ while (!timestamps->empty()) {
+ auto it = timestamps->begin();
+ if (IsNewerTimestamp(*it, timestamp))
+ break;
+
+ timestamps->erase(it);
+ ++num_removed;
+ }
+ return num_removed;
+}
+
+class FrameObserver : public test::RtpRtcpObserver,
+ public rtc::VideoSinkInterface<VideoFrame> {
+ public:
+ FrameObserver() : test::RtpRtcpObserver(test::CallTest::kDefaultTimeout) {}
+
+ void Reset(uint8_t expected_payload_type) {
+ MutexLock lock(&mutex_);
+ num_sent_frames_ = 0;
+ num_rendered_frames_ = 0;
+ expected_payload_type_ = expected_payload_type;
+ }
+
+ private:
+  // Forwards packets until kFramesToObserve frames have been sent.
+ Action OnSendRtp(const uint8_t* packet, size_t length) override {
+ MutexLock lock(&mutex_);
+
+ RtpPacket rtp_packet;
+ EXPECT_TRUE(rtp_packet.Parse(packet, length));
+ EXPECT_EQ(rtp_packet.Ssrc(), test::CallTest::kVideoSendSsrcs[0]);
+ if (rtp_packet.payload_size() == 0)
+ return SEND_PACKET; // Skip padding, may be sent after OnFrame is called.
+
+ if (expected_payload_type_ &&
+ rtp_packet.PayloadType() != expected_payload_type_.value()) {
+ return DROP_PACKET; // All frames sent.
+ }
+
+ if (!last_timestamp_ || rtp_packet.Timestamp() != *last_timestamp_) {
+ // New frame.
+ // Sent enough frames?
+ if (num_sent_frames_ >= kFramesToObserve)
+ return DROP_PACKET;
+
+ ++num_sent_frames_;
+ sent_timestamps_.push_back(rtp_packet.Timestamp());
+ }
+
+ last_timestamp_ = rtp_packet.Timestamp();
+ return SEND_PACKET;
+ }
+
+ // Verifies that all sent frames are decoded and rendered.
+ void OnFrame(const VideoFrame& rendered_frame) override {
+ MutexLock lock(&mutex_);
+ EXPECT_THAT(sent_timestamps_, Contains(rendered_frame.timestamp()));
+
+ // Remove old timestamps too, only the newest decoded frame is rendered.
+ num_rendered_frames_ +=
+ RemoveOlderOrEqual(rendered_frame.timestamp(), &sent_timestamps_);
+
+ if (num_rendered_frames_ >= kFramesToObserve) {
+ EXPECT_TRUE(sent_timestamps_.empty()) << "All sent frames not decoded.";
+ observation_complete_.Set();
+ }
+ }
+
+ Mutex mutex_;
+ absl::optional<uint32_t> last_timestamp_; // Only accessed from pacer thread.
+ absl::optional<uint8_t> expected_payload_type_ RTC_GUARDED_BY(mutex_);
+ int num_sent_frames_ RTC_GUARDED_BY(mutex_) = 0;
+ int num_rendered_frames_ RTC_GUARDED_BY(mutex_) = 0;
+ std::vector<uint32_t> sent_timestamps_ RTC_GUARDED_BY(mutex_);
+};
+} // namespace
+
+class MultiCodecReceiveTest : public test::CallTest {
+ public:
+ MultiCodecReceiveTest() {
+ SendTask(task_queue(), [this]() {
+ CreateCalls();
+ CreateSendTransport(BuiltInNetworkBehaviorConfig(), &observer_);
+ CreateReceiveTransport(BuiltInNetworkBehaviorConfig(), &observer_);
+ });
+ }
+
+ virtual ~MultiCodecReceiveTest() {
+ SendTask(task_queue(), [this]() {
+ send_transport_.reset();
+ receive_transport_.reset();
+ DestroyCalls();
+ });
+ }
+
+ struct CodecConfig {
+ std::string payload_name;
+ size_t num_temporal_layers;
+ };
+
+ void ConfigureEncoder(const CodecConfig& config,
+ VideoEncoderFactory* encoder_factory);
+ void ConfigureDecoders(const std::vector<CodecConfig>& configs,
+ VideoDecoderFactory* decoder_factory);
+ void RunTestWithCodecs(const std::vector<CodecConfig>& configs);
+
+ private:
+ FrameObserver observer_;
+};
+
+void MultiCodecReceiveTest::ConfigureDecoders(
+ const std::vector<CodecConfig>& configs,
+ VideoDecoderFactory* decoder_factory) {
+ video_receive_configs_[0].decoders.clear();
+  video_receive_configs_[0].decoder_factory = decoder_factory;
+  // Placing the payload names in a std::set retains the unique names only.
+  std::set<std::string> unique_payload_names;
+ for (const auto& config : configs)
+ if (unique_payload_names.insert(config.payload_name).second) {
+ VideoReceiveStreamInterface::Decoder decoder =
+ test::CreateMatchingDecoder(
+ PayloadNameToPayloadType(config.payload_name),
+ config.payload_name);
+
+ video_receive_configs_[0].decoders.push_back(decoder);
+ }
+}
+
+void MultiCodecReceiveTest::ConfigureEncoder(
+ const CodecConfig& config,
+ VideoEncoderFactory* encoder_factory) {
+ GetVideoSendConfig()->encoder_settings.encoder_factory = encoder_factory;
+ GetVideoSendConfig()->rtp.payload_name = config.payload_name;
+ GetVideoSendConfig()->rtp.payload_type =
+ PayloadNameToPayloadType(config.payload_name);
+ GetVideoEncoderConfig()->codec_type =
+ PayloadStringToCodecType(config.payload_name);
+ EXPECT_EQ(1u, GetVideoEncoderConfig()->simulcast_layers.size());
+ GetVideoEncoderConfig()->simulcast_layers[0].num_temporal_layers =
+ config.num_temporal_layers;
+ GetVideoEncoderConfig()->video_format.name = config.payload_name;
+}
+
+void MultiCodecReceiveTest::RunTestWithCodecs(
+ const std::vector<CodecConfig>& configs) {
+ EXPECT_TRUE(!configs.empty());
+
+ test::FunctionVideoEncoderFactory encoder_factory(
+ [](const SdpVideoFormat& format) -> std::unique_ptr<VideoEncoder> {
+ if (format.name == "VP8") {
+ return VP8Encoder::Create();
+ }
+ if (format.name == "VP9") {
+ return VP9Encoder::Create();
+ }
+ if (format.name == "H264") {
+ return H264Encoder::Create(cricket::VideoCodec("H264"));
+ }
+ RTC_DCHECK_NOTREACHED() << format.name;
+ return nullptr;
+ });
+ test::FunctionVideoDecoderFactory decoder_factory(
+ [](const SdpVideoFormat& format) -> std::unique_ptr<VideoDecoder> {
+ if (format.name == "VP8") {
+ return VP8Decoder::Create();
+ }
+ if (format.name == "VP9") {
+ return VP9Decoder::Create();
+ }
+ if (format.name == "H264") {
+ return H264Decoder::Create();
+ }
+ RTC_DCHECK_NOTREACHED() << format.name;
+ return nullptr;
+ });
+ // Create and start call.
+ SendTask(task_queue(),
+ [this, &configs, &encoder_factory, &decoder_factory]() {
+ CreateSendConfig(1, 0, 0);
+ ConfigureEncoder(configs[0], &encoder_factory);
+ CreateMatchingReceiveConfigs();
+ video_receive_configs_[0].renderer = &observer_;
+ // Disable to avoid post-decode frame dropping in
+ // VideoRenderFrames.
+ video_receive_configs_[0].enable_prerenderer_smoothing = false;
+ ConfigureDecoders(configs, &decoder_factory);
+ CreateVideoStreams();
+ CreateFrameGeneratorCapturer(kFps, kWidth, kHeight);
+ Start();
+ });
+ EXPECT_TRUE(observer_.Wait()) << "Timed out waiting for frames.";
+
+ for (size_t i = 1; i < configs.size(); ++i) {
+ // Recreate VideoSendStream with new config (codec, temporal layers).
+ SendTask(task_queue(), [this, i, &configs, &encoder_factory]() {
+ DestroyVideoSendStreams();
+ observer_.Reset(PayloadNameToPayloadType(configs[i].payload_name));
+
+ ConfigureEncoder(configs[i], &encoder_factory);
+ CreateVideoSendStreams();
+ GetVideoSendStream()->Start();
+ CreateFrameGeneratorCapturer(kFps, kWidth / 2, kHeight / 2);
+ ConnectVideoSourcesToStreams();
+ });
+ EXPECT_TRUE(observer_.Wait()) << "Timed out waiting for frames.";
+ }
+
+ SendTask(task_queue(), [this]() {
+ Stop();
+ DestroyStreams();
+ });
+}
+
+TEST_F(MultiCodecReceiveTest, SingleStreamReceivesVp8Vp9) {
+ RunTestWithCodecs({{"VP8", 1}, {"VP9", 1}, {"VP8", 1}});
+}
+
+TEST_F(MultiCodecReceiveTest, SingleStreamReceivesVp8Vp9WithTl) {
+ RunTestWithCodecs({{"VP8", 2}, {"VP9", 2}, {"VP8", 2}});
+}
+
+#if defined(WEBRTC_USE_H264)
+TEST_F(MultiCodecReceiveTest, SingleStreamReceivesVp8H264) {
+ RunTestWithCodecs({{"VP8", 1}, {"H264", 1}, {"VP8", 1}});
+}
+
+TEST_F(MultiCodecReceiveTest, SingleStreamReceivesVp8H264WithTl) {
+ RunTestWithCodecs({{"VP8", 3}, {"H264", 1}, {"VP8", 3}});
+}
+
+TEST_F(MultiCodecReceiveTest, SingleStreamReceivesVp8Vp9H264) {
+ RunTestWithCodecs({{"VP8", 1}, {"VP9", 1}, {"H264", 1}, {"VP9", 1}});
+}
+
+TEST_F(MultiCodecReceiveTest, SingleStreamReceivesVp8Vp9H264WithTl) {
+ RunTestWithCodecs({{"VP8", 3}, {"VP9", 2}, {"H264", 1}, {"VP9", 3}});
+}
+#endif // defined(WEBRTC_USE_H264)
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/video/end_to_end_tests/multi_stream_tester.cc b/third_party/libwebrtc/video/end_to_end_tests/multi_stream_tester.cc
new file mode 100644
index 0000000000..82e9eb9417
--- /dev/null
+++ b/third_party/libwebrtc/video/end_to_end_tests/multi_stream_tester.cc
@@ -0,0 +1,180 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video/end_to_end_tests/multi_stream_tester.h"
+
+#include <memory>
+#include <utility>
+#include <vector>
+
+#include "absl/memory/memory.h"
+#include "api/rtc_event_log/rtc_event_log.h"
+#include "api/task_queue/default_task_queue_factory.h"
+#include "api/task_queue/task_queue_base.h"
+#include "api/test/create_frame_generator.h"
+#include "api/test/simulated_network.h"
+#include "api/test/video/function_video_encoder_factory.h"
+#include "api/video/builtin_video_bitrate_allocator_factory.h"
+#include "call/fake_network_pipe.h"
+#include "call/simulated_network.h"
+#include "media/engine/internal_decoder_factory.h"
+#include "modules/video_coding/codecs/vp8/include/vp8.h"
+#include "rtc_base/task_queue_for_test.h"
+#include "test/call_test.h"
+#include "test/encoder_settings.h"
+
+namespace webrtc {
+
+MultiStreamTester::MultiStreamTester() {
+ // TODO(sprang): Cleanup when msvc supports explicit initializers for array.
+ codec_settings[0] = {1, 640, 480};
+ codec_settings[1] = {2, 320, 240};
+ codec_settings[2] = {3, 240, 160};
+}
+
+MultiStreamTester::~MultiStreamTester() = default;
+
+void MultiStreamTester::RunTest() {
+ webrtc::RtcEventLogNull event_log;
+ auto task_queue_factory = CreateDefaultTaskQueueFactory();
+  // Use high priority since this task queue is used to deliver fake network
+  // packets at the correct time. These test tasks should be preferred over
+  // the code under test to make the test more stable.
+ auto task_queue = task_queue_factory->CreateTaskQueue(
+ "TaskQueue", TaskQueueFactory::Priority::HIGH);
+ Call::Config config(&event_log);
+ test::ScopedKeyValueConfig field_trials;
+ config.trials = &field_trials;
+ config.task_queue_factory = task_queue_factory.get();
+ std::unique_ptr<Call> sender_call;
+ std::unique_ptr<Call> receiver_call;
+ std::unique_ptr<test::DirectTransport> sender_transport;
+ std::unique_ptr<test::DirectTransport> receiver_transport;
+
+ VideoSendStream* send_streams[kNumStreams];
+ VideoReceiveStreamInterface* receive_streams[kNumStreams];
+ test::FrameGeneratorCapturer* frame_generators[kNumStreams];
+ test::FunctionVideoEncoderFactory encoder_factory(
+ []() { return VP8Encoder::Create(); });
+ std::unique_ptr<VideoBitrateAllocatorFactory> bitrate_allocator_factory =
+ CreateBuiltinVideoBitrateAllocatorFactory();
+ InternalDecoderFactory decoder_factory;
+
+ SendTask(task_queue.get(), [&]() {
+ sender_call = absl::WrapUnique(Call::Create(config));
+ receiver_call = absl::WrapUnique(Call::Create(config));
+ sender_transport = CreateSendTransport(task_queue.get(), sender_call.get());
+ receiver_transport =
+ CreateReceiveTransport(task_queue.get(), receiver_call.get());
+ sender_transport->SetReceiver(receiver_call->Receiver());
+ receiver_transport->SetReceiver(sender_call->Receiver());
+
+ for (size_t i = 0; i < kNumStreams; ++i) {
+ uint32_t ssrc = codec_settings[i].ssrc;
+ int width = codec_settings[i].width;
+ int height = codec_settings[i].height;
+
+ VideoSendStream::Config send_config(sender_transport.get());
+ send_config.rtp.ssrcs.push_back(ssrc);
+ send_config.encoder_settings.encoder_factory = &encoder_factory;
+ send_config.encoder_settings.bitrate_allocator_factory =
+ bitrate_allocator_factory.get();
+ send_config.rtp.payload_name = "VP8";
+ send_config.rtp.payload_type = kVideoPayloadType;
+ VideoEncoderConfig encoder_config;
+ test::FillEncoderConfiguration(kVideoCodecVP8, 1, &encoder_config);
+ encoder_config.max_bitrate_bps = 100000;
+
+ UpdateSendConfig(i, &send_config, &encoder_config, &frame_generators[i]);
+
+ send_streams[i] = sender_call->CreateVideoSendStream(
+ send_config.Copy(), encoder_config.Copy());
+ send_streams[i]->Start();
+
+ VideoReceiveStreamInterface::Config receive_config(
+ receiver_transport.get());
+ receive_config.rtp.remote_ssrc = ssrc;
+ receive_config.rtp.local_ssrc = test::CallTest::kReceiverLocalVideoSsrc;
+ receive_config.decoder_factory = &decoder_factory;
+ VideoReceiveStreamInterface::Decoder decoder =
+ test::CreateMatchingDecoder(send_config);
+ receive_config.decoders.push_back(decoder);
+
+ UpdateReceiveConfig(i, &receive_config);
+
+ receive_streams[i] =
+ receiver_call->CreateVideoReceiveStream(std::move(receive_config));
+ receive_streams[i]->Start();
+
+ auto* frame_generator = new test::FrameGeneratorCapturer(
+ Clock::GetRealTimeClock(),
+ test::CreateSquareFrameGenerator(width, height, absl::nullopt,
+ absl::nullopt),
+ 30, *task_queue_factory);
+ frame_generators[i] = frame_generator;
+ send_streams[i]->SetSource(frame_generator,
+ DegradationPreference::MAINTAIN_FRAMERATE);
+ frame_generator->Init();
+ frame_generator->Start();
+ }
+ });
+
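+  // Blocks until the subclass-defined observers report that the test
+  // condition has been met.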
+ Wait();
+
+ SendTask(task_queue.get(), [&]() {
+ for (size_t i = 0; i < kNumStreams; ++i) {
+ frame_generators[i]->Stop();
+ sender_call->DestroyVideoSendStream(send_streams[i]);
+ receiver_call->DestroyVideoReceiveStream(receive_streams[i]);
+ delete frame_generators[i];
+ }
+
+ sender_transport.reset();
+ receiver_transport.reset();
+
+ sender_call.reset();
+ receiver_call.reset();
+ });
+}
+
+void MultiStreamTester::UpdateSendConfig(
+ size_t stream_index,
+ VideoSendStream::Config* send_config,
+ VideoEncoderConfig* encoder_config,
+ test::FrameGeneratorCapturer** frame_generator) {}
+
+void MultiStreamTester::UpdateReceiveConfig(
+ size_t stream_index,
+ VideoReceiveStreamInterface::Config* receive_config) {}
+
+std::unique_ptr<test::DirectTransport> MultiStreamTester::CreateSendTransport(
+ TaskQueueBase* task_queue,
+ Call* sender_call) {
+ std::vector<RtpExtension> extensions = {};
+ return std::make_unique<test::DirectTransport>(
+ task_queue,
+ std::make_unique<FakeNetworkPipe>(
+ Clock::GetRealTimeClock(),
+ std::make_unique<SimulatedNetwork>(BuiltInNetworkBehaviorConfig())),
+ sender_call, payload_type_map_, extensions, extensions);
+}
+
+std::unique_ptr<test::DirectTransport>
+MultiStreamTester::CreateReceiveTransport(TaskQueueBase* task_queue,
+ Call* receiver_call) {
+ std::vector<RtpExtension> extensions = {};
+ return std::make_unique<test::DirectTransport>(
+ task_queue,
+ std::make_unique<FakeNetworkPipe>(
+ Clock::GetRealTimeClock(),
+ std::make_unique<SimulatedNetwork>(BuiltInNetworkBehaviorConfig())),
+ receiver_call, payload_type_map_, extensions, extensions);
+}
+} // namespace webrtc
diff --git a/third_party/libwebrtc/video/end_to_end_tests/multi_stream_tester.h b/third_party/libwebrtc/video/end_to_end_tests/multi_stream_tester.h
new file mode 100644
index 0000000000..87200930f4
--- /dev/null
+++ b/third_party/libwebrtc/video/end_to_end_tests/multi_stream_tester.h
@@ -0,0 +1,64 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef VIDEO_END_TO_END_TESTS_MULTI_STREAM_TESTER_H_
+#define VIDEO_END_TO_END_TESTS_MULTI_STREAM_TESTER_H_
+
+#include <map>
+#include <memory>
+
+#include "api/task_queue/task_queue_base.h"
+#include "call/call.h"
+#include "test/direct_transport.h"
+#include "test/frame_generator_capturer.h"
+
+namespace webrtc {
+// The test sets up a Call with multiple senders using different resolutions
+// and SSRCs. Another Call is set up to receive all three of these streams
+// with different renderers.
+class MultiStreamTester {
+ public:
+ static constexpr size_t kNumStreams = 3;
+ const uint8_t kVideoPayloadType = 124;
+ const std::map<uint8_t, MediaType> payload_type_map_ = {
+ {kVideoPayloadType, MediaType::VIDEO}};
+
+ struct CodecSettings {
+ uint32_t ssrc;
+ int width;
+ int height;
+ } codec_settings[kNumStreams];
+
+ MultiStreamTester();
+
+ virtual ~MultiStreamTester();
+
+ void RunTest();
+
+ protected:
+ virtual void Wait() = 0;
+  // Note: frame_generator is a pointer-to-pointer, since the actual instance
+  // hasn't been created at the time of this call. Only when packets/frames
+  // start flowing should this be dereferenced.
+ virtual void UpdateSendConfig(size_t stream_index,
+ VideoSendStream::Config* send_config,
+ VideoEncoderConfig* encoder_config,
+ test::FrameGeneratorCapturer** frame_generator);
+ virtual void UpdateReceiveConfig(
+ size_t stream_index,
+ VideoReceiveStreamInterface::Config* receive_config);
+ virtual std::unique_ptr<test::DirectTransport> CreateSendTransport(
+ TaskQueueBase* task_queue,
+ Call* sender_call);
+ virtual std::unique_ptr<test::DirectTransport> CreateReceiveTransport(
+ TaskQueueBase* task_queue,
+ Call* receiver_call);
+};
+} // namespace webrtc
+#endif // VIDEO_END_TO_END_TESTS_MULTI_STREAM_TESTER_H_
diff --git a/third_party/libwebrtc/video/end_to_end_tests/multi_stream_tests.cc b/third_party/libwebrtc/video/end_to_end_tests/multi_stream_tests.cc
new file mode 100644
index 0000000000..b997538d96
--- /dev/null
+++ b/third_party/libwebrtc/video/end_to_end_tests/multi_stream_tests.cc
@@ -0,0 +1,92 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <memory>
+#include <vector>
+
+#include "api/video/video_frame.h"
+#include "api/video/video_sink_interface.h"
+#include "call/rtp_config.h"
+#include "call/video_receive_stream.h"
+#include "call/video_send_stream.h"
+#include "rtc_base/event.h"
+#include "test/frame_generator_capturer.h"
+#include "test/gtest.h"
+#include "video/config/video_encoder_config.h"
+#include "video/end_to_end_tests/multi_stream_tester.h"
+
+namespace webrtc {
+// Each renderer verifies that it receives the expected resolution, and as soon
+// as every renderer has received a frame, the test finishes.
+TEST(MultiStreamEndToEndTest, SendsAndReceivesMultipleStreams) {
+ class VideoOutputObserver : public rtc::VideoSinkInterface<VideoFrame> {
+ public:
+ VideoOutputObserver(const MultiStreamTester::CodecSettings& settings,
+ uint32_t ssrc,
+ test::FrameGeneratorCapturer** frame_generator)
+ : settings_(settings), ssrc_(ssrc), frame_generator_(frame_generator) {}
+
+ void OnFrame(const VideoFrame& video_frame) override {
+ EXPECT_EQ(settings_.width, video_frame.width());
+ EXPECT_EQ(settings_.height, video_frame.height());
+ (*frame_generator_)->Stop();
+ done_.Set();
+ }
+
+ uint32_t Ssrc() { return ssrc_; }
+
+ bool Wait() { return done_.Wait(TimeDelta::Seconds(30)); }
+
+ private:
+ const MultiStreamTester::CodecSettings& settings_;
+ const uint32_t ssrc_;
+ test::FrameGeneratorCapturer** const frame_generator_;
+ rtc::Event done_;
+ };
+
+ class Tester : public MultiStreamTester {
+ public:
+ Tester() = default;
+ ~Tester() override = default;
+
+ protected:
+ void Wait() override {
+ for (const auto& observer : observers_) {
+ EXPECT_TRUE(observer->Wait())
+            << "Timed out waiting for a frame on SSRC " << observer->Ssrc();
+ }
+ }
+
+ void UpdateSendConfig(
+ size_t stream_index,
+ VideoSendStream::Config* send_config,
+ VideoEncoderConfig* encoder_config,
+ test::FrameGeneratorCapturer** frame_generator) override {
+ observers_[stream_index] = std::make_unique<VideoOutputObserver>(
+ codec_settings[stream_index], send_config->rtp.ssrcs.front(),
+ frame_generator);
+ }
+
+ void UpdateReceiveConfig(
+ size_t stream_index,
+ VideoReceiveStreamInterface::Config* receive_config) override {
+ receive_config->renderer = observers_[stream_index].get();
+ }
+
+ private:
+ std::unique_ptr<VideoOutputObserver> observers_[kNumStreams];
+ } tester;
+
+ tester.RunTest();
+}
+} // namespace webrtc
diff --git a/third_party/libwebrtc/video/end_to_end_tests/network_state_tests.cc b/third_party/libwebrtc/video/end_to_end_tests/network_state_tests.cc
new file mode 100644
index 0000000000..a39f9fe9e3
--- /dev/null
+++ b/third_party/libwebrtc/video/end_to_end_tests/network_state_tests.cc
@@ -0,0 +1,428 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+
+#include "api/media_types.h"
+#include "api/task_queue/default_task_queue_factory.h"
+#include "api/task_queue/task_queue_base.h"
+#include "api/task_queue/task_queue_factory.h"
+#include "api/test/simulated_network.h"
+#include "api/video_codecs/video_encoder.h"
+#include "call/fake_network_pipe.h"
+#include "call/simulated_network.h"
+#include "modules/rtp_rtcp/source/rtp_packet.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/task_queue_for_test.h"
+#include "system_wrappers/include/sleep.h"
+#include "test/call_test.h"
+#include "test/fake_encoder.h"
+#include "test/gtest.h"
+#include "test/video_encoder_proxy_factory.h"
+
+namespace webrtc {
+namespace {
+constexpr int kSilenceTimeoutMs = 2000;
+}
+
+class NetworkStateEndToEndTest : public test::CallTest {
+ protected:
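+  // A transport that fails the test if any RTP or RTCP packet is sent.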
+ class UnusedTransport : public Transport {
+ private:
+ bool SendRtp(const uint8_t* packet,
+ size_t length,
+ const PacketOptions& options) override {
+ ADD_FAILURE() << "Unexpected RTP sent.";
+ return false;
+ }
+
+ bool SendRtcp(const uint8_t* packet, size_t length) override {
+ ADD_FAILURE() << "Unexpected RTCP sent.";
+ return false;
+ }
+ };
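+  // A transport that fails the test at destruction if an expected RTP or
+  // RTCP packet was never sent.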
+ class RequiredTransport : public Transport {
+ public:
+ RequiredTransport(bool rtp_required, bool rtcp_required)
+ : need_rtp_(rtp_required), need_rtcp_(rtcp_required) {}
+ ~RequiredTransport() {
+ if (need_rtp_) {
+ ADD_FAILURE() << "Expected RTP packet not sent.";
+ }
+ if (need_rtcp_) {
+ ADD_FAILURE() << "Expected RTCP packet not sent.";
+ }
+ }
+
+ private:
+ bool SendRtp(const uint8_t* packet,
+ size_t length,
+ const PacketOptions& options) override {
+ MutexLock lock(&mutex_);
+ need_rtp_ = false;
+ return true;
+ }
+
+ bool SendRtcp(const uint8_t* packet, size_t length) override {
+ MutexLock lock(&mutex_);
+ need_rtcp_ = false;
+ return true;
+ }
+ bool need_rtp_;
+ bool need_rtcp_;
+ Mutex mutex_;
+ };
+ void VerifyNewVideoSendStreamsRespectNetworkState(
+ MediaType network_to_bring_up,
+ VideoEncoder* encoder,
+ Transport* transport);
+ void VerifyNewVideoReceiveStreamsRespectNetworkState(
+ MediaType network_to_bring_up,
+ Transport* transport);
+};
+
+void NetworkStateEndToEndTest::VerifyNewVideoSendStreamsRespectNetworkState(
+ MediaType network_to_bring_up,
+ VideoEncoder* encoder,
+ Transport* transport) {
+ test::VideoEncoderProxyFactory encoder_factory(encoder);
+
+ SendTask(task_queue(),
+ [this, network_to_bring_up, &encoder_factory, transport]() {
+ CreateSenderCall(Call::Config(send_event_log_.get()));
+ sender_call_->SignalChannelNetworkState(network_to_bring_up,
+ kNetworkUp);
+
+ CreateSendConfig(1, 0, 0, transport);
+ GetVideoSendConfig()->encoder_settings.encoder_factory =
+ &encoder_factory;
+ CreateVideoStreams();
+ CreateFrameGeneratorCapturer(kDefaultFramerate, kDefaultWidth,
+ kDefaultHeight);
+
+ Start();
+ });
+
+ SleepMs(kSilenceTimeoutMs);
+
+ SendTask(task_queue(), [this]() {
+ Stop();
+ DestroyStreams();
+ DestroyCalls();
+ });
+}
+
+void NetworkStateEndToEndTest::VerifyNewVideoReceiveStreamsRespectNetworkState(
+ MediaType network_to_bring_up,
+ Transport* transport) {
+ SendTask(task_queue(), [this, network_to_bring_up, transport]() {
+ CreateCalls();
+ receiver_call_->SignalChannelNetworkState(network_to_bring_up, kNetworkUp);
+ CreateSendTransport(BuiltInNetworkBehaviorConfig(),
+ /*observer=*/nullptr);
+
+ CreateSendConfig(1, 0, 0);
+ CreateMatchingReceiveConfigs(transport);
+ CreateVideoStreams();
+ CreateFrameGeneratorCapturer(kDefaultFramerate, kDefaultWidth,
+ kDefaultHeight);
+ Start();
+ });
+
+ SleepMs(kSilenceTimeoutMs);
+
+ SendTask(task_queue(), [this]() {
+ Stop();
+ DestroyStreams();
+ DestroyCalls();
+ });
+}
+
+TEST_F(NetworkStateEndToEndTest, RespectsNetworkState) {
+ // TODO(pbos): Remove accepted downtime packets etc. when signaling network
+ // down blocks until no more packets will be sent.
+
+ // Pacer will send from its packet list and then send required padding before
+ // checking paused_ again. This should be enough for one round of pacing,
+ // otherwise increase.
+ static const int kNumAcceptedDowntimeRtp = 5;
+ // A single RTCP may be in the pipeline.
+ static const int kNumAcceptedDowntimeRtcp = 1;
+ class NetworkStateTest : public test::EndToEndTest, public test::FakeEncoder {
+ public:
+ explicit NetworkStateTest(TaskQueueBase* task_queue)
+ : EndToEndTest(kDefaultTimeout),
+ FakeEncoder(Clock::GetRealTimeClock()),
+ e2e_test_task_queue_(task_queue),
+ task_queue_(CreateDefaultTaskQueueFactory()->CreateTaskQueue(
+ "NetworkStateTest",
+ TaskQueueFactory::Priority::NORMAL)),
+ sender_call_(nullptr),
+ receiver_call_(nullptr),
+ encoder_factory_(this),
+ sender_state_(kNetworkUp),
+ sender_rtp_(0),
+ sender_padding_(0),
+ sender_rtcp_(0),
+ receiver_rtcp_(0),
+ down_frames_(0) {}
+
+ Action OnSendRtp(const uint8_t* packet, size_t length) override {
+ MutexLock lock(&test_mutex_);
+ RtpPacket rtp_packet;
+ EXPECT_TRUE(rtp_packet.Parse(packet, length));
+ if (rtp_packet.payload_size() == 0)
+ ++sender_padding_;
+ ++sender_rtp_;
+ packet_event_.Set();
+ return SEND_PACKET;
+ }
+
+ Action OnSendRtcp(const uint8_t* packet, size_t length) override {
+ MutexLock lock(&test_mutex_);
+ ++sender_rtcp_;
+ packet_event_.Set();
+ return SEND_PACKET;
+ }
+
+ Action OnReceiveRtp(const uint8_t* packet, size_t length) override {
+ ADD_FAILURE() << "Unexpected receiver RTP, should not be sending.";
+ return SEND_PACKET;
+ }
+
+ Action OnReceiveRtcp(const uint8_t* packet, size_t length) override {
+ MutexLock lock(&test_mutex_);
+ ++receiver_rtcp_;
+ packet_event_.Set();
+ return SEND_PACKET;
+ }
+
+ void OnCallsCreated(Call* sender_call, Call* receiver_call) override {
+ sender_call_ = sender_call;
+ receiver_call_ = receiver_call;
+ }
+
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStreamInterface::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
+ send_config->encoder_settings.encoder_factory = &encoder_factory_;
+ }
+
+ void SignalChannelNetworkState(Call* call,
+ MediaType media_type,
+ NetworkState network_state) {
+ SendTask(e2e_test_task_queue_, [call, media_type, network_state] {
+ call->SignalChannelNetworkState(media_type, network_state);
+ });
+ }
+
+ void PerformTest() override {
+ EXPECT_TRUE(encoded_frames_.Wait(kDefaultTimeout))
+ << "No frames received by the encoder.";
+
+ SendTask(task_queue_.get(), [this]() {
+ // Wait for packets from both sender/receiver.
+ WaitForPacketsOrSilence(false, false);
+
+        // Sender-side network down for audio; there should be no effect on
+        // video.
+ SignalChannelNetworkState(sender_call_, MediaType::AUDIO, kNetworkDown);
+
+ WaitForPacketsOrSilence(false, false);
+
+        // Receiver-side network down for audio; no change expected.
+ SignalChannelNetworkState(receiver_call_, MediaType::AUDIO,
+ kNetworkDown);
+ WaitForPacketsOrSilence(false, false);
+
+ // Sender-side network down.
+ SignalChannelNetworkState(sender_call_, MediaType::VIDEO, kNetworkDown);
+ {
+ MutexLock lock(&test_mutex_);
+ // After network goes down we shouldn't be encoding more frames.
+ sender_state_ = kNetworkDown;
+ }
+ // Wait for receiver-packets and no sender packets.
+ WaitForPacketsOrSilence(true, false);
+
+ // Receiver-side network down.
+ SignalChannelNetworkState(receiver_call_, MediaType::VIDEO,
+ kNetworkDown);
+ WaitForPacketsOrSilence(true, true);
+
+        // Network up for audio on both sides; video is still not expected to
+        // start.
+ SignalChannelNetworkState(sender_call_, MediaType::AUDIO, kNetworkUp);
+ SignalChannelNetworkState(receiver_call_, MediaType::AUDIO, kNetworkUp);
+ WaitForPacketsOrSilence(true, true);
+
+ // Network back up again for both.
+ {
+ MutexLock lock(&test_mutex_);
+ // It's OK to encode frames again, as we're about to bring up the
+ // network.
+ sender_state_ = kNetworkUp;
+ }
+ SignalChannelNetworkState(sender_call_, MediaType::VIDEO, kNetworkUp);
+ SignalChannelNetworkState(receiver_call_, MediaType::VIDEO, kNetworkUp);
+ WaitForPacketsOrSilence(false, false);
+
+ // TODO(skvlad): add tests to verify that the audio streams are stopped
+ // when the network goes down for audio once the workaround in
+ // paced_sender.cc is removed.
+ });
+ }
+
+ int32_t Encode(const VideoFrame& input_image,
+ const std::vector<VideoFrameType>* frame_types) override {
+ {
+ MutexLock lock(&test_mutex_);
+ if (sender_state_ == kNetworkDown) {
+ ++down_frames_;
+ EXPECT_LE(down_frames_, 1)
+ << "Encoding more than one frame while network is down.";
+ if (down_frames_ > 1)
+ encoded_frames_.Set();
+ } else {
+ encoded_frames_.Set();
+ }
+ }
+ return test::FakeEncoder::Encode(input_image, frame_types);
+ }
+
+ private:
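+    // Waits until enough packets have been observed on each side that is
+    // expected to be up, or until kSilenceTimeoutMs has elapsed for a side
+    // that is expected to be down, failing if more than the accepted number
+    // of downtime packets are seen.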
+ void WaitForPacketsOrSilence(bool sender_down, bool receiver_down) {
+ int64_t initial_time_ms = clock_->TimeInMilliseconds();
+ int initial_sender_rtp;
+ int initial_sender_rtcp;
+ int initial_receiver_rtcp;
+ {
+ MutexLock lock(&test_mutex_);
+ initial_sender_rtp = sender_rtp_;
+ initial_sender_rtcp = sender_rtcp_;
+ initial_receiver_rtcp = receiver_rtcp_;
+ }
+ bool sender_done = false;
+ bool receiver_done = false;
+ while (!sender_done || !receiver_done) {
+ packet_event_.Wait(TimeDelta::Millis(kSilenceTimeoutMs));
+ int64_t time_now_ms = clock_->TimeInMilliseconds();
+ MutexLock lock(&test_mutex_);
+ if (sender_down) {
+ ASSERT_LE(sender_rtp_ - initial_sender_rtp - sender_padding_,
+ kNumAcceptedDowntimeRtp)
+ << "RTP sent during sender-side downtime.";
+ ASSERT_LE(sender_rtcp_ - initial_sender_rtcp,
+ kNumAcceptedDowntimeRtcp)
+ << "RTCP sent during sender-side downtime.";
+ if (time_now_ms - initial_time_ms >=
+ static_cast<int64_t>(kSilenceTimeoutMs)) {
+ sender_done = true;
+ }
+ } else {
+ if (sender_rtp_ > initial_sender_rtp + kNumAcceptedDowntimeRtp)
+ sender_done = true;
+ }
+ if (receiver_down) {
+ ASSERT_LE(receiver_rtcp_ - initial_receiver_rtcp,
+ kNumAcceptedDowntimeRtcp)
+ << "RTCP sent during receiver-side downtime.";
+ if (time_now_ms - initial_time_ms >=
+ static_cast<int64_t>(kSilenceTimeoutMs)) {
+ receiver_done = true;
+ }
+ } else {
+ if (receiver_rtcp_ > initial_receiver_rtcp + kNumAcceptedDowntimeRtcp)
+ receiver_done = true;
+ }
+ }
+ }
+
+ TaskQueueBase* const e2e_test_task_queue_;
+ std::unique_ptr<TaskQueueBase, TaskQueueDeleter> task_queue_;
+ Mutex test_mutex_;
+ rtc::Event encoded_frames_;
+ rtc::Event packet_event_;
+ Call* sender_call_;
+ Call* receiver_call_;
+ test::VideoEncoderProxyFactory encoder_factory_;
+ NetworkState sender_state_ RTC_GUARDED_BY(test_mutex_);
+ int sender_rtp_ RTC_GUARDED_BY(test_mutex_);
+ int sender_padding_ RTC_GUARDED_BY(test_mutex_);
+ int sender_rtcp_ RTC_GUARDED_BY(test_mutex_);
+ int receiver_rtcp_ RTC_GUARDED_BY(test_mutex_);
+ int down_frames_ RTC_GUARDED_BY(test_mutex_);
+ } test(task_queue());
+
+ RunBaseTest(&test);
+}
+
+TEST_F(NetworkStateEndToEndTest, NewVideoSendStreamsRespectVideoNetworkDown) {
+ class UnusedEncoder : public test::FakeEncoder {
+ public:
+ UnusedEncoder() : FakeEncoder(Clock::GetRealTimeClock()) {}
+
+ int32_t InitEncode(const VideoCodec* config,
+ const Settings& settings) override {
+ EXPECT_GT(config->startBitrate, 0u);
+ return 0;
+ }
+ int32_t Encode(const VideoFrame& input_image,
+ const std::vector<VideoFrameType>* frame_types) override {
+ ADD_FAILURE() << "Unexpected frame encode.";
+ return test::FakeEncoder::Encode(input_image, frame_types);
+ }
+ };
+
+ UnusedEncoder unused_encoder;
+ UnusedTransport unused_transport;
+ VerifyNewVideoSendStreamsRespectNetworkState(
+ MediaType::AUDIO, &unused_encoder, &unused_transport);
+}
+
+TEST_F(NetworkStateEndToEndTest, NewVideoSendStreamsIgnoreAudioNetworkDown) {
+ class RequiredEncoder : public test::FakeEncoder {
+ public:
+ RequiredEncoder()
+ : FakeEncoder(Clock::GetRealTimeClock()), encoded_frame_(false) {}
+ ~RequiredEncoder() {
+ if (!encoded_frame_) {
+ ADD_FAILURE() << "Didn't encode an expected frame";
+ }
+ }
+ int32_t Encode(const VideoFrame& input_image,
+ const std::vector<VideoFrameType>* frame_types) override {
+ encoded_frame_ = true;
+ return test::FakeEncoder::Encode(input_image, frame_types);
+ }
+
+ private:
+ bool encoded_frame_;
+ };
+
+ RequiredTransport required_transport(true /*rtp*/, false /*rtcp*/);
+ RequiredEncoder required_encoder;
+ VerifyNewVideoSendStreamsRespectNetworkState(
+ MediaType::VIDEO, &required_encoder, &required_transport);
+}
+
+TEST_F(NetworkStateEndToEndTest,
+ NewVideoReceiveStreamsRespectVideoNetworkDown) {
+ UnusedTransport transport;
+ VerifyNewVideoReceiveStreamsRespectNetworkState(MediaType::AUDIO, &transport);
+}
+
+TEST_F(NetworkStateEndToEndTest, NewVideoReceiveStreamsIgnoreAudioNetworkDown) {
+ RequiredTransport transport(false /*rtp*/, true /*rtcp*/);
+ VerifyNewVideoReceiveStreamsRespectNetworkState(MediaType::VIDEO, &transport);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/video/end_to_end_tests/resolution_bitrate_limits_tests.cc b/third_party/libwebrtc/video/end_to_end_tests/resolution_bitrate_limits_tests.cc
new file mode 100644
index 0000000000..8455832885
--- /dev/null
+++ b/third_party/libwebrtc/video/end_to_end_tests/resolution_bitrate_limits_tests.cc
@@ -0,0 +1,481 @@
+/*
+ * Copyright 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <algorithm>
+
+#include "media/engine/webrtc_video_engine.h"
+#include "modules/video_coding/svc/scalability_mode_util.h"
+#include "rtc_base/experiments/encoder_info_settings.h"
+#include "test/call_test.h"
+#include "test/fake_encoder.h"
+#include "test/field_trial.h"
+#include "test/gtest.h"
+#include "test/video_encoder_proxy_factory.h"
+#include "video/config/encoder_stream_factory.h"
+
+namespace webrtc {
+namespace test {
+namespace {
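+// For VP9, the number of spatial layers is configured through the
+// encoder-specific settings; other codecs need no extra settings here.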
+void SetEncoderSpecific(VideoEncoderConfig* encoder_config,
+ VideoCodecType type,
+ size_t num_spatial_layers) {
+ if (type == kVideoCodecVP9) {
+ VideoCodecVP9 vp9 = VideoEncoder::GetDefaultVp9Settings();
+ vp9.numberOfSpatialLayers = num_spatial_layers;
+ encoder_config->encoder_specific_settings =
+ rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
+ vp9);
+ }
+}
+
+struct BitrateLimits {
+ DataRate min = DataRate::Zero();
+ DataRate max = DataRate::Zero();
+};
+
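+// Returns the min/max bitrate configured for the spatial (VP9) or simulcast
+// layer whose resolution matches `pixels`; adds a test failure if none does.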
+BitrateLimits GetLayerBitrateLimits(int pixels, const VideoCodec& codec) {
+ if (codec.codecType == VideoCodecType::kVideoCodecVP9) {
+ for (size_t i = 0; i < codec.VP9().numberOfSpatialLayers; ++i) {
+ if (codec.spatialLayers[i].width * codec.spatialLayers[i].height ==
+ pixels) {
+ return {DataRate::KilobitsPerSec(codec.spatialLayers[i].minBitrate),
+ DataRate::KilobitsPerSec(codec.spatialLayers[i].maxBitrate)};
+ }
+ }
+ } else {
+ for (int i = 0; i < codec.numberOfSimulcastStreams; ++i) {
+ if (codec.simulcastStream[i].width * codec.simulcastStream[i].height ==
+ pixels) {
+ return {DataRate::KilobitsPerSec(codec.simulcastStream[i].minBitrate),
+ DataRate::KilobitsPerSec(codec.simulcastStream[i].maxBitrate)};
+ }
+ }
+ }
+ ADD_FAILURE();
+ return BitrateLimits();
+}
+
+} // namespace
+
+class ResolutionBitrateLimitsWithScalabilityModeTest : public test::CallTest {};
+
+class ResolutionBitrateLimitsTest
+ : public test::CallTest,
+ public ::testing::WithParamInterface<std::string> {
+ public:
+ ResolutionBitrateLimitsTest() : payload_name_(GetParam()) {}
+
+ const std::string payload_name_;
+};
+
+INSTANTIATE_TEST_SUITE_P(PayloadName,
+ ResolutionBitrateLimitsTest,
+ ::testing::Values("VP8", "VP9"),
+ [](const ::testing::TestParamInfo<std::string>& info) {
+ return info.param;
+ });
+
+class InitEncodeTest : public test::EndToEndTest,
+ public test::FrameGeneratorCapturer::SinkWantsObserver,
+ public test::FakeEncoder {
+ public:
+ struct Bitrate {
+ const absl::optional<DataRate> min;
+ const absl::optional<DataRate> max;
+ };
+ struct TestConfig {
+ const bool active;
+ const Bitrate bitrate;
+ const absl::optional<ScalabilityMode> scalability_mode;
+ };
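+  // For the layer matching `pixels`, the limits chosen by InitEncode() must
+  // equal `eq_bitrate` (when set) and must differ from `ne_bitrate` (when
+  // set).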
+ struct Expectation {
+ const uint32_t pixels = 0;
+ const Bitrate eq_bitrate;
+ const Bitrate ne_bitrate;
+ };
+
+ InitEncodeTest(const std::string& payload_name,
+ const std::vector<TestConfig>& configs,
+ const std::vector<Expectation>& expectations)
+ : EndToEndTest(test::CallTest::kDefaultTimeout),
+ FakeEncoder(Clock::GetRealTimeClock()),
+ encoder_factory_(this),
+ payload_name_(payload_name),
+ configs_(configs),
+ expectations_(expectations) {}
+
+ void OnFrameGeneratorCapturerCreated(
+ test::FrameGeneratorCapturer* frame_generator_capturer) override {
+ frame_generator_capturer->SetSinkWantsObserver(this);
+ // Set initial resolution.
+ frame_generator_capturer->ChangeResolution(1280, 720);
+ }
+
+ void OnSinkWantsChanged(rtc::VideoSinkInterface<VideoFrame>* sink,
+ const rtc::VideoSinkWants& wants) override {}
+
+ size_t GetNumVideoStreams() const override {
+ return (payload_name_ == "VP9") ? 1 : configs_.size();
+ }
+
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStreamInterface::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
+ webrtc::VideoEncoder::EncoderInfo encoder_info;
+ send_config->encoder_settings.encoder_factory = &encoder_factory_;
+ send_config->rtp.payload_name = payload_name_;
+ send_config->rtp.payload_type = test::CallTest::kVideoSendPayloadType;
+ const VideoCodecType codec_type = PayloadStringToCodecType(payload_name_);
+ encoder_config->codec_type = codec_type;
+ encoder_config->video_stream_factory =
+ rtc::make_ref_counted<cricket::EncoderStreamFactory>(
+ payload_name_, /*max qp*/ 0, /*screencast*/ false,
+ /*screenshare enabled*/ false, encoder_info);
+ encoder_config->max_bitrate_bps = -1;
+ if (configs_.size() == 1 && configs_[0].bitrate.max)
+ encoder_config->max_bitrate_bps = configs_[0].bitrate.max->bps();
+ if (payload_name_ == "VP9") {
+      // Simulcast layers indicate which spatial layers are active.
+ encoder_config->simulcast_layers.resize(configs_.size());
+ }
+ double scale_factor = 1.0;
+ for (int i = configs_.size() - 1; i >= 0; --i) {
+ VideoStream& stream = encoder_config->simulcast_layers[i];
+ stream.active = configs_[i].active;
+ stream.scalability_mode = configs_[i].scalability_mode;
+ if (configs_[i].bitrate.min)
+ stream.min_bitrate_bps = configs_[i].bitrate.min->bps();
+ if (configs_[i].bitrate.max)
+ stream.max_bitrate_bps = configs_[i].bitrate.max->bps();
+ stream.scale_resolution_down_by = scale_factor;
+ scale_factor *= (payload_name_ == "VP9") ? 1.0 : 2.0;
+ }
+ SetEncoderSpecific(encoder_config, codec_type, configs_.size());
+ }
+
+ int32_t InitEncode(const VideoCodec* codec,
+ const Settings& settings) override {
+ for (const auto& expected : expectations_) {
+ BitrateLimits limits = GetLayerBitrateLimits(expected.pixels, *codec);
+ if (expected.eq_bitrate.min)
+ EXPECT_EQ(*expected.eq_bitrate.min, limits.min);
+ if (expected.eq_bitrate.max)
+ EXPECT_EQ(*expected.eq_bitrate.max, limits.max);
+ EXPECT_NE(expected.ne_bitrate.min, limits.min);
+ EXPECT_NE(expected.ne_bitrate.max, limits.max);
+ }
+ observation_complete_.Set();
+ return 0;
+ }
+
+ VideoEncoder::EncoderInfo GetEncoderInfo() const override {
+ EncoderInfo info = FakeEncoder::GetEncoderInfo();
+ if (!encoder_info_override_.resolution_bitrate_limits().empty()) {
+ info.resolution_bitrate_limits =
+ encoder_info_override_.resolution_bitrate_limits();
+ }
+ return info;
+ }
+
+ void PerformTest() override {
+ ASSERT_TRUE(Wait()) << "Timed out while waiting for InitEncode() call.";
+ }
+
+ private:
+ test::VideoEncoderProxyFactory encoder_factory_;
+ const std::string payload_name_;
+ const std::vector<TestConfig> configs_;
+ const std::vector<Expectation> expectations_;
+ const LibvpxVp8EncoderInfoSettings encoder_info_override_;
+};
+
+TEST_P(ResolutionBitrateLimitsTest, LimitsApplied) {
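+  // Override the encoder's recommended bitrate limits for 921600 pixels
+  // (1280x720); the expectations below check that InitEncode() picks them up.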
+ webrtc::test::ScopedFieldTrials field_trials(
+ "WebRTC-GetEncoderInfoOverride/"
+ "frame_size_pixels:921600,"
+ "min_start_bitrate_bps:0,"
+ "min_bitrate_bps:32000,"
+ "max_bitrate_bps:3333000/");
+
+ InitEncodeTest test(payload_name_, {{.active = true}},
+ // Expectations:
+ {{.pixels = 1280 * 720,
+ .eq_bitrate = {DataRate::KilobitsPerSec(32),
+ DataRate::KilobitsPerSec(3333)}}});
+ RunBaseTest(&test);
+}
+
+TEST_F(ResolutionBitrateLimitsWithScalabilityModeTest,
+ OneStreamLimitsAppliedForOneSpatialLayer) {
+ webrtc::test::ScopedFieldTrials field_trials(
+ "WebRTC-GetEncoderInfoOverride/"
+ "frame_size_pixels:921600,"
+ "min_start_bitrate_bps:0,"
+ "min_bitrate_bps:32000,"
+ "max_bitrate_bps:3333000/");
+
+ InitEncodeTest test(
+ "VP9", {{.active = true, .scalability_mode = ScalabilityMode::kL1T1}},
+ // Expectations:
+ {{.pixels = 1280 * 720,
+ .eq_bitrate = {DataRate::KilobitsPerSec(32),
+ DataRate::KilobitsPerSec(3333)}}});
+ RunBaseTest(&test);
+}
+
+TEST_F(ResolutionBitrateLimitsWithScalabilityModeTest,
+ OneStreamLimitsNotAppliedForMultipleSpatialLayers) {
+ webrtc::test::ScopedFieldTrials field_trials(
+ "WebRTC-GetEncoderInfoOverride/"
+ "frame_size_pixels:230400|921600,"
+ "min_start_bitrate_bps:0|0,"
+ "min_bitrate_bps:21000|32000,"
+ "max_bitrate_bps:2222000|3333000/");
+
+ InitEncodeTest test(
+ "VP9", {{.active = true, .scalability_mode = ScalabilityMode::kL2T1}},
+ // Expectations:
+ {{.pixels = 640 * 360,
+ .ne_bitrate = {DataRate::KilobitsPerSec(31),
+ DataRate::KilobitsPerSec(2222)}},
+ {.pixels = 1280 * 720,
+ .ne_bitrate = {DataRate::KilobitsPerSec(32),
+ DataRate::KilobitsPerSec(3333)}}});
+ RunBaseTest(&test);
+}
+
+TEST_P(ResolutionBitrateLimitsTest, EncodingsApplied) {
+ InitEncodeTest test(payload_name_,
+ {{.active = true,
+ .bitrate = {DataRate::KilobitsPerSec(22),
+ DataRate::KilobitsPerSec(3555)}}},
+ // Expectations:
+ {{.pixels = 1280 * 720,
+ .eq_bitrate = {DataRate::KilobitsPerSec(22),
+ DataRate::KilobitsPerSec(3555)}}});
+ RunBaseTest(&test);
+}
+
+TEST_P(ResolutionBitrateLimitsTest, IntersectionApplied) {
+ webrtc::test::ScopedFieldTrials field_trials(
+ "WebRTC-GetEncoderInfoOverride/"
+ "frame_size_pixels:921600,"
+ "min_start_bitrate_bps:0,"
+ "min_bitrate_bps:32000,"
+ "max_bitrate_bps:3333000/");
+
+ InitEncodeTest test(payload_name_,
+ {{.active = true,
+ .bitrate = {DataRate::KilobitsPerSec(22),
+ DataRate::KilobitsPerSec(1555)}}},
+ // Expectations:
+ {{.pixels = 1280 * 720,
+ .eq_bitrate = {DataRate::KilobitsPerSec(32),
+ DataRate::KilobitsPerSec(1555)}}});
+ RunBaseTest(&test);
+}
+
+TEST_P(ResolutionBitrateLimitsTest, LimitsAppliedMiddleActive) {
+ webrtc::test::ScopedFieldTrials field_trials(
+ "WebRTC-GetEncoderInfoOverride/"
+ "frame_size_pixels:230400|921600,"
+ "min_start_bitrate_bps:0|0,"
+ "min_bitrate_bps:21000|32000,"
+ "max_bitrate_bps:2222000|3333000/");
+
+ InitEncodeTest test(payload_name_,
+ {{.active = false}, {.active = true}, {.active = false}},
+ // Expectations:
+ {{.pixels = 640 * 360,
+ .eq_bitrate = {DataRate::KilobitsPerSec(21),
+ DataRate::KilobitsPerSec(2222)}}});
+ RunBaseTest(&test);
+}
+
+TEST_P(ResolutionBitrateLimitsTest, IntersectionAppliedMiddleActive) {
+ webrtc::test::ScopedFieldTrials field_trials(
+ "WebRTC-GetEncoderInfoOverride/"
+ "frame_size_pixels:230400|921600,"
+ "min_start_bitrate_bps:0|0,"
+ "min_bitrate_bps:31000|32000,"
+ "max_bitrate_bps:2222000|3333000/");
+
+ InitEncodeTest test(payload_name_,
+ {{.active = false},
+ {.active = true,
+ .bitrate = {DataRate::KilobitsPerSec(30),
+ DataRate::KilobitsPerSec(1555)}},
+ {.active = false}},
+ // Expectations:
+ {{.pixels = 640 * 360,
+ .eq_bitrate = {DataRate::KilobitsPerSec(31),
+ DataRate::KilobitsPerSec(1555)}}});
+ RunBaseTest(&test);
+}
+
+TEST_P(ResolutionBitrateLimitsTest, DefaultLimitsAppliedMiddleActive) {
+ const absl::optional<VideoEncoder::ResolutionBitrateLimits>
+ kDefaultSinglecastLimits360p =
+ EncoderInfoSettings::GetDefaultSinglecastBitrateLimitsForResolution(
+ PayloadStringToCodecType(payload_name_), 640 * 360);
+
+ InitEncodeTest test(
+ payload_name_, {{.active = false}, {.active = true}, {.active = false}},
+ // Expectations:
+ {{.pixels = 640 * 360,
+ .eq_bitrate = {
+ DataRate::BitsPerSec(kDefaultSinglecastLimits360p->min_bitrate_bps),
+ DataRate::BitsPerSec(
+ kDefaultSinglecastLimits360p->max_bitrate_bps)}}});
+ RunBaseTest(&test);
+}
+
+TEST_F(ResolutionBitrateLimitsWithScalabilityModeTest,
+ DefaultLimitsAppliedForOneSpatialLayer) {
+ const absl::optional<VideoEncoder::ResolutionBitrateLimits>
+ kDefaultSinglecastLimits720p =
+ EncoderInfoSettings::GetDefaultSinglecastBitrateLimitsForResolution(
+ PayloadStringToCodecType("VP9"), 1280 * 720);
+
+ InitEncodeTest test(
+ "VP9",
+ {{.active = true, .scalability_mode = ScalabilityMode::kL1T3},
+ {.active = false}},
+ // Expectations:
+ {{.pixels = 1280 * 720,
+ .eq_bitrate = {
+ DataRate::BitsPerSec(kDefaultSinglecastLimits720p->min_bitrate_bps),
+ DataRate::BitsPerSec(
+ kDefaultSinglecastLimits720p->max_bitrate_bps)}}});
+ RunBaseTest(&test);
+}
+
+TEST_P(ResolutionBitrateLimitsTest, LimitsAppliedHighestActive) {
+ webrtc::test::ScopedFieldTrials field_trials(
+ "WebRTC-GetEncoderInfoOverride/"
+ "frame_size_pixels:230400|921600,"
+ "min_start_bitrate_bps:0|0,"
+ "min_bitrate_bps:31000|32000,"
+ "max_bitrate_bps:2222000|3333000/");
+
+ InitEncodeTest test(payload_name_,
+ {{.active = false}, {.active = false}, {.active = true}},
+ // Expectations:
+ {{.pixels = 1280 * 720,
+ .eq_bitrate = {DataRate::KilobitsPerSec(32),
+ DataRate::KilobitsPerSec(3333)}}});
+ RunBaseTest(&test);
+}
+
+TEST_P(ResolutionBitrateLimitsTest, IntersectionAppliedHighestActive) {
+ webrtc::test::ScopedFieldTrials field_trials(
+ "WebRTC-GetEncoderInfoOverride/"
+ "frame_size_pixels:230400|921600,"
+ "min_start_bitrate_bps:0|0,"
+ "min_bitrate_bps:31000|32000,"
+ "max_bitrate_bps:2222000|3333000/");
+
+ InitEncodeTest test(payload_name_,
+ {{.active = false},
+ {.active = false},
+ {.active = true,
+ .bitrate = {DataRate::KilobitsPerSec(30),
+ DataRate::KilobitsPerSec(1555)}}},
+ // Expectations:
+ {{.pixels = 1280 * 720,
+ .eq_bitrate = {DataRate::KilobitsPerSec(32),
+ DataRate::KilobitsPerSec(1555)}}});
+ RunBaseTest(&test);
+}
+
+TEST_P(ResolutionBitrateLimitsTest, LimitsNotAppliedLowestActive) {
+ webrtc::test::ScopedFieldTrials field_trials(
+ "WebRTC-GetEncoderInfoOverride/"
+ "frame_size_pixels:230400|921600,"
+ "min_start_bitrate_bps:0|0,"
+ "min_bitrate_bps:31000|32000,"
+ "max_bitrate_bps:2222000|3333000/");
+
+ InitEncodeTest test(payload_name_, {{.active = true}, {.active = false}},
+ // Expectations:
+ {{.pixels = 640 * 360,
+ .ne_bitrate = {DataRate::KilobitsPerSec(31),
+ DataRate::KilobitsPerSec(2222)}},
+ {.pixels = 1280 * 720,
+ .ne_bitrate = {DataRate::KilobitsPerSec(32),
+ DataRate::KilobitsPerSec(3333)}}});
+ RunBaseTest(&test);
+}
+
+TEST_F(ResolutionBitrateLimitsWithScalabilityModeTest,
+ LimitsAppliedForVp9OneSpatialLayer) {
+ webrtc::test::ScopedFieldTrials field_trials(
+ "WebRTC-GetEncoderInfoOverride/"
+ "frame_size_pixels:230400|921600,"
+ "min_start_bitrate_bps:0|0,"
+ "min_bitrate_bps:31000|32000,"
+ "max_bitrate_bps:2222000|3333000/");
+
+ InitEncodeTest test(
+ "VP9",
+ {{.active = true, .scalability_mode = ScalabilityMode::kL1T1},
+ {.active = false}},
+ // Expectations:
+ {{.pixels = 1280 * 720,
+ .eq_bitrate = {DataRate::KilobitsPerSec(32),
+ DataRate::KilobitsPerSec(3333)}}});
+ RunBaseTest(&test);
+}
+
+TEST_F(ResolutionBitrateLimitsWithScalabilityModeTest,
+ LimitsNotAppliedForVp9MultipleSpatialLayers) {
+ webrtc::test::ScopedFieldTrials field_trials(
+ "WebRTC-GetEncoderInfoOverride/"
+ "frame_size_pixels:230400|921600,"
+ "min_start_bitrate_bps:0|0,"
+ "min_bitrate_bps:31000|32000,"
+ "max_bitrate_bps:2222000|3333000/");
+
+ InitEncodeTest test(
+ "VP9",
+ {{.active = true, .scalability_mode = ScalabilityMode::kL2T1},
+ {.active = false}},
+ // Expectations:
+ {{.pixels = 640 * 360,
+ .ne_bitrate = {DataRate::KilobitsPerSec(31),
+ DataRate::KilobitsPerSec(2222)}},
+ {.pixels = 1280 * 720,
+ .ne_bitrate = {DataRate::KilobitsPerSec(32),
+ DataRate::KilobitsPerSec(3333)}}});
+ RunBaseTest(&test);
+}
+
+TEST_P(ResolutionBitrateLimitsTest, LimitsNotAppliedSimulcast) {
+ webrtc::test::ScopedFieldTrials field_trials(
+ "WebRTC-GetEncoderInfoOverride/"
+ "frame_size_pixels:230400|921600,"
+ "min_start_bitrate_bps:0|0,"
+ "min_bitrate_bps:31000|32000,"
+ "max_bitrate_bps:2222000|3333000/");
+
+ InitEncodeTest test(payload_name_, {{.active = true}, {.active = true}},
+ // Expectations:
+ {{.pixels = 640 * 360,
+ .ne_bitrate = {DataRate::KilobitsPerSec(31),
+ DataRate::KilobitsPerSec(2222)}},
+ {.pixels = 1280 * 720,
+ .ne_bitrate = {DataRate::KilobitsPerSec(32),
+ DataRate::KilobitsPerSec(3333)}}});
+ RunBaseTest(&test);
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/video/end_to_end_tests/retransmission_tests.cc b/third_party/libwebrtc/video/end_to_end_tests/retransmission_tests.cc
new file mode 100644
index 0000000000..45a9dae1e8
--- /dev/null
+++ b/third_party/libwebrtc/video/end_to_end_tests/retransmission_tests.cc
@@ -0,0 +1,513 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+
+#include "absl/algorithm/container.h"
+#include "api/task_queue/task_queue_base.h"
+#include "api/test/simulated_network.h"
+#include "api/test/video/function_video_encoder_factory.h"
+#include "api/units/time_delta.h"
+#include "call/fake_network_pipe.h"
+#include "call/simulated_network.h"
+#include "modules/rtp_rtcp/source/rtp_packet.h"
+#include "modules/video_coding/codecs/vp8/include/vp8.h"
+#include "rtc_base/event.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/task_queue_for_test.h"
+#include "test/call_test.h"
+#include "test/field_trial.h"
+#include "test/gtest.h"
+#include "test/rtcp_packet_parser.h"
+
+namespace webrtc {
+namespace {
+enum : int { // The first valid value is 1.
+ kVideoRotationExtensionId = 1,
+};
+} // namespace
+
+class RetransmissionEndToEndTest : public test::CallTest {
+ public:
+ RetransmissionEndToEndTest() {
+ RegisterRtpExtension(RtpExtension(RtpExtension::kVideoRotationUri,
+ kVideoRotationExtensionId));
+ }
+
+ protected:
+ void DecodesRetransmittedFrame(bool enable_rtx, bool enable_red);
+ void ReceivesPliAndRecovers(int rtp_history_ms);
+};
+
+TEST_F(RetransmissionEndToEndTest, ReceivesAndRetransmitsNack) {
+ static const int kNumberOfNacksToObserve = 2;
+ static const int kLossBurstSize = 2;
+ static const int kPacketsBetweenLossBursts = 9;
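+  // Drops bursts of kLossBurstSize packets every kPacketsBetweenLossBursts
+  // sent packets until enough NACKs are seen, then waits for every dropped
+  // packet to be retransmitted.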
+ class NackObserver : public test::EndToEndTest {
+ public:
+ NackObserver()
+ : EndToEndTest(kLongTimeout),
+ sent_rtp_packets_(0),
+ packets_left_to_drop_(0),
+ nacks_left_(kNumberOfNacksToObserve) {}
+
+ private:
+ Action OnSendRtp(const uint8_t* packet, size_t length) override {
+ MutexLock lock(&mutex_);
+ RtpPacket rtp_packet;
+ EXPECT_TRUE(rtp_packet.Parse(packet, length));
+
+ // Never drop retransmitted packets.
+ if (dropped_packets_.find(rtp_packet.SequenceNumber()) !=
+ dropped_packets_.end()) {
+ retransmitted_packets_.insert(rtp_packet.SequenceNumber());
+ return SEND_PACKET;
+ }
+
+ if (nacks_left_ <= 0 &&
+ retransmitted_packets_.size() == dropped_packets_.size()) {
+ observation_complete_.Set();
+ }
+
+ ++sent_rtp_packets_;
+
+ // Enough NACKs received, stop dropping packets.
+ if (nacks_left_ <= 0)
+ return SEND_PACKET;
+
+ // Check if it's time for a new loss burst.
+ if (sent_rtp_packets_ % kPacketsBetweenLossBursts == 0)
+ packets_left_to_drop_ = kLossBurstSize;
+
+ // Never drop padding packets as those won't be retransmitted.
+ if (packets_left_to_drop_ > 0 && rtp_packet.padding_size() == 0) {
+ --packets_left_to_drop_;
+ dropped_packets_.insert(rtp_packet.SequenceNumber());
+ return DROP_PACKET;
+ }
+
+ return SEND_PACKET;
+ }
+
+ Action OnReceiveRtcp(const uint8_t* packet, size_t length) override {
+ MutexLock lock(&mutex_);
+ test::RtcpPacketParser parser;
+ EXPECT_TRUE(parser.Parse(packet, length));
+ nacks_left_ -= parser.nack()->num_packets();
+ return SEND_PACKET;
+ }
+
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStreamInterface::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
+ send_config->rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
+ (*receive_configs)[0].rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
+ }
+
+ void PerformTest() override {
+ EXPECT_TRUE(Wait())
+ << "Timed out waiting for packets to be NACKed, retransmitted and "
+ "rendered.";
+ }
+
+ Mutex mutex_;
+ std::set<uint16_t> dropped_packets_;
+ std::set<uint16_t> retransmitted_packets_;
+ uint64_t sent_rtp_packets_;
+ int packets_left_to_drop_;
+ int nacks_left_ RTC_GUARDED_BY(&mutex_);
+ } test;
+
+ RunBaseTest(&test);
+}
+
+TEST_F(RetransmissionEndToEndTest, ReceivesNackAndRetransmitsAudio) {
+ class NackObserver : public test::EndToEndTest {
+ public:
+ NackObserver()
+ : EndToEndTest(kLongTimeout),
+ local_ssrc_(0),
+ remote_ssrc_(0),
+ receive_transport_(nullptr) {}
+
+ private:
+ size_t GetNumVideoStreams() const override { return 0; }
+ size_t GetNumAudioStreams() const override { return 1; }
+
+ Action OnSendRtp(const uint8_t* packet, size_t length) override {
+ RtpPacket rtp_packet;
+ EXPECT_TRUE(rtp_packet.Parse(packet, length));
+
+ if (!sequence_number_to_retransmit_) {
+ sequence_number_to_retransmit_ = rtp_packet.SequenceNumber();
+ return DROP_PACKET;
+
+        // Don't ask for retransmission straight away; the request may be
+        // deduplicated in the pacer.
+ } else if (rtp_packet.SequenceNumber() ==
+ *sequence_number_to_retransmit_) {
+ observation_complete_.Set();
+ } else {
+ // Send a NACK as often as necessary until retransmission is received.
+ rtcp::Nack nack;
+ nack.SetSenderSsrc(local_ssrc_);
+ nack.SetMediaSsrc(remote_ssrc_);
+ uint16_t nack_list[] = {*sequence_number_to_retransmit_};
+ nack.SetPacketIds(nack_list, 1);
+ rtc::Buffer buffer = nack.Build();
+
+ EXPECT_TRUE(receive_transport_->SendRtcp(buffer.data(), buffer.size()));
+ }
+
+ return SEND_PACKET;
+ }
+
+ void ModifyAudioConfigs(AudioSendStream::Config* send_config,
+ std::vector<AudioReceiveStreamInterface::Config>*
+ receive_configs) override {
+ (*receive_configs)[0].rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
+ local_ssrc_ = (*receive_configs)[0].rtp.local_ssrc;
+ remote_ssrc_ = (*receive_configs)[0].rtp.remote_ssrc;
+ receive_transport_ = (*receive_configs)[0].rtcp_send_transport;
+ }
+
+ void PerformTest() override {
+ EXPECT_TRUE(Wait())
+ << "Timed out waiting for packets to be NACKed, retransmitted and "
+ "rendered.";
+ }
+
+ uint32_t local_ssrc_;
+ uint32_t remote_ssrc_;
+ Transport* receive_transport_;
+ absl::optional<uint16_t> sequence_number_to_retransmit_;
+ } test;
+
+ RunBaseTest(&test);
+}
+
+TEST_F(RetransmissionEndToEndTest,
+ StopSendingKeyframeRequestsForInactiveStream) {
+ class KeyframeRequestObserver : public test::EndToEndTest {
+ public:
+ explicit KeyframeRequestObserver(TaskQueueBase* task_queue)
+ : clock_(Clock::GetRealTimeClock()), task_queue_(task_queue) {}
+
+ void OnVideoStreamsCreated(VideoSendStream* send_stream,
+ const std::vector<VideoReceiveStreamInterface*>&
+ receive_streams) override {
+ RTC_DCHECK_EQ(1, receive_streams.size());
+ send_stream_ = send_stream;
+ receive_stream_ = receive_streams[0];
+ }
+
+ Action OnReceiveRtcp(const uint8_t* packet, size_t length) override {
+ test::RtcpPacketParser parser;
+ EXPECT_TRUE(parser.Parse(packet, length));
+ if (parser.pli()->num_packets() > 0)
+ task_queue_->PostTask([this] { Run(); });
+ return SEND_PACKET;
+ }
+
+ bool PollStats() {
+ if (receive_stream_->GetStats().frames_decoded > 0) {
+ frame_decoded_ = true;
+ } else if (clock_->TimeInMilliseconds() - start_time_ < 5000) {
+ task_queue_->PostDelayedTask([this] { Run(); }, TimeDelta::Millis(100));
+ return false;
+ }
+ return true;
+ }
+
+ void PerformTest() override {
+ start_time_ = clock_->TimeInMilliseconds();
+ task_queue_->PostTask([this] { Run(); });
+ test_done_.Wait(rtc::Event::kForever);
+ }
+
+ void Run() {
+ if (!frame_decoded_) {
+ if (PollStats()) {
+ send_stream_->Stop();
+ if (!frame_decoded_) {
+ test_done_.Set();
+ } else {
+ // Now we wait for the PLI packet. Once we receive it, a task
+ // will be posted (see OnReceiveRtcp) and we'll check the stats
+ // once more before signaling that we're done.
+ }
+ }
+ } else {
+ EXPECT_EQ(
+ 1U,
+ receive_stream_->GetStats().rtcp_packet_type_counts.pli_packets);
+ test_done_.Set();
+ }
+ }
+
+ private:
+ Clock* const clock_;
+ VideoSendStream* send_stream_;
+ VideoReceiveStreamInterface* receive_stream_;
+ TaskQueueBase* const task_queue_;
+ rtc::Event test_done_;
+ bool frame_decoded_ = false;
+ int64_t start_time_ = 0;
+ } test(task_queue());
+
+ RunBaseTest(&test);
+}
+
+void RetransmissionEndToEndTest::ReceivesPliAndRecovers(int rtp_history_ms) {
+ static const int kPacketsToDrop = 1;
+
+ class PliObserver : public test::EndToEndTest,
+ public rtc::VideoSinkInterface<VideoFrame> {
+ public:
+ explicit PliObserver(int rtp_history_ms)
+ : EndToEndTest(kLongTimeout),
+ rtp_history_ms_(rtp_history_ms),
+ nack_enabled_(rtp_history_ms > 0),
+ highest_dropped_timestamp_(0),
+ frames_to_drop_(0),
+ received_pli_(false) {}
+
+ private:
+ Action OnSendRtp(const uint8_t* packet, size_t length) override {
+ MutexLock lock(&mutex_);
+ RtpPacket rtp_packet;
+ EXPECT_TRUE(rtp_packet.Parse(packet, length));
+
+ // Drop all retransmitted packets to force a PLI.
+ if (rtp_packet.Timestamp() <= highest_dropped_timestamp_)
+ return DROP_PACKET;
+
+ if (frames_to_drop_ > 0) {
+ highest_dropped_timestamp_ = rtp_packet.Timestamp();
+ --frames_to_drop_;
+ return DROP_PACKET;
+ }
+
+ return SEND_PACKET;
+ }
+
+ Action OnReceiveRtcp(const uint8_t* packet, size_t length) override {
+ MutexLock lock(&mutex_);
+ test::RtcpPacketParser parser;
+ EXPECT_TRUE(parser.Parse(packet, length));
+ if (!nack_enabled_)
+ EXPECT_EQ(0, parser.nack()->num_packets());
+ if (parser.pli()->num_packets() > 0)
+ received_pli_ = true;
+ return SEND_PACKET;
+ }
+
+ void OnFrame(const VideoFrame& video_frame) override {
+ MutexLock lock(&mutex_);
+ if (received_pli_ &&
+ video_frame.timestamp() > highest_dropped_timestamp_) {
+ observation_complete_.Set();
+ }
+ if (!received_pli_)
+ frames_to_drop_ = kPacketsToDrop;
+ }
+
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStreamInterface::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
+ send_config->rtp.nack.rtp_history_ms = rtp_history_ms_;
+ (*receive_configs)[0].rtp.nack.rtp_history_ms = rtp_history_ms_;
+ (*receive_configs)[0].renderer = this;
+ }
+
+ void PerformTest() override {
+ EXPECT_TRUE(Wait()) << "Timed out waiting for PLI to be "
+ "received and a frame to be "
+ "rendered afterwards.";
+ }
+
+ Mutex mutex_;
+ int rtp_history_ms_;
+ bool nack_enabled_;
+ uint32_t highest_dropped_timestamp_ RTC_GUARDED_BY(&mutex_);
+ int frames_to_drop_ RTC_GUARDED_BY(&mutex_);
+ bool received_pli_ RTC_GUARDED_BY(&mutex_);
+ } test(rtp_history_ms);
+
+ RunBaseTest(&test);
+}
+
+TEST_F(RetransmissionEndToEndTest, ReceivesPliAndRecoversWithNack) {
+ ReceivesPliAndRecovers(1000);
+}
+
+TEST_F(RetransmissionEndToEndTest, ReceivesPliAndRecoversWithoutNack) {
+ ReceivesPliAndRecovers(0);
+}
+
+// This test drops the final RTP packet of a frame (marker bit set), makes sure
+// it's retransmitted and rendered. Retransmission SSRCs are also checked.
+void RetransmissionEndToEndTest::DecodesRetransmittedFrame(bool enable_rtx,
+ bool enable_red) {
+ static const int kDroppedFrameNumber = 10;
+ class RetransmissionObserver : public test::EndToEndTest,
+ public rtc::VideoSinkInterface<VideoFrame> {
+ public:
+ RetransmissionObserver(bool enable_rtx, bool enable_red)
+ : EndToEndTest(kDefaultTimeout),
+ payload_type_(GetPayloadType(false, enable_red)),
+ retransmission_ssrc_(enable_rtx ? kSendRtxSsrcs[0]
+ : kVideoSendSsrcs[0]),
+ retransmission_payload_type_(GetPayloadType(enable_rtx, enable_red)),
+ encoder_factory_([]() { return VP8Encoder::Create(); }),
+ marker_bits_observed_(0),
+ retransmitted_timestamp_(0) {}
+
+ private:
+ Action OnSendRtp(const uint8_t* packet, size_t length) override {
+ MutexLock lock(&mutex_);
+ RtpPacket rtp_packet;
+ EXPECT_TRUE(rtp_packet.Parse(packet, length));
+
+ // Ignore padding-only packets over RTX.
+ if (rtp_packet.PayloadType() != payload_type_) {
+ EXPECT_EQ(retransmission_ssrc_, rtp_packet.Ssrc());
+ if (rtp_packet.payload_size() == 0)
+ return SEND_PACKET;
+ }
+
+ if (rtp_packet.Timestamp() == retransmitted_timestamp_) {
+ EXPECT_EQ(retransmission_ssrc_, rtp_packet.Ssrc());
+ EXPECT_EQ(retransmission_payload_type_, rtp_packet.PayloadType());
+ return SEND_PACKET;
+ }
+
+      // This is the final packet of the frame we want to lose; drop it and
+      // expect a retransmission.
+ if (rtp_packet.PayloadType() == payload_type_ && rtp_packet.Marker() &&
+ ++marker_bits_observed_ == kDroppedFrameNumber) {
+ // This should be the only dropped packet.
+ EXPECT_EQ(0u, retransmitted_timestamp_);
+ retransmitted_timestamp_ = rtp_packet.Timestamp();
+ return DROP_PACKET;
+ }
+
+ return SEND_PACKET;
+ }
+
+ void OnFrame(const VideoFrame& frame) override {
+ EXPECT_EQ(kVideoRotation_90, frame.rotation());
+ {
+ MutexLock lock(&mutex_);
+ if (frame.timestamp() == retransmitted_timestamp_)
+ observation_complete_.Set();
+ }
+ orig_renderer_->OnFrame(frame);
+ }
+
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStreamInterface::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
+ send_config->rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
+
+ // Insert ourselves into the rendering pipeline.
+ RTC_DCHECK(!orig_renderer_);
+ orig_renderer_ = (*receive_configs)[0].renderer;
+ RTC_DCHECK(orig_renderer_);
+ // To avoid post-decode frame dropping, disable the prerender buffer.
+ (*receive_configs)[0].enable_prerenderer_smoothing = false;
+ (*receive_configs)[0].renderer = this;
+
+ (*receive_configs)[0].rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
+
+ if (payload_type_ == kRedPayloadType) {
+ send_config->rtp.ulpfec.ulpfec_payload_type = kUlpfecPayloadType;
+ send_config->rtp.ulpfec.red_payload_type = kRedPayloadType;
+ if (retransmission_ssrc_ == kSendRtxSsrcs[0])
+ send_config->rtp.ulpfec.red_rtx_payload_type = kRtxRedPayloadType;
+ (*receive_configs)[0].rtp.ulpfec_payload_type =
+ send_config->rtp.ulpfec.ulpfec_payload_type;
+ (*receive_configs)[0].rtp.red_payload_type =
+ send_config->rtp.ulpfec.red_payload_type;
+ }
+
+ if (retransmission_ssrc_ == kSendRtxSsrcs[0]) {
+ send_config->rtp.rtx.ssrcs.push_back(kSendRtxSsrcs[0]);
+ send_config->rtp.rtx.payload_type = kSendRtxPayloadType;
+ (*receive_configs)[0].rtp.rtx_ssrc = kSendRtxSsrcs[0];
+ (*receive_configs)[0]
+ .rtp.rtx_associated_payload_types[(payload_type_ == kRedPayloadType)
+ ? kRtxRedPayloadType
+ : kSendRtxPayloadType] =
+ payload_type_;
+ }
+ // Configure encoding and decoding with VP8, since generic packetization
+ // doesn't support FEC with NACK.
+ RTC_DCHECK_EQ(1, (*receive_configs)[0].decoders.size());
+ send_config->encoder_settings.encoder_factory = &encoder_factory_;
+ send_config->rtp.payload_name = "VP8";
+ encoder_config->codec_type = kVideoCodecVP8;
+ (*receive_configs)[0].decoders[0].video_format = SdpVideoFormat("VP8");
+ }
+
+ void OnFrameGeneratorCapturerCreated(
+ test::FrameGeneratorCapturer* frame_generator_capturer) override {
+ frame_generator_capturer->SetFakeRotation(kVideoRotation_90);
+ }
+
+ void PerformTest() override {
+ EXPECT_TRUE(Wait())
+ << "Timed out while waiting for retransmission to render.";
+ }
+
+ int GetPayloadType(bool use_rtx, bool use_fec) {
+ if (use_fec) {
+ if (use_rtx)
+ return kRtxRedPayloadType;
+ return kRedPayloadType;
+ }
+ if (use_rtx)
+ return kSendRtxPayloadType;
+ return kFakeVideoSendPayloadType;
+ }
+
+ Mutex mutex_;
+ rtc::VideoSinkInterface<VideoFrame>* orig_renderer_ = nullptr;
+ const int payload_type_;
+ const uint32_t retransmission_ssrc_;
+ const int retransmission_payload_type_;
+ test::FunctionVideoEncoderFactory encoder_factory_;
+ const std::string payload_name_;
+ int marker_bits_observed_;
+ uint32_t retransmitted_timestamp_ RTC_GUARDED_BY(&mutex_);
+ } test(enable_rtx, enable_red);
+
+ RunBaseTest(&test);
+}
+
+TEST_F(RetransmissionEndToEndTest, DecodesRetransmittedFrame) {
+ DecodesRetransmittedFrame(false, false);
+}
+
+TEST_F(RetransmissionEndToEndTest, DecodesRetransmittedFrameOverRtx) {
+ DecodesRetransmittedFrame(true, false);
+}
+
+TEST_F(RetransmissionEndToEndTest, DecodesRetransmittedFrameByRed) {
+ DecodesRetransmittedFrame(false, true);
+}
+
+TEST_F(RetransmissionEndToEndTest, DecodesRetransmittedFrameByRedOverRtx) {
+ DecodesRetransmittedFrame(true, true);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/video/end_to_end_tests/rtp_rtcp_tests.cc b/third_party/libwebrtc/video/end_to_end_tests/rtp_rtcp_tests.cc
new file mode 100644
index 0000000000..32d7cd50ef
--- /dev/null
+++ b/third_party/libwebrtc/video/end_to_end_tests/rtp_rtcp_tests.cc
@@ -0,0 +1,551 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+
+#include "api/test/simulated_network.h"
+#include "call/fake_network_pipe.h"
+#include "call/simulated_network.h"
+#include "modules/include/module_common_types_public.h"
+#include "modules/rtp_rtcp/source/rtp_packet.h"
+#include "modules/video_coding/codecs/vp8/include/vp8.h"
+#include "rtc_base/numerics/sequence_number_unwrapper.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/task_queue_for_test.h"
+#include "test/call_test.h"
+#include "test/gtest.h"
+#include "test/rtcp_packet_parser.h"
+
+namespace webrtc {
+namespace {
+enum : int { // The first valid value is 1.
+ kTransportSequenceNumberExtensionId = 1,
+};
+} // namespace
+
+class RtpRtcpEndToEndTest : public test::CallTest {
+ protected:
+ void RespectsRtcpMode(RtcpMode rtcp_mode);
+ void TestRtpStatePreservation(bool use_rtx, bool provoke_rtcpsr_before_rtp);
+};
+
+void RtpRtcpEndToEndTest::RespectsRtcpMode(RtcpMode rtcp_mode) {
+ static const int kNumCompoundRtcpPacketsToObserve = 10;
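+  // Drops every third outgoing RTP packet to keep the receiver reporting loss
+  // and checks that the RTCP it sends back matches the negotiated mode.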
+ class RtcpModeObserver : public test::EndToEndTest {
+ public:
+ explicit RtcpModeObserver(RtcpMode rtcp_mode)
+ : EndToEndTest(kDefaultTimeout),
+ rtcp_mode_(rtcp_mode),
+ sent_rtp_(0),
+ sent_rtcp_(0) {}
+
+ private:
+ Action OnSendRtp(const uint8_t* packet, size_t length) override {
+ MutexLock lock(&mutex_);
+ if (++sent_rtp_ % 3 == 0)
+ return DROP_PACKET;
+
+ return SEND_PACKET;
+ }
+
+ Action OnReceiveRtcp(const uint8_t* packet, size_t length) override {
+ MutexLock lock(&mutex_);
+ ++sent_rtcp_;
+ test::RtcpPacketParser parser;
+ EXPECT_TRUE(parser.Parse(packet, length));
+
+ EXPECT_EQ(0, parser.sender_report()->num_packets());
+
+ switch (rtcp_mode_) {
+ case RtcpMode::kCompound:
+ // TODO(holmer): We shouldn't send transport feedback alone if
+ // compound RTCP is negotiated.
+ if (parser.receiver_report()->num_packets() == 0 &&
+ parser.transport_feedback()->num_packets() == 0) {
+ ADD_FAILURE() << "Received RTCP packet without receiver report for "
+ "RtcpMode::kCompound.";
+ observation_complete_.Set();
+ }
+
+ if (sent_rtcp_ >= kNumCompoundRtcpPacketsToObserve)
+ observation_complete_.Set();
+
+ break;
+ case RtcpMode::kReducedSize:
+ if (parser.receiver_report()->num_packets() == 0)
+ observation_complete_.Set();
+ break;
+ case RtcpMode::kOff:
+ RTC_DCHECK_NOTREACHED();
+ break;
+ }
+
+ return SEND_PACKET;
+ }
+
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStreamInterface::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
+ send_config->rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
+ (*receive_configs)[0].rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
+ (*receive_configs)[0].rtp.rtcp_mode = rtcp_mode_;
+ }
+
+ void PerformTest() override {
+ EXPECT_TRUE(Wait())
+ << (rtcp_mode_ == RtcpMode::kCompound
+ ? "Timed out before observing enough compound packets."
+ : "Timed out before receiving a non-compound RTCP packet.");
+ }
+
+ RtcpMode rtcp_mode_;
+ Mutex mutex_;
+ // Must be protected since RTCP can be sent by both the process thread
+ // and the pacer thread.
+ int sent_rtp_ RTC_GUARDED_BY(&mutex_);
+ int sent_rtcp_ RTC_GUARDED_BY(&mutex_);
+ } test(rtcp_mode);
+
+ RunBaseTest(&test);
+}
+
+TEST_F(RtpRtcpEndToEndTest, UsesRtcpCompoundMode) {
+ RespectsRtcpMode(RtcpMode::kCompound);
+}
+
+TEST_F(RtpRtcpEndToEndTest, UsesRtcpReducedSizeMode) {
+ RespectsRtcpMode(RtcpMode::kReducedSize);
+}
+
+void RtpRtcpEndToEndTest::TestRtpStatePreservation(
+ bool use_rtx,
+ bool provoke_rtcpsr_before_rtp) {
+  // This test uses different VideoStream settings than the defaults implemented
+  // in DefaultVideoStreamFactory. Therefore it implements its own
+  // VideoEncoderConfig::VideoStreamFactoryInterface, which is created in
+  // ModifyVideoConfigs.
+ class VideoStreamFactory
+ : public VideoEncoderConfig::VideoStreamFactoryInterface {
+ public:
+ VideoStreamFactory() {}
+
+ private:
+ std::vector<VideoStream> CreateEncoderStreams(
+ int frame_width,
+ int frame_height,
+ const VideoEncoderConfig& encoder_config) override {
+ std::vector<VideoStream> streams =
+ test::CreateVideoStreams(frame_width, frame_height, encoder_config);
+
+ if (encoder_config.number_of_streams > 1) {
+ // Lower bitrates so that all streams send initially.
+ RTC_DCHECK_EQ(3, encoder_config.number_of_streams);
+ for (size_t i = 0; i < encoder_config.number_of_streams; ++i) {
+ streams[i].min_bitrate_bps = 10000;
+ streams[i].target_bitrate_bps = 15000;
+ streams[i].max_bitrate_bps = 20000;
+ }
+ } else {
+ // Use the same total bitrates when sending a single stream to avoid
+        // lowering the bitrate estimate and requiring a subsequent rampup.
+ streams[0].min_bitrate_bps = 3 * 10000;
+ streams[0].target_bitrate_bps = 3 * 15000;
+ streams[0].max_bitrate_bps = 3 * 20000;
+ }
+ return streams;
+ }
+ };
+
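+  // Checks that per-SSRC sequence numbers and timestamps continue without
+  // replays or large gaps as the send stream is destroyed, recreated and
+  // reconfigured.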
+ class RtpSequenceObserver : public test::RtpRtcpObserver {
+ public:
+ explicit RtpSequenceObserver(bool use_rtx)
+ : test::RtpRtcpObserver(kDefaultTimeout),
+ ssrcs_to_observe_(kNumSimulcastStreams) {
+ for (size_t i = 0; i < kNumSimulcastStreams; ++i) {
+ ssrc_is_rtx_[kVideoSendSsrcs[i]] = false;
+ if (use_rtx)
+ ssrc_is_rtx_[kSendRtxSsrcs[i]] = true;
+ }
+ }
+
+ void ResetExpectedSsrcs(size_t num_expected_ssrcs) {
+ MutexLock lock(&mutex_);
+ ssrc_observed_.clear();
+ ssrcs_to_observe_ = num_expected_ssrcs;
+ }
+
+ private:
+ void ValidateTimestampGap(uint32_t ssrc,
+ uint32_t timestamp,
+ bool only_padding)
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_) {
+ static const int32_t kMaxTimestampGap = kDefaultTimeout.ms() * 90;
+ auto timestamp_it = last_observed_timestamp_.find(ssrc);
+ if (timestamp_it == last_observed_timestamp_.end()) {
+ EXPECT_FALSE(only_padding);
+ last_observed_timestamp_[ssrc] = timestamp;
+ } else {
+ // Verify timestamps are reasonably close.
+ uint32_t latest_observed = timestamp_it->second;
+ // Wraparound handling is unnecessary here as long as an int variable
+ // is used to store the result.
+ int32_t timestamp_gap = timestamp - latest_observed;
+ EXPECT_LE(std::abs(timestamp_gap), kMaxTimestampGap)
+ << "Gap in timestamps (" << latest_observed << " -> " << timestamp
+ << ") too large for SSRC: " << ssrc << ".";
+ timestamp_it->second = timestamp;
+ }
+ }
+
+ Action OnSendRtp(const uint8_t* packet, size_t length) override {
+ RtpPacket rtp_packet;
+ EXPECT_TRUE(rtp_packet.Parse(packet, length));
+ const uint32_t ssrc = rtp_packet.Ssrc();
+ const int64_t sequence_number =
+ seq_numbers_unwrapper_.Unwrap(rtp_packet.SequenceNumber());
+ const uint32_t timestamp = rtp_packet.Timestamp();
+ const bool only_padding = rtp_packet.payload_size() == 0;
+
+ EXPECT_TRUE(ssrc_is_rtx_.find(ssrc) != ssrc_is_rtx_.end())
+ << "Received SSRC that wasn't configured: " << ssrc;
+
+ static const int64_t kMaxSequenceNumberGap = 100;
+ std::list<int64_t>* seq_numbers = &last_observed_seq_numbers_[ssrc];
+ if (seq_numbers->empty()) {
+ seq_numbers->push_back(sequence_number);
+ } else {
+ // We shouldn't get replays of previous sequence numbers.
+ for (int64_t observed : *seq_numbers) {
+ EXPECT_NE(observed, sequence_number)
+ << "Received sequence number " << sequence_number << " for SSRC "
+ << ssrc << " 2nd time.";
+ }
+ // Verify sequence numbers are reasonably close.
+ int64_t latest_observed = seq_numbers->back();
+ int64_t sequence_number_gap = sequence_number - latest_observed;
+ EXPECT_LE(std::abs(sequence_number_gap), kMaxSequenceNumberGap)
+ << "Gap in sequence numbers (" << latest_observed << " -> "
+ << sequence_number << ") too large for SSRC: " << ssrc << ".";
+ seq_numbers->push_back(sequence_number);
+ if (seq_numbers->size() >= kMaxSequenceNumberGap) {
+ seq_numbers->pop_front();
+ }
+ }
+
+ if (!ssrc_is_rtx_[ssrc]) {
+ MutexLock lock(&mutex_);
+ ValidateTimestampGap(ssrc, timestamp, only_padding);
+
+ // Wait for media packets on all ssrcs.
+ if (!ssrc_observed_[ssrc] && !only_padding) {
+ ssrc_observed_[ssrc] = true;
+ if (--ssrcs_to_observe_ == 0)
+ observation_complete_.Set();
+ }
+ }
+
+ return SEND_PACKET;
+ }
+
+ Action OnSendRtcp(const uint8_t* packet, size_t length) override {
+ test::RtcpPacketParser rtcp_parser;
+ rtcp_parser.Parse(packet, length);
+ if (rtcp_parser.sender_report()->num_packets() > 0) {
+ uint32_t ssrc = rtcp_parser.sender_report()->sender_ssrc();
+ uint32_t rtcp_timestamp = rtcp_parser.sender_report()->rtp_timestamp();
+
+ MutexLock lock(&mutex_);
+ ValidateTimestampGap(ssrc, rtcp_timestamp, false);
+ }
+ return SEND_PACKET;
+ }
+
+ RtpSequenceNumberUnwrapper seq_numbers_unwrapper_;
+ std::map<uint32_t, std::list<int64_t>> last_observed_seq_numbers_;
+ std::map<uint32_t, uint32_t> last_observed_timestamp_;
+ std::map<uint32_t, bool> ssrc_is_rtx_;
+
+ Mutex mutex_;
+ size_t ssrcs_to_observe_ RTC_GUARDED_BY(mutex_);
+ std::map<uint32_t, bool> ssrc_observed_ RTC_GUARDED_BY(mutex_);
+ } observer(use_rtx);
+
+ VideoEncoderConfig one_stream;
+
+ SendTask(task_queue(), [this, &observer, &one_stream, use_rtx]() {
+ CreateCalls();
+ CreateSendTransport(BuiltInNetworkBehaviorConfig(), &observer);
+ CreateReceiveTransport(BuiltInNetworkBehaviorConfig(), &observer);
+ CreateSendConfig(kNumSimulcastStreams, 0, 0);
+
+ if (use_rtx) {
+ for (size_t i = 0; i < kNumSimulcastStreams; ++i) {
+ GetVideoSendConfig()->rtp.rtx.ssrcs.push_back(kSendRtxSsrcs[i]);
+ }
+ GetVideoSendConfig()->rtp.rtx.payload_type = kSendRtxPayloadType;
+ }
+
+ GetVideoEncoderConfig()->video_stream_factory =
+ rtc::make_ref_counted<VideoStreamFactory>();
+ // Use the same total bitrates when sending a single stream to avoid
+ // lowering the bitrate estimate and requiring a subsequent rampup.
+ one_stream = GetVideoEncoderConfig()->Copy();
+ // one_stream.streams.resize(1);
+ one_stream.number_of_streams = 1;
+ CreateMatchingReceiveConfigs();
+
+ CreateVideoStreams();
+ CreateFrameGeneratorCapturer(30, 1280, 720);
+
+ Start();
+ });
+
+ EXPECT_TRUE(observer.Wait())
+ << "Timed out waiting for all SSRCs to send packets.";
+
+  // Reset the stream more than once to make sure that the saved RTP state is
+  // updated every time and not just the first (as it would be if e.g.
+  // std::map::insert were used).
+ for (size_t i = 0; i < 3; ++i) {
+ SendTask(task_queue(), [&]() {
+ DestroyVideoSendStreams();
+
+ // Re-create VideoSendStream with only one stream.
+ CreateVideoSendStream(one_stream);
+ GetVideoSendStream()->Start();
+ if (provoke_rtcpsr_before_rtp) {
+        // A Rapid Resync Request forces an RTCP Sender Report to be sent back
+        // immediately. Using it speeds up this test because there is no need
+        // to wait a second for the periodic Sender Report.
+ rtcp::RapidResyncRequest force_send_sr_back_request;
+ rtc::Buffer packet = force_send_sr_back_request.Build();
+ static_cast<webrtc::Transport*>(receive_transport_.get())
+ ->SendRtcp(packet.data(), packet.size());
+ }
+ CreateFrameGeneratorCapturer(30, 1280, 720);
+ });
+
+ observer.ResetExpectedSsrcs(1);
+ EXPECT_TRUE(observer.Wait()) << "Timed out waiting for single RTP packet.";
+
+ // Reconfigure back to use all streams.
+ SendTask(task_queue(), [this]() {
+ GetVideoSendStream()->ReconfigureVideoEncoder(
+ GetVideoEncoderConfig()->Copy());
+ });
+ observer.ResetExpectedSsrcs(kNumSimulcastStreams);
+ EXPECT_TRUE(observer.Wait())
+ << "Timed out waiting for all SSRCs to send packets.";
+
+ // Reconfigure down to one stream.
+ SendTask(task_queue(), [this, &one_stream]() {
+ GetVideoSendStream()->ReconfigureVideoEncoder(one_stream.Copy());
+ });
+ observer.ResetExpectedSsrcs(1);
+ EXPECT_TRUE(observer.Wait()) << "Timed out waiting for single RTP packet.";
+
+ // Reconfigure back to use all streams.
+ SendTask(task_queue(), [this]() {
+ GetVideoSendStream()->ReconfigureVideoEncoder(
+ GetVideoEncoderConfig()->Copy());
+ });
+ observer.ResetExpectedSsrcs(kNumSimulcastStreams);
+ EXPECT_TRUE(observer.Wait())
+ << "Timed out waiting for all SSRCs to send packets.";
+ }
+
+ SendTask(task_queue(), [this]() {
+ Stop();
+ DestroyStreams();
+ DestroyCalls();
+ });
+}
+
+TEST_F(RtpRtcpEndToEndTest, RestartingSendStreamPreservesRtpState) {
+ TestRtpStatePreservation(false, false);
+}
+
+TEST_F(RtpRtcpEndToEndTest, RestartingSendStreamPreservesRtpStatesWithRtx) {
+ TestRtpStatePreservation(true, false);
+}
+
+TEST_F(RtpRtcpEndToEndTest,
+ RestartingSendStreamKeepsRtpAndRtcpTimestampsSynced) {
+ TestRtpStatePreservation(true, true);
+}
+
+// See https://bugs.chromium.org/p/webrtc/issues/detail?id=9648.
+TEST_F(RtpRtcpEndToEndTest, DISABLED_TestFlexfecRtpStatePreservation) {
+ class RtpSequenceObserver : public test::RtpRtcpObserver {
+ public:
+ RtpSequenceObserver()
+ : test::RtpRtcpObserver(kDefaultTimeout),
+ num_flexfec_packets_sent_(0) {}
+
+ void ResetPacketCount() {
+ MutexLock lock(&mutex_);
+ num_flexfec_packets_sent_ = 0;
+ }
+
+ private:
+ Action OnSendRtp(const uint8_t* packet, size_t length) override {
+ MutexLock lock(&mutex_);
+
+ RtpPacket rtp_packet;
+ EXPECT_TRUE(rtp_packet.Parse(packet, length));
+ const uint16_t sequence_number = rtp_packet.SequenceNumber();
+ const uint32_t timestamp = rtp_packet.Timestamp();
+ const uint32_t ssrc = rtp_packet.Ssrc();
+
+ if (ssrc == kVideoSendSsrcs[0] || ssrc == kSendRtxSsrcs[0]) {
+ return SEND_PACKET;
+ }
+ EXPECT_EQ(kFlexfecSendSsrc, ssrc) << "Unknown SSRC sent.";
+
+ ++num_flexfec_packets_sent_;
+
+ // If this is the first packet, we have nothing to compare to.
+ if (!last_observed_sequence_number_) {
+ last_observed_sequence_number_.emplace(sequence_number);
+ last_observed_timestamp_.emplace(timestamp);
+
+ return SEND_PACKET;
+ }
+
+ // Verify continuity and monotonicity of RTP sequence numbers.
+ EXPECT_EQ(static_cast<uint16_t>(*last_observed_sequence_number_ + 1),
+ sequence_number);
+ last_observed_sequence_number_.emplace(sequence_number);
+
+ // Timestamps should be non-decreasing...
+ const bool timestamp_is_same_or_newer =
+ timestamp == *last_observed_timestamp_ ||
+ IsNewerTimestamp(timestamp, *last_observed_timestamp_);
+ EXPECT_TRUE(timestamp_is_same_or_newer);
+ // ...but reasonably close in time.
+ const int k10SecondsInRtpTimestampBase = 10 * kVideoPayloadTypeFrequency;
+ EXPECT_TRUE(IsNewerTimestamp(
+ *last_observed_timestamp_ + k10SecondsInRtpTimestampBase, timestamp));
+ last_observed_timestamp_.emplace(timestamp);
+
+ // Pass test when enough packets have been let through.
+ if (num_flexfec_packets_sent_ >= 10) {
+ observation_complete_.Set();
+ }
+
+ return SEND_PACKET;
+ }
+
+ absl::optional<uint16_t> last_observed_sequence_number_
+ RTC_GUARDED_BY(mutex_);
+ absl::optional<uint32_t> last_observed_timestamp_ RTC_GUARDED_BY(mutex_);
+ size_t num_flexfec_packets_sent_ RTC_GUARDED_BY(mutex_);
+ Mutex mutex_;
+ } observer;
+
+ static constexpr int kFrameMaxWidth = 320;
+ static constexpr int kFrameMaxHeight = 180;
+ static constexpr int kFrameRate = 15;
+
+ test::FunctionVideoEncoderFactory encoder_factory(
+ []() { return VP8Encoder::Create(); });
+
+ SendTask(task_queue(), [&]() {
+ CreateCalls();
+
+ BuiltInNetworkBehaviorConfig lossy_delayed_link;
+ lossy_delayed_link.loss_percent = 2;
+ lossy_delayed_link.queue_delay_ms = 50;
+
+ CreateSendTransport(lossy_delayed_link, &observer);
+ CreateReceiveTransport(BuiltInNetworkBehaviorConfig(), &observer);
+
+    // For reduced flakiness, we use a real VP8 encoder together with NACK
+ // and RTX.
+ const int kNumVideoStreams = 1;
+ const int kNumFlexfecStreams = 1;
+ CreateSendConfig(kNumVideoStreams, 0, kNumFlexfecStreams);
+
+ GetVideoSendConfig()->encoder_settings.encoder_factory = &encoder_factory;
+ GetVideoSendConfig()->rtp.payload_name = "VP8";
+ GetVideoSendConfig()->rtp.payload_type = kVideoSendPayloadType;
+ GetVideoSendConfig()->rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
+ GetVideoSendConfig()->rtp.rtx.ssrcs.push_back(kSendRtxSsrcs[0]);
+ GetVideoSendConfig()->rtp.rtx.payload_type = kSendRtxPayloadType;
+ GetVideoEncoderConfig()->codec_type = kVideoCodecVP8;
+
+ CreateMatchingReceiveConfigs();
+ video_receive_configs_[0].rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
+ video_receive_configs_[0].rtp.rtx_ssrc = kSendRtxSsrcs[0];
+ video_receive_configs_[0]
+ .rtp.rtx_associated_payload_types[kSendRtxPayloadType] =
+ kVideoSendPayloadType;
+
+ // The matching FlexFEC receive config is not created by
+ // CreateMatchingReceiveConfigs since this is not a test::BaseTest.
+ // Set up the receive config manually instead.
+ FlexfecReceiveStream::Config flexfec_receive_config(
+ receive_transport_.get());
+ flexfec_receive_config.payload_type =
+ GetVideoSendConfig()->rtp.flexfec.payload_type;
+ flexfec_receive_config.rtp.remote_ssrc =
+ GetVideoSendConfig()->rtp.flexfec.ssrc;
+ flexfec_receive_config.protected_media_ssrcs =
+ GetVideoSendConfig()->rtp.flexfec.protected_media_ssrcs;
+ flexfec_receive_config.rtp.local_ssrc = kReceiverLocalVideoSsrc;
+ flexfec_receive_config.rtp.extensions.emplace_back(
+ RtpExtension::kTransportSequenceNumberUri,
+ kTransportSequenceNumberExtensionId);
+ flexfec_receive_configs_.push_back(flexfec_receive_config);
+
+ CreateFlexfecStreams();
+ CreateVideoStreams();
+
+ // RTCP might be disabled if the network is "down".
+ sender_call_->SignalChannelNetworkState(MediaType::VIDEO, kNetworkUp);
+ receiver_call_->SignalChannelNetworkState(MediaType::VIDEO, kNetworkUp);
+
+ CreateFrameGeneratorCapturer(kFrameRate, kFrameMaxWidth, kFrameMaxHeight);
+
+ Start();
+ });
+
+ // Initial test.
+ EXPECT_TRUE(observer.Wait()) << "Timed out waiting for packets.";
+
+ SendTask(task_queue(), [this, &observer]() {
+ // Ensure monotonicity when the VideoSendStream is restarted.
+ Stop();
+ observer.ResetPacketCount();
+ Start();
+ });
+
+ EXPECT_TRUE(observer.Wait()) << "Timed out waiting for packets.";
+
+ SendTask(task_queue(), [this, &observer]() {
+ // Ensure monotonicity when the VideoSendStream is recreated.
+ DestroyVideoSendStreams();
+ observer.ResetPacketCount();
+ CreateVideoSendStreams();
+ GetVideoSendStream()->Start();
+ CreateFrameGeneratorCapturer(kFrameRate, kFrameMaxWidth, kFrameMaxHeight);
+ });
+
+ EXPECT_TRUE(observer.Wait()) << "Timed out waiting for packets.";
+
+ // Cleanup.
+ SendTask(task_queue(), [this]() {
+ Stop();
+ DestroyStreams();
+ DestroyCalls();
+ });
+}
+} // namespace webrtc
diff --git a/third_party/libwebrtc/video/end_to_end_tests/ssrc_tests.cc b/third_party/libwebrtc/video/end_to_end_tests/ssrc_tests.cc
new file mode 100644
index 0000000000..edacde115a
--- /dev/null
+++ b/third_party/libwebrtc/video/end_to_end_tests/ssrc_tests.cc
@@ -0,0 +1,325 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+
+#include "api/test/simulated_network.h"
+#include "call/fake_network_pipe.h"
+#include "call/packet_receiver.h"
+#include "call/simulated_network.h"
+#include "modules/rtp_rtcp/source/rtp_packet.h"
+#include "modules/rtp_rtcp/source/rtp_packet_received.h"
+#include "modules/rtp_rtcp/source/rtp_util.h"
+#include "rtc_base/task_queue_for_test.h"
+#include "test/call_test.h"
+#include "test/gtest.h"
+#include "test/rtcp_packet_parser.h"
+
+namespace webrtc {
+class SsrcEndToEndTest : public test::CallTest {
+ public:
+ SsrcEndToEndTest() {
+ RegisterRtpExtension(
+ RtpExtension(RtpExtension::kTransportSequenceNumberUri, 1));
+ }
+
+ protected:
+ void TestSendsSetSsrcs(size_t num_ssrcs, bool send_single_ssrc_first);
+};
+
+TEST_F(SsrcEndToEndTest, ReceiverUsesLocalSsrc) {
+ class SyncRtcpObserver : public test::EndToEndTest {
+ public:
+ SyncRtcpObserver() : EndToEndTest(kDefaultTimeout) {}
+
+ Action OnReceiveRtcp(const uint8_t* packet, size_t length) override {
+ test::RtcpPacketParser parser;
+ EXPECT_TRUE(parser.Parse(packet, length));
+ EXPECT_EQ(kReceiverLocalVideoSsrc, parser.sender_ssrc());
+ observation_complete_.Set();
+
+ return SEND_PACKET;
+ }
+
+ void PerformTest() override {
+ EXPECT_TRUE(Wait())
+ << "Timed out while waiting for a receiver RTCP packet to be sent.";
+ }
+ } test;
+
+ RunBaseTest(&test);
+}
+
+TEST_F(SsrcEndToEndTest, UnknownRtpPacketTriggersUndemuxablePacketHandler) {
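+  // Forwards packets to the real receiver and signals once the undemuxable
+  // packet handler is invoked for an RTP packet without a matching stream.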
+ class PacketInputObserver : public PacketReceiver {
+ public:
+ explicit PacketInputObserver(PacketReceiver* receiver)
+ : receiver_(receiver) {}
+
+ bool Wait() {
+ return undemuxable_packet_handler_triggered_.Wait(kDefaultTimeout);
+ }
+
+ private:
+ void DeliverRtpPacket(
+ MediaType media_type,
+ RtpPacketReceived packet,
+ OnUndemuxablePacketHandler undemuxable_packet_handler) override {
+ PacketReceiver::OnUndemuxablePacketHandler handler =
+ [this](const RtpPacketReceived& packet) {
+ undemuxable_packet_handler_triggered_.Set();
+            // No need to re-attempt delivering the packet.
+ return false;
+ };
+ receiver_->DeliverRtpPacket(media_type, std::move(packet),
+ std::move(handler));
+ }
+ void DeliverRtcpPacket(rtc::CopyOnWriteBuffer packet) override {}
+
+ PacketReceiver* receiver_;
+ rtc::Event undemuxable_packet_handler_triggered_;
+ };
+
+ std::unique_ptr<test::DirectTransport> send_transport;
+ std::unique_ptr<test::DirectTransport> receive_transport;
+ std::unique_ptr<PacketInputObserver> input_observer;
+
+ SendTask(
+ task_queue(),
+ [this, &send_transport, &receive_transport, &input_observer]() {
+ CreateCalls();
+
+ send_transport = std::make_unique<test::DirectTransport>(
+ task_queue(),
+ std::make_unique<FakeNetworkPipe>(
+ Clock::GetRealTimeClock(), std::make_unique<SimulatedNetwork>(
+ BuiltInNetworkBehaviorConfig())),
+ sender_call_.get(), payload_type_map_, GetRegisteredExtensions(),
+ GetRegisteredExtensions());
+ receive_transport = std::make_unique<test::DirectTransport>(
+ task_queue(),
+ std::make_unique<FakeNetworkPipe>(
+ Clock::GetRealTimeClock(), std::make_unique<SimulatedNetwork>(
+ BuiltInNetworkBehaviorConfig())),
+ receiver_call_.get(), payload_type_map_, GetRegisteredExtensions(),
+ GetRegisteredExtensions());
+ input_observer =
+ std::make_unique<PacketInputObserver>(receiver_call_->Receiver());
+ send_transport->SetReceiver(input_observer.get());
+ receive_transport->SetReceiver(sender_call_->Receiver());
+
+ CreateSendConfig(1, 0, 0, send_transport.get());
+ CreateMatchingReceiveConfigs(receive_transport.get());
+
+ CreateVideoStreams();
+ CreateFrameGeneratorCapturer(kDefaultFramerate, kDefaultWidth,
+ kDefaultHeight);
+ Start();
+
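+        // Destroy the only receive stream so that incoming RTP no longer
+        // matches any stream and must be handed to the undemuxable-packet
+        // handler.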
+ receiver_call_->DestroyVideoReceiveStream(video_receive_streams_[0]);
+ video_receive_streams_.clear();
+ });
+
+  // Wait() blocks until the undemuxable-packet handler has been triggered.
+ EXPECT_TRUE(input_observer->Wait());
+
+ SendTask(task_queue(), [this, &send_transport, &receive_transport]() {
+ Stop();
+ DestroyStreams();
+ send_transport.reset();
+ receive_transport.reset();
+ DestroyCalls();
+ });
+}
+
+void SsrcEndToEndTest::TestSendsSetSsrcs(size_t num_ssrcs,
+ bool send_single_ssrc_first) {
+ class SendsSetSsrcs : public test::EndToEndTest {
+ public:
+ SendsSetSsrcs(const uint32_t* ssrcs,
+ size_t num_ssrcs,
+ bool send_single_ssrc_first,
+ TaskQueueBase* task_queue)
+ : EndToEndTest(kDefaultTimeout),
+ num_ssrcs_(num_ssrcs),
+ send_single_ssrc_first_(send_single_ssrc_first),
+ ssrcs_to_observe_(num_ssrcs),
+ expect_single_ssrc_(send_single_ssrc_first),
+ send_stream_(nullptr),
+ task_queue_(task_queue) {
+ for (size_t i = 0; i < num_ssrcs; ++i)
+ valid_ssrcs_[ssrcs[i]] = true;
+ }
+
+ private:
+ Action OnSendRtp(const uint8_t* packet, size_t length) override {
+ RtpPacket rtp_packet;
+ EXPECT_TRUE(rtp_packet.Parse(packet, length));
+
+ EXPECT_TRUE(valid_ssrcs_[rtp_packet.Ssrc()])
+ << "Received unknown SSRC: " << rtp_packet.Ssrc();
+
+ if (!valid_ssrcs_[rtp_packet.Ssrc()])
+ observation_complete_.Set();
+
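+      // Count each SSRC only once. When a single stream is sent first, the
+      // first observed SSRC completes the initial wait so that PerformTest()
+      // can reconfigure the encoder to full simulcast.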
+ if (!is_observed_[rtp_packet.Ssrc()]) {
+ is_observed_[rtp_packet.Ssrc()] = true;
+ --ssrcs_to_observe_;
+ if (expect_single_ssrc_) {
+ expect_single_ssrc_ = false;
+ observation_complete_.Set();
+ }
+ }
+
+ if (ssrcs_to_observe_ == 0)
+ observation_complete_.Set();
+
+ return SEND_PACKET;
+ }
+
+ size_t GetNumVideoStreams() const override { return num_ssrcs_; }
+
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStreamInterface::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
+ // Set low simulcast bitrates to not have to wait for bandwidth ramp-up.
+ encoder_config->max_bitrate_bps = 50000;
+ for (auto& layer : encoder_config->simulcast_layers) {
+ layer.min_bitrate_bps = 10000;
+ layer.target_bitrate_bps = 15000;
+ layer.max_bitrate_bps = 20000;
+ }
+ video_encoder_config_all_streams_ = encoder_config->Copy();
+ if (send_single_ssrc_first_)
+ encoder_config->number_of_streams = 1;
+ }
+
+ void OnVideoStreamsCreated(VideoSendStream* send_stream,
+ const std::vector<VideoReceiveStreamInterface*>&
+ receive_streams) override {
+ send_stream_ = send_stream;
+ }
+
+ void PerformTest() override {
+ EXPECT_TRUE(Wait()) << "Timed out while waiting for "
+ << (send_single_ssrc_first_ ? "first SSRC."
+ : "SSRCs.");
+
+ if (send_single_ssrc_first_) {
+ // Set full simulcast and continue with the rest of the SSRCs.
+ SendTask(task_queue_, [&]() {
+ send_stream_->ReconfigureVideoEncoder(
+ std::move(video_encoder_config_all_streams_));
+ });
+ EXPECT_TRUE(Wait()) << "Timed out while waiting on additional SSRCs.";
+ }
+ }
+
+ private:
+ std::map<uint32_t, bool> valid_ssrcs_;
+ std::map<uint32_t, bool> is_observed_;
+
+ const size_t num_ssrcs_;
+ const bool send_single_ssrc_first_;
+
+ size_t ssrcs_to_observe_;
+ bool expect_single_ssrc_;
+
+ VideoSendStream* send_stream_;
+ VideoEncoderConfig video_encoder_config_all_streams_;
+ TaskQueueBase* task_queue_;
+ } test(kVideoSendSsrcs, num_ssrcs, send_single_ssrc_first, task_queue());
+
+ RunBaseTest(&test);
+}
+
+TEST_F(SsrcEndToEndTest, SendsSetSsrc) {
+ TestSendsSetSsrcs(1, false);
+}
+
+TEST_F(SsrcEndToEndTest, SendsSetSimulcastSsrcs) {
+ TestSendsSetSsrcs(kNumSimulcastStreams, false);
+}
+
+TEST_F(SsrcEndToEndTest, CanSwitchToUseAllSsrcs) {
+ TestSendsSetSsrcs(kNumSimulcastStreams, true);
+}
+
+TEST_F(SsrcEndToEndTest, DISABLED_RedundantPayloadsTransmittedOnAllSsrcs) {
+ class ObserveRedundantPayloads : public test::EndToEndTest {
+ public:
+ ObserveRedundantPayloads()
+ : EndToEndTest(kDefaultTimeout),
+ ssrcs_to_observe_(kNumSimulcastStreams) {
+ for (size_t i = 0; i < kNumSimulcastStreams; ++i) {
+ registered_rtx_ssrc_[kSendRtxSsrcs[i]] = true;
+ }
+ }
+
+ private:
+ Action OnSendRtp(const uint8_t* packet, size_t length) override {
+ RtpPacket rtp_packet;
+ EXPECT_TRUE(rtp_packet.Parse(packet, length));
+
+ if (!registered_rtx_ssrc_[rtp_packet.Ssrc()])
+ return SEND_PACKET;
+
+ const bool packet_is_redundant_payload = rtp_packet.payload_size() > 0;
+
+ if (!packet_is_redundant_payload)
+ return SEND_PACKET;
+
+ if (!observed_redundant_retransmission_[rtp_packet.Ssrc()]) {
+ observed_redundant_retransmission_[rtp_packet.Ssrc()] = true;
+ if (--ssrcs_to_observe_ == 0)
+ observation_complete_.Set();
+ }
+
+ return SEND_PACKET;
+ }
+
+ size_t GetNumVideoStreams() const override { return kNumSimulcastStreams; }
+
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStreamInterface::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
+ // Set low simulcast bitrates to not have to wait for bandwidth ramp-up.
+ encoder_config->max_bitrate_bps = 50000;
+ for (auto& layer : encoder_config->simulcast_layers) {
+ layer.min_bitrate_bps = 10000;
+ layer.target_bitrate_bps = 15000;
+ layer.max_bitrate_bps = 20000;
+ }
+ send_config->rtp.rtx.payload_type = kSendRtxPayloadType;
+
+ for (size_t i = 0; i < kNumSimulcastStreams; ++i)
+ send_config->rtp.rtx.ssrcs.push_back(kSendRtxSsrcs[i]);
+
+      // Set the minimum transmit bitrate significantly higher than the
+      // combined max bitrates of all video streams, forcing padding that
+      // triggers redundant payload padding on all RTX SSRCs.
+ encoder_config->min_transmit_bitrate_bps = 100000;
+ }
+
+ void PerformTest() override {
+ EXPECT_TRUE(Wait())
+ << "Timed out while waiting for redundant payloads on all SSRCs.";
+ }
+
+ private:
+ size_t ssrcs_to_observe_;
+ std::map<uint32_t, bool> observed_redundant_retransmission_;
+ std::map<uint32_t, bool> registered_rtx_ssrc_;
+ } test;
+
+ RunBaseTest(&test);
+}
+} // namespace webrtc
diff --git a/third_party/libwebrtc/video/end_to_end_tests/stats_tests.cc b/third_party/libwebrtc/video/end_to_end_tests/stats_tests.cc
new file mode 100644
index 0000000000..62acca3b5f
--- /dev/null
+++ b/third_party/libwebrtc/video/end_to_end_tests/stats_tests.cc
@@ -0,0 +1,733 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+
+#include "absl/algorithm/container.h"
+#include "absl/types/optional.h"
+#include "api/task_queue/task_queue_base.h"
+#include "api/test/simulated_network.h"
+#include "api/test/video/function_video_encoder_factory.h"
+#include "call/fake_network_pipe.h"
+#include "call/simulated_network.h"
+#include "modules/rtp_rtcp/source/rtp_packet.h"
+#include "modules/video_coding/include/video_coding_defines.h"
+#include "rtc_base/strings/string_builder.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/task_queue_for_test.h"
+#include "system_wrappers/include/metrics.h"
+#include "system_wrappers/include/sleep.h"
+#include "test/call_test.h"
+#include "test/fake_encoder.h"
+#include "test/gtest.h"
+#include "test/rtcp_packet_parser.h"
+
+namespace webrtc {
+namespace {
+enum : int { // The first valid value is 1.
+ kVideoContentTypeExtensionId = 1,
+};
+} // namespace
+
+class StatsEndToEndTest : public test::CallTest {
+ public:
+ StatsEndToEndTest() {
+ RegisterRtpExtension(RtpExtension(RtpExtension::kVideoContentTypeUri,
+ kVideoContentTypeExtensionId));
+ }
+};
+
+TEST_F(StatsEndToEndTest, GetStats) {
+ static const int kStartBitrateBps = 3000000;
+ static const int kExpectedRenderDelayMs = 20;
+
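+  // Waits until every tracked send-side and receive-side stats field has been
+  // observed with a non-default value at least once.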
+ class StatsObserver : public test::EndToEndTest {
+ public:
+ StatsObserver()
+ : EndToEndTest(kLongTimeout), encoder_factory_([]() {
+ return std::make_unique<test::DelayedEncoder>(
+ Clock::GetRealTimeClock(), 10);
+ }) {}
+
+ private:
+ Action OnSendRtp(const uint8_t* packet, size_t length) override {
+ // Drop every 25th packet => 4% loss.
+ static const int kPacketLossFrac = 25;
+ RtpPacket header;
+ if (header.Parse(packet, length) &&
+ expected_send_ssrcs_.find(header.Ssrc()) !=
+ expected_send_ssrcs_.end() &&
+ header.SequenceNumber() % kPacketLossFrac == 0) {
+ return DROP_PACKET;
+ }
+ check_stats_event_.Set();
+ return SEND_PACKET;
+ }
+
+ Action OnSendRtcp(const uint8_t* packet, size_t length) override {
+ check_stats_event_.Set();
+ return SEND_PACKET;
+ }
+
+ Action OnReceiveRtp(const uint8_t* packet, size_t length) override {
+ check_stats_event_.Set();
+ return SEND_PACKET;
+ }
+
+ Action OnReceiveRtcp(const uint8_t* packet, size_t length) override {
+ check_stats_event_.Set();
+ return SEND_PACKET;
+ }
+
+ bool CheckReceiveStats() {
+ for (size_t i = 0; i < receive_streams_.size(); ++i) {
+ VideoReceiveStreamInterface::Stats stats =
+ receive_streams_[i]->GetStats();
+ EXPECT_EQ(expected_receive_ssrcs_[i], stats.ssrc);
+
+ // Make sure all fields have been populated.
+ // TODO(pbos): Use CompoundKey if/when we ever know that all stats are
+ // always filled for all receivers.
+ receive_stats_filled_["IncomingRate"] |=
+ stats.network_frame_rate != 0 || stats.total_bitrate_bps != 0;
+
+ send_stats_filled_["DecoderImplementationName"] |=
+ stats.decoder_implementation_name ==
+ test::FakeDecoder::kImplementationName;
+ receive_stats_filled_["PowerEfficientDecoder"] =
+ stats.power_efficient_decoder.has_value();
+ receive_stats_filled_["RenderDelayAsHighAsExpected"] |=
+ stats.render_delay_ms >= kExpectedRenderDelayMs;
+
+ receive_stats_filled_["FrameCallback"] |= stats.decode_frame_rate != 0;
+
+ receive_stats_filled_["FrameRendered"] |= stats.render_frame_rate != 0;
+
+ receive_stats_filled_["StatisticsUpdated"] |=
+ stats.rtp_stats.packets_lost != 0 || stats.rtp_stats.jitter != 0;
+
+ receive_stats_filled_["DataCountersUpdated"] |=
+ stats.rtp_stats.packet_counter.payload_bytes != 0 ||
+ stats.rtp_stats.packet_counter.header_bytes != 0 ||
+ stats.rtp_stats.packet_counter.packets != 0 ||
+ stats.rtp_stats.packet_counter.padding_bytes != 0;
+
+ receive_stats_filled_["CodecStats"] |= stats.target_delay_ms != 0;
+
+ receive_stats_filled_["FrameCounts"] |=
+ stats.frame_counts.key_frames != 0 ||
+ stats.frame_counts.delta_frames != 0;
+
+ receive_stats_filled_["CName"] |= !stats.c_name.empty();
+
+ receive_stats_filled_["RtcpPacketTypeCount"] |=
+ stats.rtcp_packet_type_counts.fir_packets != 0 ||
+ stats.rtcp_packet_type_counts.nack_packets != 0 ||
+ stats.rtcp_packet_type_counts.pli_packets != 0 ||
+ stats.rtcp_packet_type_counts.nack_requests != 0 ||
+ stats.rtcp_packet_type_counts.unique_nack_requests != 0;
+
+ RTC_DCHECK(stats.current_payload_type == -1 ||
+ stats.current_payload_type == kFakeVideoSendPayloadType);
+ receive_stats_filled_["IncomingPayloadType"] |=
+ stats.current_payload_type == kFakeVideoSendPayloadType;
+ }
+
+ return AllStatsFilled(receive_stats_filled_);
+ }
+
+ bool CheckSendStats() {
+ RTC_DCHECK(send_stream_);
+
+ VideoSendStream::Stats stats;
+ SendTask(task_queue_, [&]() { stats = send_stream_->GetStats(); });
+
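+      // One substream is expected per media SSRC and one per configured RTX
+      // SSRC.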
+ size_t expected_num_streams =
+ kNumSimulcastStreams + expected_send_ssrcs_.size();
+ send_stats_filled_["NumStreams"] |=
+ stats.substreams.size() == expected_num_streams;
+
+ send_stats_filled_["CpuOveruseMetrics"] |=
+ stats.avg_encode_time_ms != 0 && stats.encode_usage_percent != 0 &&
+ stats.total_encode_time_ms != 0;
+
+ send_stats_filled_["EncoderImplementationName"] |=
+ stats.encoder_implementation_name ==
+ test::FakeEncoder::kImplementationName;
+
+ send_stats_filled_["PowerEfficientEncoder"] |=
+ stats.power_efficient_encoder == true;
+
+ for (const auto& kv : stats.substreams) {
+ if (expected_send_ssrcs_.find(kv.first) == expected_send_ssrcs_.end())
+ continue; // Probably RTX.
+
+ send_stats_filled_[CompoundKey("CapturedFrameRate", kv.first)] |=
+ stats.input_frame_rate != 0;
+
+ const VideoSendStream::StreamStats& stream_stats = kv.second;
+
+ send_stats_filled_[CompoundKey("StatisticsUpdated", kv.first)] |=
+ stream_stats.report_block_data.has_value();
+
+ send_stats_filled_[CompoundKey("DataCountersUpdated", kv.first)] |=
+ stream_stats.rtp_stats.fec.packets != 0 ||
+ stream_stats.rtp_stats.transmitted.padding_bytes != 0 ||
+ stream_stats.rtp_stats.retransmitted.packets != 0 ||
+ stream_stats.rtp_stats.transmitted.packets != 0;
+
+ send_stats_filled_[CompoundKey("BitrateStatisticsObserver.Total",
+ kv.first)] |=
+ stream_stats.total_bitrate_bps != 0;
+
+ send_stats_filled_[CompoundKey("BitrateStatisticsObserver.Retransmit",
+ kv.first)] |=
+ stream_stats.retransmit_bitrate_bps != 0;
+
+ send_stats_filled_[CompoundKey("FrameCountObserver", kv.first)] |=
+ stream_stats.frame_counts.delta_frames != 0 ||
+ stream_stats.frame_counts.key_frames != 0;
+
+ send_stats_filled_[CompoundKey("OutgoingRate", kv.first)] |=
+ stats.encode_frame_rate != 0;
+
+ send_stats_filled_[CompoundKey("Delay", kv.first)] |=
+ stream_stats.avg_delay_ms != 0 || stream_stats.max_delay_ms != 0;
+
+ // TODO(pbos): Use CompoundKey when the test makes sure that all SSRCs
+ // report dropped packets.
+ send_stats_filled_["RtcpPacketTypeCount"] |=
+ stream_stats.rtcp_packet_type_counts.fir_packets != 0 ||
+ stream_stats.rtcp_packet_type_counts.nack_packets != 0 ||
+ stream_stats.rtcp_packet_type_counts.pli_packets != 0 ||
+ stream_stats.rtcp_packet_type_counts.nack_requests != 0 ||
+ stream_stats.rtcp_packet_type_counts.unique_nack_requests != 0;
+ }
+
+ return AllStatsFilled(send_stats_filled_);
+ }
+
+ std::string CompoundKey(const char* name, uint32_t ssrc) {
+ rtc::StringBuilder oss;
+ oss << name << "_" << ssrc;
+ return oss.Release();
+ }
+
+ bool AllStatsFilled(const std::map<std::string, bool>& stats_map) {
+ for (const auto& stat : stats_map) {
+ if (!stat.second)
+ return false;
+ }
+ return true;
+ }
+
+ BuiltInNetworkBehaviorConfig GetSendTransportConfig() const override {
+ BuiltInNetworkBehaviorConfig network_config;
+ network_config.loss_percent = 5;
+ return network_config;
+ }
+
+ void ModifySenderBitrateConfig(
+ BitrateConstraints* bitrate_config) override {
+ bitrate_config->start_bitrate_bps = kStartBitrateBps;
+ }
+
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStreamInterface::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
+ // Set low simulcast bitrates to not have to wait for bandwidth ramp-up.
+ encoder_config->max_bitrate_bps = 50000;
+ for (auto& layer : encoder_config->simulcast_layers) {
+ layer.min_bitrate_bps = 10000;
+ layer.target_bitrate_bps = 15000;
+ layer.max_bitrate_bps = 20000;
+ }
+
+ send_config->rtp.c_name = "SomeCName";
+ send_config->rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
+ send_config->rtp.rtx.payload_type = kSendRtxPayloadType;
+
+ const std::vector<uint32_t>& ssrcs = send_config->rtp.ssrcs;
+ for (size_t i = 0; i < ssrcs.size(); ++i) {
+ expected_send_ssrcs_.insert(ssrcs[i]);
+ expected_receive_ssrcs_.push_back(
+ (*receive_configs)[i].rtp.remote_ssrc);
+ (*receive_configs)[i].render_delay_ms = kExpectedRenderDelayMs;
+ (*receive_configs)[i].rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
+
+ (*receive_configs)[i].rtp.rtx_ssrc = kSendRtxSsrcs[i];
+ (*receive_configs)[i]
+ .rtp.rtx_associated_payload_types[kSendRtxPayloadType] =
+ kFakeVideoSendPayloadType;
+ }
+
+ for (size_t i = 0; i < kNumSimulcastStreams; ++i)
+ send_config->rtp.rtx.ssrcs.push_back(kSendRtxSsrcs[i]);
+
+ // Use a delayed encoder to make sure we see CpuOveruseMetrics stats that
+ // are non-zero.
+ send_config->encoder_settings.encoder_factory = &encoder_factory_;
+ }
+
+ size_t GetNumVideoStreams() const override { return kNumSimulcastStreams; }
+
+ void OnVideoStreamsCreated(VideoSendStream* send_stream,
+ const std::vector<VideoReceiveStreamInterface*>&
+ receive_streams) override {
+ send_stream_ = send_stream;
+ receive_streams_ = receive_streams;
+ task_queue_ = TaskQueueBase::Current();
+ }
+
+ void PerformTest() override {
+ Clock* clock = Clock::GetRealTimeClock();
+ int64_t now_ms = clock->TimeInMilliseconds();
+ int64_t stop_time_ms = now_ms + test::CallTest::kLongTimeout.ms();
+ bool receive_ok = false;
+ bool send_ok = false;
+
+ while (now_ms < stop_time_ms) {
+ if (!receive_ok && task_queue_) {
+ SendTask(task_queue_, [&]() { receive_ok = CheckReceiveStats(); });
+ }
+ if (!send_ok)
+ send_ok = CheckSendStats();
+
+ if (receive_ok && send_ok)
+ return;
+
+ int64_t time_until_timeout_ms = stop_time_ms - now_ms;
+ if (time_until_timeout_ms > 0)
+ check_stats_event_.Wait(TimeDelta::Millis(time_until_timeout_ms));
+ now_ms = clock->TimeInMilliseconds();
+ }
+
+ ADD_FAILURE() << "Timed out waiting for filled stats.";
+ for (const auto& kv : receive_stats_filled_) {
+ if (!kv.second) {
+ ADD_FAILURE() << "Missing receive stats: " << kv.first;
+ }
+ }
+ for (const auto& kv : send_stats_filled_) {
+ if (!kv.second) {
+ ADD_FAILURE() << "Missing send stats: " << kv.first;
+ }
+ }
+ }
+
+ test::FunctionVideoEncoderFactory encoder_factory_;
+ std::vector<VideoReceiveStreamInterface*> receive_streams_;
+ std::map<std::string, bool> receive_stats_filled_;
+
+ VideoSendStream* send_stream_ = nullptr;
+ std::map<std::string, bool> send_stats_filled_;
+
+ std::vector<uint32_t> expected_receive_ssrcs_;
+ std::set<uint32_t> expected_send_ssrcs_;
+
+ rtc::Event check_stats_event_;
+ TaskQueueBase* task_queue_ = nullptr;
+ } test;
+
+ RunBaseTest(&test);
+}
+
+TEST_F(StatsEndToEndTest, TimingFramesAreReported) {
+ static const int kExtensionId = 5;
+ RegisterRtpExtension(
+ RtpExtension(RtpExtension::kVideoTimingUri, kExtensionId));
+
+ class StatsObserver : public test::EndToEndTest {
+ public:
+ StatsObserver() : EndToEndTest(kLongTimeout) {}
+
+ private:
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStreamInterface::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
+ send_config->rtp.extensions.clear();
+ send_config->rtp.extensions.push_back(
+ RtpExtension(RtpExtension::kVideoTimingUri, kExtensionId));
+ for (auto& receive_config : *receive_configs) {
+ receive_config.rtp.extensions.clear();
+ receive_config.rtp.extensions.push_back(
+ RtpExtension(RtpExtension::kVideoTimingUri, kExtensionId));
+ }
+ }
+
+ void OnVideoStreamsCreated(VideoSendStream* send_stream,
+ const std::vector<VideoReceiveStreamInterface*>&
+ receive_streams) override {
+ receive_streams_ = receive_streams;
+ task_queue_ = TaskQueueBase::Current();
+ }
+
+ void PerformTest() override {
+ // No frames reported initially.
+ SendTask(task_queue_, [&]() {
+ for (const auto& receive_stream : receive_streams_) {
+ EXPECT_FALSE(receive_stream->GetStats().timing_frame_info);
+ }
+ });
+      // Wait for at least one timing frame to be sent, with a 100 ms grace
+      // period.
+ SleepMs(kDefaultTimingFramesDelayMs + 100);
+ // Check that timing frames are reported for each stream.
+ SendTask(task_queue_, [&]() {
+ for (const auto& receive_stream : receive_streams_) {
+ EXPECT_TRUE(receive_stream->GetStats().timing_frame_info);
+ }
+ });
+ }
+
+ std::vector<VideoReceiveStreamInterface*> receive_streams_;
+ TaskQueueBase* task_queue_ = nullptr;
+ } test;
+
+ RunBaseTest(&test);
+}
+
+TEST_F(StatsEndToEndTest, TestReceivedRtpPacketStats) {
+ static const size_t kNumRtpPacketsToSend = 5;
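+  // Lets exactly kNumRtpPacketsToSend RTP packets through, drops the rest,
+  // and waits for the receive stream's packet counter to reach that number.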
+ class ReceivedRtpStatsObserver : public test::EndToEndTest {
+ public:
+ explicit ReceivedRtpStatsObserver(TaskQueueBase* task_queue)
+ : EndToEndTest(kDefaultTimeout), task_queue_(task_queue) {}
+
+ private:
+ void OnVideoStreamsCreated(VideoSendStream* send_stream,
+ const std::vector<VideoReceiveStreamInterface*>&
+ receive_streams) override {
+ receive_stream_ = receive_streams[0];
+ }
+
+ void OnStreamsStopped() override { task_safety_flag_->SetNotAlive(); }
+
+ Action OnSendRtp(const uint8_t* packet, size_t length) override {
+ if (sent_rtp_ >= kNumRtpPacketsToSend) {
+ // Need to check the stats on the correct thread.
+ task_queue_->PostTask(SafeTask(task_safety_flag_, [this]() {
+ VideoReceiveStreamInterface::Stats stats =
+ receive_stream_->GetStats();
+ if (kNumRtpPacketsToSend == stats.rtp_stats.packet_counter.packets) {
+ observation_complete_.Set();
+ }
+ }));
+ return DROP_PACKET;
+ }
+ ++sent_rtp_;
+ return SEND_PACKET;
+ }
+
+ void PerformTest() override {
+ EXPECT_TRUE(Wait())
+ << "Timed out while verifying number of received RTP packets.";
+ }
+
+ VideoReceiveStreamInterface* receive_stream_ = nullptr;
+ uint32_t sent_rtp_ = 0;
+ TaskQueueBase* const task_queue_;
+ rtc::scoped_refptr<PendingTaskSafetyFlag> task_safety_flag_ =
+ PendingTaskSafetyFlag::CreateDetached();
+ } test(task_queue());
+
+ RunBaseTest(&test);
+}
+
+#if defined(WEBRTC_WIN)
+// Disabled due to flakiness on Windows (bugs.webrtc.org/7483).
+#define MAYBE_ContentTypeSwitches DISABLED_ContentTypeSwitches
+#else
+#define MAYBE_ContentTypeSwitches ContentTypeSwitches
+#endif
+TEST_F(StatsEndToEndTest, MAYBE_ContentTypeSwitches) {
+ class StatsObserver : public test::BaseTest,
+ public rtc::VideoSinkInterface<VideoFrame> {
+ public:
+ StatsObserver() : BaseTest(kLongTimeout), num_frames_received_(0) {}
+
+ bool ShouldCreateReceivers() const override { return true; }
+
+ void OnFrame(const VideoFrame& video_frame) override {
+ // The RTT is needed to estimate `ntp_time_ms` which is used by
+ // end-to-end delay stats. Therefore, start counting received frames once
+ // `ntp_time_ms` is valid.
+ if (video_frame.ntp_time_ms() > 0 &&
+ Clock::GetRealTimeClock()->CurrentNtpInMilliseconds() >=
+ video_frame.ntp_time_ms()) {
+ MutexLock lock(&mutex_);
+ ++num_frames_received_;
+ }
+ }
+
+ Action OnSendRtp(const uint8_t* packet, size_t length) override {
+ if (MinNumberOfFramesReceived())
+ observation_complete_.Set();
+ return SEND_PACKET;
+ }
+
+ bool MinNumberOfFramesReceived() const {
+ // Have some room for frames with wrong content type during switch.
+ const int kMinRequiredHistogramSamples = 200 + 50;
+ MutexLock lock(&mutex_);
+ return num_frames_received_ > kMinRequiredHistogramSamples;
+ }
+
+ // May be called several times.
+ void PerformTest() override {
+ EXPECT_TRUE(Wait()) << "Timed out waiting for enough packets.";
+ // Reset frame counter so next PerformTest() call will do something.
+ {
+ MutexLock lock(&mutex_);
+ num_frames_received_ = 0;
+ }
+ }
+
+ mutable Mutex mutex_;
+ int num_frames_received_ RTC_GUARDED_BY(&mutex_);
+ } test;
+
+ metrics::Reset();
+
+ Call::Config send_config(send_event_log_.get());
+ test.ModifySenderBitrateConfig(&send_config.bitrate_config);
+ Call::Config recv_config(recv_event_log_.get());
+ test.ModifyReceiverBitrateConfig(&recv_config.bitrate_config);
+
+ VideoEncoderConfig encoder_config_with_screenshare;
+
+ SendTask(
+ task_queue(), [this, &test, &send_config, &recv_config,
+ &encoder_config_with_screenshare]() {
+ CreateSenderCall(send_config);
+ CreateReceiverCall(recv_config);
+ CreateReceiveTransport(test.GetReceiveTransportConfig(), &test);
+ CreateSendTransport(test.GetReceiveTransportConfig(), &test);
+
+ receiver_call_->SignalChannelNetworkState(MediaType::VIDEO, kNetworkUp);
+ CreateSendConfig(1, 0, 0);
+ CreateMatchingReceiveConfigs();
+
+ // Modify send and receive configs.
+ GetVideoSendConfig()->rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
+ video_receive_configs_[0].rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
+ video_receive_configs_[0].renderer = &test;
+ // RTT needed for RemoteNtpTimeEstimator for the receive stream.
+ video_receive_configs_[0].rtp.rtcp_xr.receiver_reference_time_report =
+ true;
+ // Start with realtime video.
+ GetVideoEncoderConfig()->content_type =
+ VideoEncoderConfig::ContentType::kRealtimeVideo;
+ // Encoder config for the second part of the test uses screenshare.
+ encoder_config_with_screenshare = GetVideoEncoderConfig()->Copy();
+ encoder_config_with_screenshare.content_type =
+ VideoEncoderConfig::ContentType::kScreen;
+
+ CreateVideoStreams();
+ CreateFrameGeneratorCapturer(kDefaultFramerate, kDefaultWidth,
+ kDefaultHeight);
+ Start();
+ });
+
+ test.PerformTest();
+
+ // Replace old send stream.
+ SendTask(task_queue(), [this, &encoder_config_with_screenshare]() {
+ DestroyVideoSendStreams();
+ CreateVideoSendStream(encoder_config_with_screenshare);
+ SetVideoDegradation(DegradationPreference::BALANCED);
+ GetVideoSendStream()->Start();
+ });
+
+ // Continue to run test but now with screenshare.
+ test.PerformTest();
+
+ SendTask(task_queue(), [this]() {
+ Stop();
+ DestroyStreams();
+ DestroyCalls();
+ });
+
+ // Verify that stats have been updated for both screenshare and video.
+ EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Video.EndToEndDelayInMs"));
+ EXPECT_METRIC_EQ(
+ 1, metrics::NumSamples("WebRTC.Video.Screenshare.EndToEndDelayInMs"));
+ EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Video.EndToEndDelayMaxInMs"));
+ EXPECT_METRIC_EQ(
+ 1, metrics::NumSamples("WebRTC.Video.Screenshare.EndToEndDelayMaxInMs"));
+ EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Video.InterframeDelayInMs"));
+ EXPECT_METRIC_EQ(
+ 1, metrics::NumSamples("WebRTC.Video.Screenshare.InterframeDelayInMs"));
+ EXPECT_METRIC_EQ(1,
+ metrics::NumSamples("WebRTC.Video.InterframeDelayMaxInMs"));
+ EXPECT_METRIC_EQ(1, metrics::NumSamples(
+ "WebRTC.Video.Screenshare.InterframeDelayMaxInMs"));
+}
+
+TEST_F(StatsEndToEndTest, VerifyNackStats) {
+ static const int kPacketNumberToDrop = 200;
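+  // Drops a single RTP packet, waits for it to be NACKed, and then verifies
+  // that both send-side and receive-side stats report the NACK once the
+  // minimum metrics runtime has passed.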
+ class NackObserver : public test::EndToEndTest {
+ public:
+ explicit NackObserver(TaskQueueBase* task_queue)
+ : EndToEndTest(kLongTimeout), task_queue_(task_queue) {}
+
+ private:
+ Action OnSendRtp(const uint8_t* packet, size_t length) override {
+ {
+ MutexLock lock(&mutex_);
+ if (++sent_rtp_packets_ == kPacketNumberToDrop) {
+ RtpPacket header;
+ EXPECT_TRUE(header.Parse(packet, length));
+ dropped_rtp_packet_ = header.SequenceNumber();
+ return DROP_PACKET;
+ }
+ }
+ task_queue_->PostTask(
+ SafeTask(task_safety_flag_, [this]() { VerifyStats(); }));
+ return SEND_PACKET;
+ }
+
+ Action OnReceiveRtcp(const uint8_t* packet, size_t length) override {
+ MutexLock lock(&mutex_);
+ test::RtcpPacketParser rtcp_parser;
+ rtcp_parser.Parse(packet, length);
+ const std::vector<uint16_t>& nacks = rtcp_parser.nack()->packet_ids();
+ if (!nacks.empty() && absl::c_linear_search(nacks, dropped_rtp_packet_)) {
+ dropped_rtp_packet_requested_ = true;
+ }
+ return SEND_PACKET;
+ }
+
+ void VerifyStats() {
+ MutexLock lock(&mutex_);
+ if (!dropped_rtp_packet_requested_)
+ return;
+ int send_stream_nack_packets = 0;
+ int receive_stream_nack_packets = 0;
+ VideoSendStream::Stats stats = send_stream_->GetStats();
+ for (const auto& kv : stats.substreams) {
+ const VideoSendStream::StreamStats& stream_stats = kv.second;
+ send_stream_nack_packets +=
+ stream_stats.rtcp_packet_type_counts.nack_packets;
+ }
+ for (const auto& receive_stream : receive_streams_) {
+ VideoReceiveStreamInterface::Stats stats = receive_stream->GetStats();
+ receive_stream_nack_packets +=
+ stats.rtcp_packet_type_counts.nack_packets;
+ }
+ if (send_stream_nack_packets >= 1 && receive_stream_nack_packets >= 1) {
+        // NACK packet sent on the receive stream and received on the send
+        // stream.
+ if (MinMetricRunTimePassed())
+ observation_complete_.Set();
+ }
+ }
+
+ bool MinMetricRunTimePassed() {
+ int64_t now_ms = Clock::GetRealTimeClock()->TimeInMilliseconds();
+ if (!start_runtime_ms_)
+ start_runtime_ms_ = now_ms;
+
+ int64_t elapsed_sec = (now_ms - *start_runtime_ms_) / 1000;
+ return elapsed_sec > metrics::kMinRunTimeInSeconds;
+ }
+
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStreamInterface::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
+ send_config->rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
+ (*receive_configs)[0].rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
+ }
+
+ void OnVideoStreamsCreated(VideoSendStream* send_stream,
+ const std::vector<VideoReceiveStreamInterface*>&
+ receive_streams) override {
+ send_stream_ = send_stream;
+ receive_streams_ = receive_streams;
+ }
+
+ void OnStreamsStopped() override { task_safety_flag_->SetNotAlive(); }
+
+ void PerformTest() override {
+ EXPECT_TRUE(Wait()) << "Timed out waiting for packet to be NACKed.";
+ }
+
+ Mutex mutex_;
+ uint64_t sent_rtp_packets_ RTC_GUARDED_BY(&mutex_) = 0;
+ uint16_t dropped_rtp_packet_ RTC_GUARDED_BY(&mutex_) = 0;
+ bool dropped_rtp_packet_requested_ RTC_GUARDED_BY(&mutex_) = false;
+ std::vector<VideoReceiveStreamInterface*> receive_streams_;
+ VideoSendStream* send_stream_ = nullptr;
+ absl::optional<int64_t> start_runtime_ms_;
+ TaskQueueBase* const task_queue_;
+ rtc::scoped_refptr<PendingTaskSafetyFlag> task_safety_flag_ =
+ PendingTaskSafetyFlag::CreateDetached();
+ } test(task_queue());
+
+ metrics::Reset();
+ RunBaseTest(&test);
+
+ EXPECT_METRIC_EQ(
+ 1, metrics::NumSamples("WebRTC.Video.UniqueNackRequestsSentInPercent"));
+ EXPECT_METRIC_EQ(1, metrics::NumSamples(
+ "WebRTC.Video.UniqueNackRequestsReceivedInPercent"));
+ EXPECT_METRIC_GT(metrics::MinSample("WebRTC.Video.NackPacketsSentPerMinute"),
+ 0);
+}
+
+TEST_F(StatsEndToEndTest, CallReportsRttForSender) {
+ static const int kSendDelayMs = 30;
+ static const int kReceiveDelayMs = 70;
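+  // The RTT reported by the sender call should cover at least the sum of
+  // these one-way queue delays.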
+
+ SendTask(task_queue(), [this]() {
+ BuiltInNetworkBehaviorConfig config;
+ config.queue_delay_ms = kSendDelayMs;
+ CreateCalls();
+ CreateSendTransport(config, /*observer*/ nullptr);
+
+ config.queue_delay_ms = kReceiveDelayMs;
+ CreateReceiveTransport(config, /*observer*/ nullptr);
+
+ CreateSendConfig(1, 0, 0);
+ CreateMatchingReceiveConfigs();
+
+ CreateVideoStreams();
+ CreateFrameGeneratorCapturer(kDefaultFramerate, kDefaultWidth,
+ kDefaultHeight);
+ receiver_call_->SignalChannelNetworkState(MediaType::VIDEO, kNetworkUp);
+ Start();
+ });
+
+ int64_t start_time_ms = clock_->TimeInMilliseconds();
+ while (true) {
+ Call::Stats stats;
+ SendTask(task_queue(),
+ [this, &stats]() { stats = sender_call_->GetStats(); });
+ ASSERT_GE(start_time_ms + kDefaultTimeout.ms(),
+ clock_->TimeInMilliseconds())
+ << "No RTT stats before timeout!";
+ if (stats.rtt_ms != -1) {
+      // To avoid failures caused by rounding or minor NTP clock adjustments,
+      // relax the expectation by 1 ms.
+ constexpr int kAllowedErrorMs = 1;
+ EXPECT_GE(stats.rtt_ms, kSendDelayMs + kReceiveDelayMs - kAllowedErrorMs);
+ break;
+ }
+ SleepMs(10);
+ }
+
+ SendTask(task_queue(), [this]() {
+ Stop();
+ DestroyStreams();
+ DestroyCalls();
+ });
+}
+} // namespace webrtc
diff --git a/third_party/libwebrtc/video/end_to_end_tests/transport_feedback_tests.cc b/third_party/libwebrtc/video/end_to_end_tests/transport_feedback_tests.cc
new file mode 100644
index 0000000000..f6e20498e3
--- /dev/null
+++ b/third_party/libwebrtc/video/end_to_end_tests/transport_feedback_tests.cc
@@ -0,0 +1,493 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+#include <vector>
+
+#include "api/rtp_parameters.h"
+#include "api/task_queue/task_queue_base.h"
+#include "api/units/time_delta.h"
+#include "call/call.h"
+#include "call/fake_network_pipe.h"
+#include "call/simulated_network.h"
+#include "modules/rtp_rtcp/source/byte_io.h"
+#include "modules/rtp_rtcp/source/rtp_header_extensions.h"
+#include "modules/rtp_rtcp/source/rtp_packet.h"
+#include "rtc_base/numerics/sequence_number_unwrapper.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "test/call_test.h"
+#include "test/field_trial.h"
+#include "test/gtest.h"
+#include "test/rtcp_packet_parser.h"
+#include "video/end_to_end_tests/multi_stream_tester.h"
+
+namespace webrtc {
+namespace {
+enum : int { // The first valid value is 1.
+ kTransportSequenceNumberExtensionId = 1,
+};
+} // namespace
+
+TEST(TransportFeedbackMultiStreamTest, AssignsTransportSequenceNumbers) {
+ static constexpr int kSendRtxPayloadType = 98;
+ static constexpr TimeDelta kDefaultTimeout = TimeDelta::Seconds(30);
+ static constexpr int kNackRtpHistoryMs = 1000;
+ static constexpr uint32_t kSendRtxSsrcs[MultiStreamTester::kNumStreams] = {
+ 0xBADCAFD, 0xBADCAFE, 0xBADCAFF};
+
+ class RtpExtensionHeaderObserver : public test::DirectTransport {
+ public:
+ RtpExtensionHeaderObserver(
+ TaskQueueBase* task_queue,
+ Call* sender_call,
+ const std::map<uint32_t, uint32_t>& ssrc_map,
+ const std::map<uint8_t, MediaType>& payload_type_map,
+ rtc::ArrayView<const RtpExtension> audio_extensions,
+ rtc::ArrayView<const RtpExtension> video_extensions)
+ : DirectTransport(task_queue,
+ std::make_unique<FakeNetworkPipe>(
+ Clock::GetRealTimeClock(),
+ std::make_unique<SimulatedNetwork>(
+ BuiltInNetworkBehaviorConfig())),
+ sender_call,
+ payload_type_map,
+ audio_extensions,
+ video_extensions),
+ rtx_to_media_ssrcs_(ssrc_map),
+ rtx_padding_observed_(false),
+ retransmit_observed_(false),
+ started_(false) {
+ extensions_.Register<TransportSequenceNumber>(
+ kTransportSequenceNumberExtensionId);
+ }
+ virtual ~RtpExtensionHeaderObserver() {}
+
+ bool SendRtp(const uint8_t* data,
+ size_t length,
+ const PacketOptions& options) override {
+ {
+ MutexLock lock(&lock_);
+
+ if (IsDone())
+ return false;
+
+ if (started_) {
+ RtpPacket rtp_packet(&extensions_);
+ EXPECT_TRUE(rtp_packet.Parse(data, length));
+ bool drop_packet = false;
+
+ uint16_t transport_sequence_number = 0;
+ EXPECT_TRUE(rtp_packet.GetExtension<TransportSequenceNumber>(
+ &transport_sequence_number));
+ EXPECT_EQ(options.packet_id, transport_sequence_number);
+ if (!streams_observed_.empty()) {
+ // Unwrap packet id and verify uniqueness.
+ int64_t packet_id = unwrapper_.Unwrap(options.packet_id);
+ EXPECT_TRUE(received_packed_ids_.insert(packet_id).second);
+ }
+
+ // Drop (up to) every 17th packet, so we get retransmits.
+ // Only drop media, do not drop padding packets.
+ if (rtp_packet.PayloadType() != kSendRtxPayloadType &&
+ rtp_packet.payload_size() > 0 &&
+ transport_sequence_number % 17 == 0) {
+ dropped_seq_[rtp_packet.Ssrc()].insert(rtp_packet.SequenceNumber());
+ drop_packet = true;
+ }
+
+ if (rtp_packet.payload_size() == 0) {
+ // Ignore padding packets.
+ } else if (rtp_packet.PayloadType() == kSendRtxPayloadType) {
+ uint16_t original_sequence_number =
+ ByteReader<uint16_t>::ReadBigEndian(
+ rtp_packet.payload().data());
+ uint32_t original_ssrc =
+ rtx_to_media_ssrcs_.find(rtp_packet.Ssrc())->second;
+ std::set<uint16_t>* seq_no_map = &dropped_seq_[original_ssrc];
+ auto it = seq_no_map->find(original_sequence_number);
+ if (it != seq_no_map->end()) {
+ retransmit_observed_ = true;
+ seq_no_map->erase(it);
+ } else {
+ rtx_padding_observed_ = true;
+ }
+ } else {
+ streams_observed_.insert(rtp_packet.Ssrc());
+ }
+
+ if (IsDone())
+ done_.Set();
+
+ if (drop_packet)
+ return true;
+ }
+ }
+
+ return test::DirectTransport::SendRtp(data, length, options);
+ }
+
+ bool IsDone() {
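+      // Done when media has been observed on all streams, both a
+      // retransmission and RTX padding have been seen, and the transport-wide
+      // sequence numbers form a contiguous range.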
+ bool observed_types_ok =
+ streams_observed_.size() == MultiStreamTester::kNumStreams &&
+ retransmit_observed_ && rtx_padding_observed_;
+ if (!observed_types_ok)
+ return false;
+ // We should not have any gaps in the sequence number range.
+ size_t seqno_range =
+ *received_packed_ids_.rbegin() - *received_packed_ids_.begin() + 1;
+ return seqno_range == received_packed_ids_.size();
+ }
+
+ bool Wait() {
+ {
+        // Can't be sure until this point that rtx_to_media_ssrcs_ etc. have
+        // been initialized and are OK to read.
+ MutexLock lock(&lock_);
+ started_ = true;
+ }
+ return done_.Wait(kDefaultTimeout);
+ }
+
+ private:
+ Mutex lock_;
+ rtc::Event done_;
+ RtpHeaderExtensionMap extensions_;
+ RtpSequenceNumberUnwrapper unwrapper_;
+ std::set<int64_t> received_packed_ids_;
+ std::set<uint32_t> streams_observed_;
+ std::map<uint32_t, std::set<uint16_t>> dropped_seq_;
+ const std::map<uint32_t, uint32_t>& rtx_to_media_ssrcs_;
+ bool rtx_padding_observed_;
+ bool retransmit_observed_;
+ bool started_;
+ };
+
+ class TransportSequenceNumberTester : public MultiStreamTester {
+ public:
+ TransportSequenceNumberTester() : observer_(nullptr) {}
+ ~TransportSequenceNumberTester() override = default;
+
+ protected:
+ void Wait() override {
+ RTC_DCHECK(observer_);
+ EXPECT_TRUE(observer_->Wait());
+ }
+
+ void UpdateSendConfig(
+ size_t stream_index,
+ VideoSendStream::Config* send_config,
+ VideoEncoderConfig* encoder_config,
+ test::FrameGeneratorCapturer** frame_generator) override {
+ send_config->rtp.extensions.clear();
+ send_config->rtp.extensions.push_back(
+ RtpExtension(RtpExtension::kTransportSequenceNumberUri,
+ kTransportSequenceNumberExtensionId));
+
+      // Force some padding to be sent. Note that since we do send media
+      // packets we cannot guarantee that a padding-only packet is sent.
+      // Instead, padding will most likely be sent as an RTX packet.
+ const int kPaddingBitrateBps = 50000;
+ encoder_config->max_bitrate_bps = 200000;
+ encoder_config->min_transmit_bitrate_bps =
+ encoder_config->max_bitrate_bps + kPaddingBitrateBps;
+
+ // Configure RTX for redundant payload padding.
+ send_config->rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
+ send_config->rtp.rtx.ssrcs.push_back(kSendRtxSsrcs[stream_index]);
+ send_config->rtp.rtx.payload_type = kSendRtxPayloadType;
+ rtx_to_media_ssrcs_[kSendRtxSsrcs[stream_index]] =
+ send_config->rtp.ssrcs[0];
+ }
+
+ void UpdateReceiveConfig(
+ size_t stream_index,
+ VideoReceiveStreamInterface::Config* receive_config) override {
+ receive_config->rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
+ receive_config->rtp.extensions.clear();
+ receive_config->rtp.extensions.push_back(
+ RtpExtension(RtpExtension::kTransportSequenceNumberUri,
+ kTransportSequenceNumberExtensionId));
+ receive_config->renderer = &fake_renderer_;
+ }
+
+ std::unique_ptr<test::DirectTransport> CreateSendTransport(
+ TaskQueueBase* task_queue,
+ Call* sender_call) override {
+ std::map<uint8_t, MediaType> payload_type_map =
+ MultiStreamTester::payload_type_map_;
+ RTC_DCHECK(payload_type_map.find(kSendRtxPayloadType) ==
+ payload_type_map.end());
+ payload_type_map[kSendRtxPayloadType] = MediaType::VIDEO;
+ std::vector<RtpExtension> extensions = {
+ RtpExtension(RtpExtension::kTransportSequenceNumberUri,
+ kTransportSequenceNumberExtensionId)};
+ auto observer = std::make_unique<RtpExtensionHeaderObserver>(
+ task_queue, sender_call, rtx_to_media_ssrcs_, payload_type_map,
+ extensions, extensions);
+ observer_ = observer.get();
+ return observer;
+ }
+
+ private:
+ test::FakeVideoRenderer fake_renderer_;
+ std::map<uint32_t, uint32_t> rtx_to_media_ssrcs_;
+ RtpExtensionHeaderObserver* observer_;
+ } tester;
+
+ tester.RunTest();
+}
+
+class TransportFeedbackEndToEndTest : public test::CallTest {
+ public:
+ TransportFeedbackEndToEndTest() {
+ RegisterRtpExtension(RtpExtension(RtpExtension::kTransportSequenceNumberUri,
+ kTransportSequenceNumberExtensionId));
+ }
+};
+
+class TransportFeedbackTester : public test::EndToEndTest {
+ public:
+ TransportFeedbackTester(size_t num_video_streams, size_t num_audio_streams)
+ : EndToEndTest(::webrtc::TransportFeedbackEndToEndTest::kDefaultTimeout),
+ num_video_streams_(num_video_streams),
+ num_audio_streams_(num_audio_streams),
+ receiver_call_(nullptr) {
+    // Only one stream of each media type is supported for now.
+ EXPECT_LE(num_video_streams, 1u);
+ EXPECT_LE(num_audio_streams, 1u);
+ }
+
+ protected:
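+  // Transport feedback is generated by the receiving side, so RTCP sent from
+  // the sender must never contain it, while the receiver's RTCP eventually
+  // should.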
+ Action OnSendRtcp(const uint8_t* data, size_t length) override {
+ EXPECT_FALSE(HasTransportFeedback(data, length));
+ return SEND_PACKET;
+ }
+
+ Action OnReceiveRtcp(const uint8_t* data, size_t length) override {
+ if (HasTransportFeedback(data, length))
+ observation_complete_.Set();
+ return SEND_PACKET;
+ }
+
+ bool HasTransportFeedback(const uint8_t* data, size_t length) const {
+ test::RtcpPacketParser parser;
+ EXPECT_TRUE(parser.Parse(data, length));
+ return parser.transport_feedback()->num_packets() > 0;
+ }
+
+ void PerformTest() override {
+ EXPECT_TRUE(observation_complete_.Wait(test::CallTest::kDefaultTimeout));
+ }
+
+ void OnCallsCreated(Call* sender_call, Call* receiver_call) override {
+ receiver_call_ = receiver_call;
+ }
+
+ size_t GetNumVideoStreams() const override { return num_video_streams_; }
+ size_t GetNumAudioStreams() const override { return num_audio_streams_; }
+
+ void ModifyAudioConfigs(AudioSendStream::Config* send_config,
+ std::vector<AudioReceiveStreamInterface::Config>*
+ receive_configs) override {
+ send_config->rtp.extensions.clear();
+ send_config->rtp.extensions.push_back(
+ RtpExtension(RtpExtension::kTransportSequenceNumberUri,
+ kTransportSequenceNumberExtensionId));
+ (*receive_configs)[0].rtp.extensions.clear();
+ (*receive_configs)[0].rtp.extensions = send_config->rtp.extensions;
+ }
+
+ private:
+ const size_t num_video_streams_;
+ const size_t num_audio_streams_;
+ Call* receiver_call_;
+};
+
+TEST_F(TransportFeedbackEndToEndTest, VideoReceivesTransportFeedback) {
+ TransportFeedbackTester test(1, 0);
+ RunBaseTest(&test);
+}
+TEST_F(TransportFeedbackEndToEndTest, AudioReceivesTransportFeedback) {
+ TransportFeedbackTester test(0, 1);
+ RunBaseTest(&test);
+}
+
+TEST_F(TransportFeedbackEndToEndTest, AudioVideoReceivesTransportFeedback) {
+ TransportFeedbackTester test(1, 1);
+ RunBaseTest(&test);
+}
+
+TEST_F(TransportFeedbackEndToEndTest,
+ StopsAndResumesMediaWhenCongestionWindowFull) {
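+  // Enable a congestion window via field trial so that outstanding data is
+  // limited and the window fills up quickly once transport feedback is
+  // withheld.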
+ test::ScopedFieldTrials override_field_trials(
+ "WebRTC-CongestionWindow/QueueSize:250/");
+
+ class TransportFeedbackTester : public test::EndToEndTest {
+ public:
+ TransportFeedbackTester(size_t num_video_streams, size_t num_audio_streams)
+ : EndToEndTest(
+ ::webrtc::TransportFeedbackEndToEndTest::kDefaultTimeout),
+ num_video_streams_(num_video_streams),
+ num_audio_streams_(num_audio_streams),
+ media_sent_(0),
+ media_sent_before_(0),
+ padding_sent_(0) {
+      // Only one stream of each media type is supported for now.
+ EXPECT_LE(num_video_streams, 1u);
+ EXPECT_LE(num_audio_streams, 1u);
+ }
+
+ protected:
+ Action OnSendRtp(const uint8_t* packet, size_t length) override {
+ RtpPacket rtp_packet;
+ EXPECT_TRUE(rtp_packet.Parse(packet, length));
+ const bool only_padding = rtp_packet.payload_size() == 0;
+ MutexLock lock(&mutex_);
+      // Padding is expected in the congested state to probe for connectivity
+      // when packets have been dropped.
+ if (only_padding) {
+ media_sent_before_ = media_sent_;
+ ++padding_sent_;
+ } else {
+ ++media_sent_;
+ if (padding_sent_ == 0) {
+ ++media_sent_before_;
+ EXPECT_LT(media_sent_, 40)
+ << "Media sent without feedback when congestion window is full.";
+ } else if (media_sent_ > media_sent_before_) {
+ observation_complete_.Set();
+ }
+ }
+ return SEND_PACKET;
+ }
+
+ Action OnReceiveRtcp(const uint8_t* data, size_t length) override {
+ MutexLock lock(&mutex_);
+      // To fill up the congestion window we drop feedback on packets after 20
+      // packets have been sent. Any packet that has not yet received feedback
+      // after that point is considered outstanding data and therefore fills
+      // up the congestion window. In the congested state, the pacer should
+      // send padding packets to trigger feedback in case all feedback for
+      // previous traffic was lost. This test listens for the padding packets,
+      // and once two padding packets have been received, feedback is let
+      // through again. This should cause the pacer to resume sending media.
+ if (media_sent_ > 20 && HasTransportFeedback(data, length) &&
+ padding_sent_ < 2) {
+ return DROP_PACKET;
+ }
+ return SEND_PACKET;
+ }
+
+ bool HasTransportFeedback(const uint8_t* data, size_t length) const {
+ test::RtcpPacketParser parser;
+ EXPECT_TRUE(parser.Parse(data, length));
+ return parser.transport_feedback()->num_packets() > 0;
+ }
+ void ModifySenderBitrateConfig(
+ BitrateConstraints* bitrate_config) override {
+ bitrate_config->max_bitrate_bps = 300000;
+ }
+
+ void PerformTest() override {
+ constexpr TimeDelta kFailureTimeout = TimeDelta::Seconds(10);
+ EXPECT_TRUE(observation_complete_.Wait(kFailureTimeout))
+ << "Stream not continued after congestion window full.";
+ }
+
+ size_t GetNumVideoStreams() const override { return num_video_streams_; }
+ size_t GetNumAudioStreams() const override { return num_audio_streams_; }
+
+ private:
+ const size_t num_video_streams_;
+ const size_t num_audio_streams_;
+ Mutex mutex_;
+ int media_sent_ RTC_GUARDED_BY(mutex_);
+ int media_sent_before_ RTC_GUARDED_BY(mutex_);
+ int padding_sent_ RTC_GUARDED_BY(mutex_);
+ } test(1, 0);
+ RunBaseTest(&test);
+}
+
+TEST_F(TransportFeedbackEndToEndTest, TransportSeqNumOnAudioAndVideo) {
+ static constexpr size_t kMinPacketsToWaitFor = 50;
+ class TransportSequenceNumberTest : public test::EndToEndTest {
+ public:
+ TransportSequenceNumberTest()
+ : EndToEndTest(kDefaultTimeout),
+ video_observed_(false),
+ audio_observed_(false) {
+ extensions_.Register<TransportSequenceNumber>(
+ kTransportSequenceNumberExtensionId);
+ }
+
+ size_t GetNumVideoStreams() const override { return 1; }
+ size_t GetNumAudioStreams() const override { return 1; }
+
+ void ModifyAudioConfigs(AudioSendStream::Config* send_config,
+ std::vector<AudioReceiveStreamInterface::Config>*
+ receive_configs) override {
+ send_config->rtp.extensions.clear();
+ send_config->rtp.extensions.push_back(
+ RtpExtension(RtpExtension::kTransportSequenceNumberUri,
+ kTransportSequenceNumberExtensionId));
+ (*receive_configs)[0].rtp.extensions.clear();
+ (*receive_configs)[0].rtp.extensions = send_config->rtp.extensions;
+ }
+
+ Action OnSendRtp(const uint8_t* packet, size_t length) override {
+ RtpPacket rtp_packet(&extensions_);
+ EXPECT_TRUE(rtp_packet.Parse(packet, length));
+ uint16_t transport_sequence_number = 0;
+ EXPECT_TRUE(rtp_packet.GetExtension<TransportSequenceNumber>(
+ &transport_sequence_number));
+ // Unwrap packet id and verify uniqueness.
+ int64_t packet_id = unwrapper_.Unwrap(transport_sequence_number);
+ EXPECT_TRUE(received_packet_ids_.insert(packet_id).second);
+
+ if (rtp_packet.Ssrc() == kVideoSendSsrcs[0])
+ video_observed_ = true;
+ if (rtp_packet.Ssrc() == kAudioSendSsrc)
+ audio_observed_ = true;
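+      // Once both audio and video packets have been observed, a contiguous
+      // range of ids means the two media types share a single transport-wide
+      // sequence number space.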
+ if (audio_observed_ && video_observed_ &&
+ received_packet_ids_.size() >= kMinPacketsToWaitFor) {
+ size_t packet_id_range =
+ *received_packet_ids_.rbegin() - *received_packet_ids_.begin() + 1;
+ EXPECT_EQ(received_packet_ids_.size(), packet_id_range);
+ observation_complete_.Set();
+ }
+ return SEND_PACKET;
+ }
+
+ void PerformTest() override {
+ EXPECT_TRUE(Wait()) << "Timed out while waiting for audio and video "
+ "packets with transport sequence number.";
+ }
+
+ void ExpectSuccessful() {
+ EXPECT_TRUE(video_observed_);
+ EXPECT_TRUE(audio_observed_);
+ EXPECT_GE(received_packet_ids_.size(), kMinPacketsToWaitFor);
+ }
+
+ private:
+ bool video_observed_;
+ bool audio_observed_;
+ RtpSequenceNumberUnwrapper unwrapper_;
+ std::set<int64_t> received_packet_ids_;
+ RtpHeaderExtensionMap extensions_;
+ } test;
+
+ RunBaseTest(&test);
+  // Double-check the conditions for a successful test to produce a better
+  // error message when the test fails.
+ test.ExpectSuccessful();
+}
+} // namespace webrtc